code
stringlengths
13
6.09M
order_type
stringclasses
2 values
original_example
dict
step_ids
listlengths
1
5
# coding: utf-8 # In[1]: import pandas as pd import numpy as np import itertools # Save a nice dark grey as a variable almost_black = '#262626' import matplotlib import seaborn as sns import matplotlib.pyplot as plt from matplotlib.colors import ListedColormap sns.set() get_ipython().magic('matplotlib inline') # In[2]: filepath = 'data/full_data_genre.csv' header = None header_included = True if header_included: header = 0 df = pd.read_csv(filepath, header = header) df['billboard'] = df['weeks'].map(lambda x: x != 0) df = df.drop('artist', 1) df = df.drop('title', 1) genres = ['country', 'dance', 'hip_hop', 'pop', 'r&b', 'rock', 'alternative'] accoustic = ['key', 'energy', 'liveness', 'tempo', 'speechiness', 'acousticness', 'instrumentalness', 'danceability', 'time_signature', 'loudness', 'duration', 'mode'] artist = ['artist_familiarity', 'artist_hottness'] var = ['artist_familiarity', 'artist_hottness', 'tempo', 'energy', 'liveness', 'danceability','speechiness', 'instrumentalness'] X = np.array(df[var]) # In[3]: fnames = df[var].columns features = dict(zip(fnames, range(len(fnames)))) # In[4]: palette1 = sns.color_palette("Paired") flatui = ["#9b59b6", "#3498db", "#95a5a6", "#e74c3c", "#34495e", "#2ecc71"] palette2 = sns.color_palette(flatui) fs = 20 # fontsize fig, axes = plt.subplots(nrows=2, ncols=2, figsize=(15,15)) ax = list(itertools.product(range(2), range(2))) combinations = [('artist_familiarity', 'artist_hottness'), ('tempo', 'energy'), ('liveness', 'danceability'), ('instrumentalness', 'speechiness')] i = 0 for item in combinations: index = ax[i] x_vis = X[:, [features[item[0]], features[item[1]]]] axes[index[0], index[1]].scatter(x_vis[Y==0, 0], x_vis[Y==0, 1], label="Class #0", alpha=0.5, edgecolor=almost_black, facecolor=palette1[4], linewidth=0.15) axes[index[0], index[1]].scatter(x_vis[Y==1, 0], x_vis[Y==1, 1], label="Class #1", alpha=0.1, edgecolor=almost_black, facecolor=palette2[0], linewidth=0.15) axes[index[0], 
index[1]].set_title(item[1].capitalize(),'v.s.', item[0].capitalize(), fontsize=fs) i+=1 plt.legend() plt.show()
normal
{ "blob_id": "f2786e445bdf66cf6bb66f4cde4c7b2bf819d8aa", "index": 3299, "step-1": "<mask token>\n", "step-2": "<mask token>\nsns.set()\nget_ipython().magic('matplotlib inline')\n<mask token>\nif header_included:\n header = 0\n<mask token>\nfor item in combinations:\n index = ax[i]\n x_vis = X[:, [features[item[0]], features[item[1]]]]\n axes[index[0], index[1]].scatter(x_vis[Y == 0, 0], x_vis[Y == 0, 1],\n label='Class #0', alpha=0.5, edgecolor=almost_black, facecolor=\n palette1[4], linewidth=0.15)\n axes[index[0], index[1]].scatter(x_vis[Y == 1, 0], x_vis[Y == 1, 1],\n label='Class #1', alpha=0.1, edgecolor=almost_black, facecolor=\n palette2[0], linewidth=0.15)\n axes[index[0], index[1]].set_title(item[1].capitalize(), 'v.s.', item[0\n ].capitalize(), fontsize=fs)\n i += 1\nplt.legend()\nplt.show()\n", "step-3": "<mask token>\nalmost_black = '#262626'\n<mask token>\nsns.set()\nget_ipython().magic('matplotlib inline')\nfilepath = 'data/full_data_genre.csv'\nheader = None\nheader_included = True\nif header_included:\n header = 0\ndf = pd.read_csv(filepath, header=header)\ndf['billboard'] = df['weeks'].map(lambda x: x != 0)\ndf = df.drop('artist', 1)\ndf = df.drop('title', 1)\ngenres = ['country', 'dance', 'hip_hop', 'pop', 'r&b', 'rock', 'alternative']\naccoustic = ['key', 'energy', 'liveness', 'tempo', 'speechiness',\n 'acousticness', 'instrumentalness', 'danceability', 'time_signature',\n 'loudness', 'duration', 'mode']\nartist = ['artist_familiarity', 'artist_hottness']\nvar = ['artist_familiarity', 'artist_hottness', 'tempo', 'energy',\n 'liveness', 'danceability', 'speechiness', 'instrumentalness']\nX = np.array(df[var])\nfnames = df[var].columns\nfeatures = dict(zip(fnames, range(len(fnames))))\npalette1 = sns.color_palette('Paired')\nflatui = ['#9b59b6', '#3498db', '#95a5a6', '#e74c3c', '#34495e', '#2ecc71']\npalette2 = sns.color_palette(flatui)\nfs = 20\nfig, axes = plt.subplots(nrows=2, ncols=2, figsize=(15, 15))\nax = list(itertools.product(range(2), 
range(2)))\ncombinations = [('artist_familiarity', 'artist_hottness'), ('tempo',\n 'energy'), ('liveness', 'danceability'), ('instrumentalness',\n 'speechiness')]\ni = 0\nfor item in combinations:\n index = ax[i]\n x_vis = X[:, [features[item[0]], features[item[1]]]]\n axes[index[0], index[1]].scatter(x_vis[Y == 0, 0], x_vis[Y == 0, 1],\n label='Class #0', alpha=0.5, edgecolor=almost_black, facecolor=\n palette1[4], linewidth=0.15)\n axes[index[0], index[1]].scatter(x_vis[Y == 1, 0], x_vis[Y == 1, 1],\n label='Class #1', alpha=0.1, edgecolor=almost_black, facecolor=\n palette2[0], linewidth=0.15)\n axes[index[0], index[1]].set_title(item[1].capitalize(), 'v.s.', item[0\n ].capitalize(), fontsize=fs)\n i += 1\nplt.legend()\nplt.show()\n", "step-4": "import pandas as pd\nimport numpy as np\nimport itertools\nalmost_black = '#262626'\nimport matplotlib\nimport seaborn as sns\nimport matplotlib.pyplot as plt\nfrom matplotlib.colors import ListedColormap\nsns.set()\nget_ipython().magic('matplotlib inline')\nfilepath = 'data/full_data_genre.csv'\nheader = None\nheader_included = True\nif header_included:\n header = 0\ndf = pd.read_csv(filepath, header=header)\ndf['billboard'] = df['weeks'].map(lambda x: x != 0)\ndf = df.drop('artist', 1)\ndf = df.drop('title', 1)\ngenres = ['country', 'dance', 'hip_hop', 'pop', 'r&b', 'rock', 'alternative']\naccoustic = ['key', 'energy', 'liveness', 'tempo', 'speechiness',\n 'acousticness', 'instrumentalness', 'danceability', 'time_signature',\n 'loudness', 'duration', 'mode']\nartist = ['artist_familiarity', 'artist_hottness']\nvar = ['artist_familiarity', 'artist_hottness', 'tempo', 'energy',\n 'liveness', 'danceability', 'speechiness', 'instrumentalness']\nX = np.array(df[var])\nfnames = df[var].columns\nfeatures = dict(zip(fnames, range(len(fnames))))\npalette1 = sns.color_palette('Paired')\nflatui = ['#9b59b6', '#3498db', '#95a5a6', '#e74c3c', '#34495e', '#2ecc71']\npalette2 = sns.color_palette(flatui)\nfs = 20\nfig, axes = 
plt.subplots(nrows=2, ncols=2, figsize=(15, 15))\nax = list(itertools.product(range(2), range(2)))\ncombinations = [('artist_familiarity', 'artist_hottness'), ('tempo',\n 'energy'), ('liveness', 'danceability'), ('instrumentalness',\n 'speechiness')]\ni = 0\nfor item in combinations:\n index = ax[i]\n x_vis = X[:, [features[item[0]], features[item[1]]]]\n axes[index[0], index[1]].scatter(x_vis[Y == 0, 0], x_vis[Y == 0, 1],\n label='Class #0', alpha=0.5, edgecolor=almost_black, facecolor=\n palette1[4], linewidth=0.15)\n axes[index[0], index[1]].scatter(x_vis[Y == 1, 0], x_vis[Y == 1, 1],\n label='Class #1', alpha=0.1, edgecolor=almost_black, facecolor=\n palette2[0], linewidth=0.15)\n axes[index[0], index[1]].set_title(item[1].capitalize(), 'v.s.', item[0\n ].capitalize(), fontsize=fs)\n i += 1\nplt.legend()\nplt.show()\n", "step-5": "\n# coding: utf-8\n\n# In[1]:\n\nimport pandas as pd\nimport numpy as np\nimport itertools\n# Save a nice dark grey as a variable\nalmost_black = '#262626'\nimport matplotlib\nimport seaborn as sns\nimport matplotlib.pyplot as plt\nfrom matplotlib.colors import ListedColormap\nsns.set()\nget_ipython().magic('matplotlib inline')\n\n\n# In[2]:\n\nfilepath = 'data/full_data_genre.csv'\n\nheader = None\nheader_included = True\nif header_included:\n header = 0\n \ndf = pd.read_csv(filepath, header = header)\ndf['billboard'] = df['weeks'].map(lambda x: x != 0)\ndf = df.drop('artist', 1)\ndf = df.drop('title', 1)\n\ngenres = ['country', 'dance', 'hip_hop', 'pop', 'r&b', 'rock', 'alternative']\naccoustic = ['key', 'energy', 'liveness', 'tempo', 'speechiness',\n 'acousticness', 'instrumentalness', 'danceability', \n 'time_signature', 'loudness', 'duration', 'mode']\nartist = ['artist_familiarity', 'artist_hottness']\n\nvar = ['artist_familiarity', 'artist_hottness', 'tempo', 'energy', \n 'liveness', 'danceability','speechiness', 'instrumentalness']\n\nX = np.array(df[var])\n\n\n# In[3]:\n\nfnames = df[var].columns\nfeatures = dict(zip(fnames, 
range(len(fnames))))\n\n\n# In[4]:\n\npalette1 = sns.color_palette(\"Paired\")\nflatui = [\"#9b59b6\", \"#3498db\", \"#95a5a6\", \"#e74c3c\", \"#34495e\", \"#2ecc71\"]\npalette2 = sns.color_palette(flatui)\nfs = 20 # fontsize\nfig, axes = plt.subplots(nrows=2, ncols=2, figsize=(15,15))\n\nax = list(itertools.product(range(2), range(2)))\n\ncombinations = [('artist_familiarity', 'artist_hottness'),\n ('tempo', 'energy'),\n ('liveness', 'danceability'),\n ('instrumentalness', 'speechiness')]\n\ni = 0\nfor item in combinations:\n index = ax[i]\n x_vis = X[:, [features[item[0]], features[item[1]]]]\n axes[index[0], index[1]].scatter(x_vis[Y==0, 0], x_vis[Y==0, 1], label=\"Class #0\",\n alpha=0.5, edgecolor=almost_black, \n facecolor=palette1[4], linewidth=0.15)\n axes[index[0], index[1]].scatter(x_vis[Y==1, 0], x_vis[Y==1, 1], label=\"Class #1\", \n alpha=0.1, edgecolor=almost_black, \n facecolor=palette2[0], linewidth=0.15)\n axes[index[0], index[1]].set_title(item[1].capitalize(),'v.s.',\n item[0].capitalize(), \n fontsize=fs)\n\n i+=1\n \nplt.legend()\nplt.show()\n\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
#!/usr/bin/python3 def file_to_code(fname): mem = [] for line in open(fname,"r"): mem.extend([int(i) for i in line.split(",")]) return mem class Opcode(object): def __init__(self, mem, ptr, code, inc): """ >>> o = Opcode([1001, 2, 4, 1], 0, 1, 4) >>> o._Opcode__par_modes [0, 1] """ if mem[ptr]%100 != code: raise Exception("Creating Opcode%d for opcode %d"%(code, mem[ptr])) self.memory = mem self.ptr = ptr self.__par_modes = list(reversed([int(i) for i in str(int(mem[ptr]/100))])) self.__ptr_inc = inc def ptr_inc(self): return self.__ptr_inc def get_val(self, arg_idx): """ >>> o = Opcode([1001, 2, 4, 1], 0, 1, 4) >>> o.get_val(1) 4 >>> o.get_val(2) 4 >>> o.get_val(3) 2 """ idx = arg_idx-1 if idx >= len(self.__par_modes) or self.__par_modes[idx] == 0: return self.memory[self.memory[self.ptr+arg_idx]] elif self.__par_modes[idx] == 1: return self.memory[self.ptr + arg_idx] def set_ptr(self): return False,0 def reads(self): raise Exception("Call to base class reads()") def writes(self): raise Exception("Call to base class writes()") def op(self): raise Exception("Call to base class op()") def params(self): raise Exception("Call to base class params()") def run(self): raise Exception("Call to base class run()") class Opcode1(Opcode): """ >>> o = Opcode1([101, 2, 1, 3], 0) >>> o.run() True >>> o.memory [101, 2, 1, 4] """ def __init__(self, mem, ptr): super().__init__(mem, ptr, 1, 4) self.__first = self.get_val(1) self.__second = self.get_val(2) self.__res = mem[ptr+3] def run(self): self.memory[self.__res] = self.__first + self.__second return True def params(self): return {'noun':self.__first, 'verb':self.__second, 'result':self.__res} def reads(self): return [self.__first, self.__second] def writes(self): return self.__res def op(self): return "+" def __str__(self): return "loc[%d] = %d + %d"%(self.__res,self.__first,self.__second) class Opcode2(Opcode): """ >>> o = Opcode2([2, 2, 3, 4, 99], 0) >>> o.run() True >>> o.memory [2, 2, 3, 4, 12] """ def __init__(self, mem, 
ptr): super().__init__(mem, ptr, 2, 4) self.__first = self.get_val(1) self.__second = self.get_val(2) self.__res = mem[ptr+3] def run(self): self.memory[self.__res] = self.__first * self.__second return True def params(self): return {'noun':self.__first, 'verb':self.__second, 'result':self.__res} def reads(self): return [self.__first, self.__second] def writes(self): return self.__res def op(self): return "*" def __str__(self): return "loc[%d] = %d * %d"%(self.__res,self.__first,self.__second) class Opcode99(Opcode): """ >>> o = Opcode99([99,12,3,4,5], 0) >>> o.run() False """ def __init__(self, mem, ptr): super().__init__(mem, ptr, 99, 1) def run(self): return False def params(self): return {} def reads(self): return [] def writes(self): return None def op(self): return "HALT" def __str__(self): return "HALT" def default_ops(): return {1:Opcode1,2:Opcode2,99:Opcode99} class Interpreter(object): def __init__(self, input_code, ops=default_ops()): self.__memory = input_code self.__ops = ops self.__ptr = 0 self.__running = True self.length = len(self.__memory) def stepi(self): o = None if self.__running: o = self.next_op() self.__running = o.run() chk,val = o.set_ptr() if chk: self.__ptr = val else: self.__ptr += o.ptr_inc() return o def run(self): while self.__running: self.stepi() def inspect(self,loc): return self.__memory[loc] def next_op(self): return self.op_at(self.__ptr) def op_at(self, ptr): return self.__ops[self.__memory[ptr] % 100](self.__memory, ptr) def __str__(self): strs = [] for i,v in enumerate(self.__memory): if i == self.__ptr: strs.append("{:*>4}".format(v)) else: strs.append("{:>4}".format(v)) return ",".join(strs) + "\n" + "Next:\n\t" + str(self.next_op()) def poke(self,loc,val): self.__memory[loc] = val def rebind(self,code,call): self.__ops[code] = call def as_opcodes(self): ops = [self.op_at(0)] ptr = ops[-1].ptr_inc() while ops[-1].op() != "HALT": ops.append(self.op_at(ptr)) ptr += ops[-1].ptr_inc() return ops class ValueNode(object): def 
__init__(self,val,tag=''): self.__val = val self.__tag = tag def __str__(self): return self.__tag + str(self.__val) class OpNode(object): def __init__(self,op,depends): self.__op = op self.__depends = depends def __str__(self): return "(" + self.__op.op().join([str(i) for i in self.__depends]) + ")" class OpcodeTreeBuilder(object): def __init__(self, interp): self.__interpreter = interp self.__codes = interp.as_opcodes() def construct_mappings(self): for i in self.__codes: params = i.params() if 'result' in params.keys(): if params['result'] not in self.__writes_to.keys(): self.__writes_to[params['result']] = [] self.__writes_to[params['result']].append(i) if 'noun' in params.keys(): if params['noun'] not in self.__reads_from.keys(): self.__reads_from[params['noun']] = [] self.__reads_from[params['noun']].append(i) if 'verb' in params.keys(): if params['verb'] not in self.__reads_from.keys(): self.__reads_from[params['verb']] = [] self.__reads_from[params['verb']].append(i) def construct_graph(self): op = self.__interpreter.op_at(0) reads = [ValueNode(self.__interpreter.inspect(i),tag="raw%d_"%(i)) for i in op.reads()] writes = op.writes() base = OpNode(op,reads) ptr = op.ptr_inc() last_write = {} if writes: last_write[writes] = base while op.op() != "HALT": op = self.__interpreter.op_at(ptr) if op.op() == "HALT": break depends = [] for i in op.reads(): if i in last_write.keys(): depends.append(last_write[i]) else: depends.append(ValueNode(self.__interpreter.inspect(i))) base = OpNode(op,depends) if op.writes(): last_write[op.writes()] = base ptr += op.ptr_inc() return base if __name__=='__main__': import doctest doctest.testmod() ################################################# # i = Interpreter(file_to_code("day2_input.txt")) # i.run() # i.inspect(0)
normal
{ "blob_id": "653e65281984ebb06467aeadb6f0e2b11f1bcb4d", "index": 496, "step-1": "<mask token>\n\n\nclass Opcode1(Opcode):\n <mask token>\n\n def __init__(self, mem, ptr):\n super().__init__(mem, ptr, 1, 4)\n self.__first = self.get_val(1)\n self.__second = self.get_val(2)\n self.__res = mem[ptr + 3]\n\n def run(self):\n self.memory[self.__res] = self.__first + self.__second\n return True\n <mask token>\n <mask token>\n <mask token>\n\n def op(self):\n return '+'\n <mask token>\n\n\nclass Opcode2(Opcode):\n \"\"\"\n\t>>> o = Opcode2([2, 2, 3, 4, 99], 0)\n\t>>> o.run()\n\tTrue\n\t>>> o.memory\n\t[2, 2, 3, 4, 12]\n\t\"\"\"\n\n def __init__(self, mem, ptr):\n super().__init__(mem, ptr, 2, 4)\n self.__first = self.get_val(1)\n self.__second = self.get_val(2)\n self.__res = mem[ptr + 3]\n\n def run(self):\n self.memory[self.__res] = self.__first * self.__second\n return True\n\n def params(self):\n return {'noun': self.__first, 'verb': self.__second, 'result': self\n .__res}\n\n def reads(self):\n return [self.__first, self.__second]\n\n def writes(self):\n return self.__res\n\n def op(self):\n return '*'\n\n def __str__(self):\n return 'loc[%d] = %d * %d' % (self.__res, self.__first, self.__second)\n\n\nclass Opcode99(Opcode):\n \"\"\"\n\t>>> o = Opcode99([99,12,3,4,5], 0)\n\t>>> o.run()\n\tFalse\n\t\"\"\"\n\n def __init__(self, mem, ptr):\n super().__init__(mem, ptr, 99, 1)\n\n def run(self):\n return False\n\n def params(self):\n return {}\n\n def reads(self):\n return []\n\n def writes(self):\n return None\n\n def op(self):\n return 'HALT'\n\n def __str__(self):\n return 'HALT'\n\n\n<mask token>\n\n\nclass Interpreter(object):\n\n def __init__(self, input_code, ops=default_ops()):\n self.__memory = input_code\n self.__ops = ops\n self.__ptr = 0\n self.__running = True\n self.length = len(self.__memory)\n\n def stepi(self):\n o = None\n if self.__running:\n o = self.next_op()\n self.__running = o.run()\n chk, val = o.set_ptr()\n if chk:\n self.__ptr = val\n else:\n 
self.__ptr += o.ptr_inc()\n return o\n\n def run(self):\n while self.__running:\n self.stepi()\n\n def inspect(self, loc):\n return self.__memory[loc]\n\n def next_op(self):\n return self.op_at(self.__ptr)\n\n def op_at(self, ptr):\n return self.__ops[self.__memory[ptr] % 100](self.__memory, ptr)\n\n def __str__(self):\n strs = []\n for i, v in enumerate(self.__memory):\n if i == self.__ptr:\n strs.append('{:*>4}'.format(v))\n else:\n strs.append('{:>4}'.format(v))\n return ','.join(strs) + '\\n' + 'Next:\\n\\t' + str(self.next_op())\n\n def poke(self, loc, val):\n self.__memory[loc] = val\n\n def rebind(self, code, call):\n self.__ops[code] = call\n\n def as_opcodes(self):\n ops = [self.op_at(0)]\n ptr = ops[-1].ptr_inc()\n while ops[-1].op() != 'HALT':\n ops.append(self.op_at(ptr))\n ptr += ops[-1].ptr_inc()\n return ops\n\n\nclass ValueNode(object):\n\n def __init__(self, val, tag=''):\n self.__val = val\n self.__tag = tag\n\n def __str__(self):\n return self.__tag + str(self.__val)\n\n\nclass OpNode(object):\n\n def __init__(self, op, depends):\n self.__op = op\n self.__depends = depends\n\n def __str__(self):\n return '(' + self.__op.op().join([str(i) for i in self.__depends]\n ) + ')'\n\n\nclass OpcodeTreeBuilder(object):\n\n def __init__(self, interp):\n self.__interpreter = interp\n self.__codes = interp.as_opcodes()\n\n def construct_mappings(self):\n for i in self.__codes:\n params = i.params()\n if 'result' in params.keys():\n if params['result'] not in self.__writes_to.keys():\n self.__writes_to[params['result']] = []\n self.__writes_to[params['result']].append(i)\n if 'noun' in params.keys():\n if params['noun'] not in self.__reads_from.keys():\n self.__reads_from[params['noun']] = []\n self.__reads_from[params['noun']].append(i)\n if 'verb' in params.keys():\n if params['verb'] not in self.__reads_from.keys():\n self.__reads_from[params['verb']] = []\n self.__reads_from[params['verb']].append(i)\n\n def construct_graph(self):\n op = 
self.__interpreter.op_at(0)\n reads = [ValueNode(self.__interpreter.inspect(i), tag='raw%d_' % i) for\n i in op.reads()]\n writes = op.writes()\n base = OpNode(op, reads)\n ptr = op.ptr_inc()\n last_write = {}\n if writes:\n last_write[writes] = base\n while op.op() != 'HALT':\n op = self.__interpreter.op_at(ptr)\n if op.op() == 'HALT':\n break\n depends = []\n for i in op.reads():\n if i in last_write.keys():\n depends.append(last_write[i])\n else:\n depends.append(ValueNode(self.__interpreter.inspect(i)))\n base = OpNode(op, depends)\n if op.writes():\n last_write[op.writes()] = base\n ptr += op.ptr_inc()\n return base\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\nclass Opcode1(Opcode):\n <mask token>\n\n def __init__(self, mem, ptr):\n super().__init__(mem, ptr, 1, 4)\n self.__first = self.get_val(1)\n self.__second = self.get_val(2)\n self.__res = mem[ptr + 3]\n\n def run(self):\n self.memory[self.__res] = self.__first + self.__second\n return True\n <mask token>\n <mask token>\n\n def writes(self):\n return self.__res\n\n def op(self):\n return '+'\n <mask token>\n\n\nclass Opcode2(Opcode):\n \"\"\"\n\t>>> o = Opcode2([2, 2, 3, 4, 99], 0)\n\t>>> o.run()\n\tTrue\n\t>>> o.memory\n\t[2, 2, 3, 4, 12]\n\t\"\"\"\n\n def __init__(self, mem, ptr):\n super().__init__(mem, ptr, 2, 4)\n self.__first = self.get_val(1)\n self.__second = self.get_val(2)\n self.__res = mem[ptr + 3]\n\n def run(self):\n self.memory[self.__res] = self.__first * self.__second\n return True\n\n def params(self):\n return {'noun': self.__first, 'verb': self.__second, 'result': self\n .__res}\n\n def reads(self):\n return [self.__first, self.__second]\n\n def writes(self):\n return self.__res\n\n def op(self):\n return '*'\n\n def __str__(self):\n return 'loc[%d] = %d * %d' % (self.__res, self.__first, self.__second)\n\n\nclass Opcode99(Opcode):\n \"\"\"\n\t>>> o = Opcode99([99,12,3,4,5], 0)\n\t>>> o.run()\n\tFalse\n\t\"\"\"\n\n def __init__(self, mem, ptr):\n super().__init__(mem, ptr, 99, 
1)\n\n def run(self):\n return False\n\n def params(self):\n return {}\n\n def reads(self):\n return []\n\n def writes(self):\n return None\n\n def op(self):\n return 'HALT'\n\n def __str__(self):\n return 'HALT'\n\n\n<mask token>\n\n\nclass Interpreter(object):\n\n def __init__(self, input_code, ops=default_ops()):\n self.__memory = input_code\n self.__ops = ops\n self.__ptr = 0\n self.__running = True\n self.length = len(self.__memory)\n\n def stepi(self):\n o = None\n if self.__running:\n o = self.next_op()\n self.__running = o.run()\n chk, val = o.set_ptr()\n if chk:\n self.__ptr = val\n else:\n self.__ptr += o.ptr_inc()\n return o\n\n def run(self):\n while self.__running:\n self.stepi()\n\n def inspect(self, loc):\n return self.__memory[loc]\n\n def next_op(self):\n return self.op_at(self.__ptr)\n\n def op_at(self, ptr):\n return self.__ops[self.__memory[ptr] % 100](self.__memory, ptr)\n\n def __str__(self):\n strs = []\n for i, v in enumerate(self.__memory):\n if i == self.__ptr:\n strs.append('{:*>4}'.format(v))\n else:\n strs.append('{:>4}'.format(v))\n return ','.join(strs) + '\\n' + 'Next:\\n\\t' + str(self.next_op())\n\n def poke(self, loc, val):\n self.__memory[loc] = val\n\n def rebind(self, code, call):\n self.__ops[code] = call\n\n def as_opcodes(self):\n ops = [self.op_at(0)]\n ptr = ops[-1].ptr_inc()\n while ops[-1].op() != 'HALT':\n ops.append(self.op_at(ptr))\n ptr += ops[-1].ptr_inc()\n return ops\n\n\nclass ValueNode(object):\n\n def __init__(self, val, tag=''):\n self.__val = val\n self.__tag = tag\n\n def __str__(self):\n return self.__tag + str(self.__val)\n\n\nclass OpNode(object):\n\n def __init__(self, op, depends):\n self.__op = op\n self.__depends = depends\n\n def __str__(self):\n return '(' + self.__op.op().join([str(i) for i in self.__depends]\n ) + ')'\n\n\nclass OpcodeTreeBuilder(object):\n\n def __init__(self, interp):\n self.__interpreter = interp\n self.__codes = interp.as_opcodes()\n\n def construct_mappings(self):\n for i in 
self.__codes:\n params = i.params()\n if 'result' in params.keys():\n if params['result'] not in self.__writes_to.keys():\n self.__writes_to[params['result']] = []\n self.__writes_to[params['result']].append(i)\n if 'noun' in params.keys():\n if params['noun'] not in self.__reads_from.keys():\n self.__reads_from[params['noun']] = []\n self.__reads_from[params['noun']].append(i)\n if 'verb' in params.keys():\n if params['verb'] not in self.__reads_from.keys():\n self.__reads_from[params['verb']] = []\n self.__reads_from[params['verb']].append(i)\n\n def construct_graph(self):\n op = self.__interpreter.op_at(0)\n reads = [ValueNode(self.__interpreter.inspect(i), tag='raw%d_' % i) for\n i in op.reads()]\n writes = op.writes()\n base = OpNode(op, reads)\n ptr = op.ptr_inc()\n last_write = {}\n if writes:\n last_write[writes] = base\n while op.op() != 'HALT':\n op = self.__interpreter.op_at(ptr)\n if op.op() == 'HALT':\n break\n depends = []\n for i in op.reads():\n if i in last_write.keys():\n depends.append(last_write[i])\n else:\n depends.append(ValueNode(self.__interpreter.inspect(i)))\n base = OpNode(op, depends)\n if op.writes():\n last_write[op.writes()] = base\n ptr += op.ptr_inc()\n return base\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\nclass Opcode(object):\n <mask token>\n\n def ptr_inc(self):\n return self.__ptr_inc\n\n def get_val(self, arg_idx):\n \"\"\"\n\t\t>>> o = Opcode([1001, 2, 4, 1], 0, 1, 4)\n\t\t>>> o.get_val(1)\n\t\t4\n\t\t>>> o.get_val(2)\n\t\t4\n\t\t>>> o.get_val(3)\n\t\t2\n\t\t\"\"\"\n idx = arg_idx - 1\n if idx >= len(self.__par_modes) or self.__par_modes[idx] == 0:\n return self.memory[self.memory[self.ptr + arg_idx]]\n elif self.__par_modes[idx] == 1:\n return self.memory[self.ptr + arg_idx]\n\n def set_ptr(self):\n return False, 0\n\n def reads(self):\n raise Exception('Call to base class reads()')\n <mask token>\n <mask token>\n\n def params(self):\n raise Exception('Call to base class params()')\n\n def run(self):\n raise 
Exception('Call to base class run()')\n\n\nclass Opcode1(Opcode):\n \"\"\"\n\t>>> o = Opcode1([101, 2, 1, 3], 0)\n\t>>> o.run()\n\tTrue\n\t>>> o.memory\n\t[101, 2, 1, 4]\n\t\"\"\"\n\n def __init__(self, mem, ptr):\n super().__init__(mem, ptr, 1, 4)\n self.__first = self.get_val(1)\n self.__second = self.get_val(2)\n self.__res = mem[ptr + 3]\n\n def run(self):\n self.memory[self.__res] = self.__first + self.__second\n return True\n\n def params(self):\n return {'noun': self.__first, 'verb': self.__second, 'result': self\n .__res}\n\n def reads(self):\n return [self.__first, self.__second]\n\n def writes(self):\n return self.__res\n\n def op(self):\n return '+'\n\n def __str__(self):\n return 'loc[%d] = %d + %d' % (self.__res, self.__first, self.__second)\n\n\nclass Opcode2(Opcode):\n \"\"\"\n\t>>> o = Opcode2([2, 2, 3, 4, 99], 0)\n\t>>> o.run()\n\tTrue\n\t>>> o.memory\n\t[2, 2, 3, 4, 12]\n\t\"\"\"\n\n def __init__(self, mem, ptr):\n super().__init__(mem, ptr, 2, 4)\n self.__first = self.get_val(1)\n self.__second = self.get_val(2)\n self.__res = mem[ptr + 3]\n\n def run(self):\n self.memory[self.__res] = self.__first * self.__second\n return True\n\n def params(self):\n return {'noun': self.__first, 'verb': self.__second, 'result': self\n .__res}\n\n def reads(self):\n return [self.__first, self.__second]\n\n def writes(self):\n return self.__res\n\n def op(self):\n return '*'\n\n def __str__(self):\n return 'loc[%d] = %d * %d' % (self.__res, self.__first, self.__second)\n\n\nclass Opcode99(Opcode):\n \"\"\"\n\t>>> o = Opcode99([99,12,3,4,5], 0)\n\t>>> o.run()\n\tFalse\n\t\"\"\"\n\n def __init__(self, mem, ptr):\n super().__init__(mem, ptr, 99, 1)\n\n def run(self):\n return False\n\n def params(self):\n return {}\n\n def reads(self):\n return []\n\n def writes(self):\n return None\n\n def op(self):\n return 'HALT'\n\n def __str__(self):\n return 'HALT'\n\n\n<mask token>\n\n\nclass Interpreter(object):\n\n def __init__(self, input_code, ops=default_ops()):\n 
self.__memory = input_code\n self.__ops = ops\n self.__ptr = 0\n self.__running = True\n self.length = len(self.__memory)\n\n def stepi(self):\n o = None\n if self.__running:\n o = self.next_op()\n self.__running = o.run()\n chk, val = o.set_ptr()\n if chk:\n self.__ptr = val\n else:\n self.__ptr += o.ptr_inc()\n return o\n\n def run(self):\n while self.__running:\n self.stepi()\n\n def inspect(self, loc):\n return self.__memory[loc]\n\n def next_op(self):\n return self.op_at(self.__ptr)\n\n def op_at(self, ptr):\n return self.__ops[self.__memory[ptr] % 100](self.__memory, ptr)\n\n def __str__(self):\n strs = []\n for i, v in enumerate(self.__memory):\n if i == self.__ptr:\n strs.append('{:*>4}'.format(v))\n else:\n strs.append('{:>4}'.format(v))\n return ','.join(strs) + '\\n' + 'Next:\\n\\t' + str(self.next_op())\n\n def poke(self, loc, val):\n self.__memory[loc] = val\n\n def rebind(self, code, call):\n self.__ops[code] = call\n\n def as_opcodes(self):\n ops = [self.op_at(0)]\n ptr = ops[-1].ptr_inc()\n while ops[-1].op() != 'HALT':\n ops.append(self.op_at(ptr))\n ptr += ops[-1].ptr_inc()\n return ops\n\n\nclass ValueNode(object):\n\n def __init__(self, val, tag=''):\n self.__val = val\n self.__tag = tag\n\n def __str__(self):\n return self.__tag + str(self.__val)\n\n\nclass OpNode(object):\n\n def __init__(self, op, depends):\n self.__op = op\n self.__depends = depends\n\n def __str__(self):\n return '(' + self.__op.op().join([str(i) for i in self.__depends]\n ) + ')'\n\n\nclass OpcodeTreeBuilder(object):\n\n def __init__(self, interp):\n self.__interpreter = interp\n self.__codes = interp.as_opcodes()\n\n def construct_mappings(self):\n for i in self.__codes:\n params = i.params()\n if 'result' in params.keys():\n if params['result'] not in self.__writes_to.keys():\n self.__writes_to[params['result']] = []\n self.__writes_to[params['result']].append(i)\n if 'noun' in params.keys():\n if params['noun'] not in self.__reads_from.keys():\n 
self.__reads_from[params['noun']] = []\n self.__reads_from[params['noun']].append(i)\n if 'verb' in params.keys():\n if params['verb'] not in self.__reads_from.keys():\n self.__reads_from[params['verb']] = []\n self.__reads_from[params['verb']].append(i)\n\n def construct_graph(self):\n op = self.__interpreter.op_at(0)\n reads = [ValueNode(self.__interpreter.inspect(i), tag='raw%d_' % i) for\n i in op.reads()]\n writes = op.writes()\n base = OpNode(op, reads)\n ptr = op.ptr_inc()\n last_write = {}\n if writes:\n last_write[writes] = base\n while op.op() != 'HALT':\n op = self.__interpreter.op_at(ptr)\n if op.op() == 'HALT':\n break\n depends = []\n for i in op.reads():\n if i in last_write.keys():\n depends.append(last_write[i])\n else:\n depends.append(ValueNode(self.__interpreter.inspect(i)))\n base = OpNode(op, depends)\n if op.writes():\n last_write[op.writes()] = base\n ptr += op.ptr_inc()\n return base\n\n\n<mask token>\n", "step-4": "<mask token>\n\n\nclass Opcode(object):\n\n def __init__(self, mem, ptr, code, inc):\n \"\"\"\n\t\t>>> o = Opcode([1001, 2, 4, 1], 0, 1, 4)\n\t\t>>> o._Opcode__par_modes\n\t\t[0, 1]\n\t\t\"\"\"\n if mem[ptr] % 100 != code:\n raise Exception('Creating Opcode%d for opcode %d' % (code, mem[\n ptr]))\n self.memory = mem\n self.ptr = ptr\n self.__par_modes = list(reversed([int(i) for i in str(int(mem[ptr] /\n 100))]))\n self.__ptr_inc = inc\n\n def ptr_inc(self):\n return self.__ptr_inc\n\n def get_val(self, arg_idx):\n \"\"\"\n\t\t>>> o = Opcode([1001, 2, 4, 1], 0, 1, 4)\n\t\t>>> o.get_val(1)\n\t\t4\n\t\t>>> o.get_val(2)\n\t\t4\n\t\t>>> o.get_val(3)\n\t\t2\n\t\t\"\"\"\n idx = arg_idx - 1\n if idx >= len(self.__par_modes) or self.__par_modes[idx] == 0:\n return self.memory[self.memory[self.ptr + arg_idx]]\n elif self.__par_modes[idx] == 1:\n return self.memory[self.ptr + arg_idx]\n\n def set_ptr(self):\n return False, 0\n\n def reads(self):\n raise Exception('Call to base class reads()')\n\n def writes(self):\n raise Exception('Call 
to base class writes()')\n <mask token>\n\n def params(self):\n raise Exception('Call to base class params()')\n\n def run(self):\n raise Exception('Call to base class run()')\n\n\nclass Opcode1(Opcode):\n \"\"\"\n\t>>> o = Opcode1([101, 2, 1, 3], 0)\n\t>>> o.run()\n\tTrue\n\t>>> o.memory\n\t[101, 2, 1, 4]\n\t\"\"\"\n\n def __init__(self, mem, ptr):\n super().__init__(mem, ptr, 1, 4)\n self.__first = self.get_val(1)\n self.__second = self.get_val(2)\n self.__res = mem[ptr + 3]\n\n def run(self):\n self.memory[self.__res] = self.__first + self.__second\n return True\n\n def params(self):\n return {'noun': self.__first, 'verb': self.__second, 'result': self\n .__res}\n\n def reads(self):\n return [self.__first, self.__second]\n\n def writes(self):\n return self.__res\n\n def op(self):\n return '+'\n\n def __str__(self):\n return 'loc[%d] = %d + %d' % (self.__res, self.__first, self.__second)\n\n\nclass Opcode2(Opcode):\n \"\"\"\n\t>>> o = Opcode2([2, 2, 3, 4, 99], 0)\n\t>>> o.run()\n\tTrue\n\t>>> o.memory\n\t[2, 2, 3, 4, 12]\n\t\"\"\"\n\n def __init__(self, mem, ptr):\n super().__init__(mem, ptr, 2, 4)\n self.__first = self.get_val(1)\n self.__second = self.get_val(2)\n self.__res = mem[ptr + 3]\n\n def run(self):\n self.memory[self.__res] = self.__first * self.__second\n return True\n\n def params(self):\n return {'noun': self.__first, 'verb': self.__second, 'result': self\n .__res}\n\n def reads(self):\n return [self.__first, self.__second]\n\n def writes(self):\n return self.__res\n\n def op(self):\n return '*'\n\n def __str__(self):\n return 'loc[%d] = %d * %d' % (self.__res, self.__first, self.__second)\n\n\nclass Opcode99(Opcode):\n \"\"\"\n\t>>> o = Opcode99([99,12,3,4,5], 0)\n\t>>> o.run()\n\tFalse\n\t\"\"\"\n\n def __init__(self, mem, ptr):\n super().__init__(mem, ptr, 99, 1)\n\n def run(self):\n return False\n\n def params(self):\n return {}\n\n def reads(self):\n return []\n\n def writes(self):\n return None\n\n def op(self):\n return 'HALT'\n\n def 
__str__(self):\n return 'HALT'\n\n\n<mask token>\n\n\nclass Interpreter(object):\n\n def __init__(self, input_code, ops=default_ops()):\n self.__memory = input_code\n self.__ops = ops\n self.__ptr = 0\n self.__running = True\n self.length = len(self.__memory)\n\n def stepi(self):\n o = None\n if self.__running:\n o = self.next_op()\n self.__running = o.run()\n chk, val = o.set_ptr()\n if chk:\n self.__ptr = val\n else:\n self.__ptr += o.ptr_inc()\n return o\n\n def run(self):\n while self.__running:\n self.stepi()\n\n def inspect(self, loc):\n return self.__memory[loc]\n\n def next_op(self):\n return self.op_at(self.__ptr)\n\n def op_at(self, ptr):\n return self.__ops[self.__memory[ptr] % 100](self.__memory, ptr)\n\n def __str__(self):\n strs = []\n for i, v in enumerate(self.__memory):\n if i == self.__ptr:\n strs.append('{:*>4}'.format(v))\n else:\n strs.append('{:>4}'.format(v))\n return ','.join(strs) + '\\n' + 'Next:\\n\\t' + str(self.next_op())\n\n def poke(self, loc, val):\n self.__memory[loc] = val\n\n def rebind(self, code, call):\n self.__ops[code] = call\n\n def as_opcodes(self):\n ops = [self.op_at(0)]\n ptr = ops[-1].ptr_inc()\n while ops[-1].op() != 'HALT':\n ops.append(self.op_at(ptr))\n ptr += ops[-1].ptr_inc()\n return ops\n\n\nclass ValueNode(object):\n\n def __init__(self, val, tag=''):\n self.__val = val\n self.__tag = tag\n\n def __str__(self):\n return self.__tag + str(self.__val)\n\n\nclass OpNode(object):\n\n def __init__(self, op, depends):\n self.__op = op\n self.__depends = depends\n\n def __str__(self):\n return '(' + self.__op.op().join([str(i) for i in self.__depends]\n ) + ')'\n\n\nclass OpcodeTreeBuilder(object):\n\n def __init__(self, interp):\n self.__interpreter = interp\n self.__codes = interp.as_opcodes()\n\n def construct_mappings(self):\n for i in self.__codes:\n params = i.params()\n if 'result' in params.keys():\n if params['result'] not in self.__writes_to.keys():\n self.__writes_to[params['result']] = []\n 
self.__writes_to[params['result']].append(i)\n if 'noun' in params.keys():\n if params['noun'] not in self.__reads_from.keys():\n self.__reads_from[params['noun']] = []\n self.__reads_from[params['noun']].append(i)\n if 'verb' in params.keys():\n if params['verb'] not in self.__reads_from.keys():\n self.__reads_from[params['verb']] = []\n self.__reads_from[params['verb']].append(i)\n\n def construct_graph(self):\n op = self.__interpreter.op_at(0)\n reads = [ValueNode(self.__interpreter.inspect(i), tag='raw%d_' % i) for\n i in op.reads()]\n writes = op.writes()\n base = OpNode(op, reads)\n ptr = op.ptr_inc()\n last_write = {}\n if writes:\n last_write[writes] = base\n while op.op() != 'HALT':\n op = self.__interpreter.op_at(ptr)\n if op.op() == 'HALT':\n break\n depends = []\n for i in op.reads():\n if i in last_write.keys():\n depends.append(last_write[i])\n else:\n depends.append(ValueNode(self.__interpreter.inspect(i)))\n base = OpNode(op, depends)\n if op.writes():\n last_write[op.writes()] = base\n ptr += op.ptr_inc()\n return base\n\n\n<mask token>\n", "step-5": "#!/usr/bin/python3\n\ndef file_to_code(fname):\n\tmem = []\n\tfor line in open(fname,\"r\"):\n\t\tmem.extend([int(i) for i in line.split(\",\")])\n\treturn mem\n\nclass Opcode(object):\n\tdef __init__(self, mem, ptr, code, inc):\n\t\t\"\"\"\n\t\t>>> o = Opcode([1001, 2, 4, 1], 0, 1, 4)\n\t\t>>> o._Opcode__par_modes\n\t\t[0, 1]\n\t\t\"\"\"\n\t\tif mem[ptr]%100 != code:\n\t\t\traise Exception(\"Creating Opcode%d for opcode %d\"%(code, mem[ptr]))\n\t\tself.memory = mem\n\t\tself.ptr = ptr\n\t\tself.__par_modes = list(reversed([int(i) for i in str(int(mem[ptr]/100))]))\n\t\tself.__ptr_inc = inc\n\n\tdef ptr_inc(self):\n\t\treturn self.__ptr_inc\n\n\tdef get_val(self, arg_idx):\n\t\t\"\"\"\n\t\t>>> o = Opcode([1001, 2, 4, 1], 0, 1, 4)\n\t\t>>> o.get_val(1)\n\t\t4\n\t\t>>> o.get_val(2)\n\t\t4\n\t\t>>> o.get_val(3)\n\t\t2\n\t\t\"\"\"\n\t\tidx = arg_idx-1\n\t\tif idx >= len(self.__par_modes) or 
self.__par_modes[idx] == 0:\n\t\t\treturn self.memory[self.memory[self.ptr+arg_idx]]\n\t\telif self.__par_modes[idx] == 1:\n\t\t\treturn self.memory[self.ptr + arg_idx]\n\n\tdef set_ptr(self):\n\t\treturn False,0\n\n\tdef reads(self):\n\t\traise Exception(\"Call to base class reads()\")\n\n\tdef writes(self):\n\t\traise Exception(\"Call to base class writes()\")\n\n\tdef op(self):\n\t\traise Exception(\"Call to base class op()\")\n\n\tdef params(self):\n\t\traise Exception(\"Call to base class params()\")\n\n\tdef run(self):\n\t\traise Exception(\"Call to base class run()\")\n\n\nclass Opcode1(Opcode):\n\t\"\"\"\n\t>>> o = Opcode1([101, 2, 1, 3], 0)\n\t>>> o.run()\n\tTrue\n\t>>> o.memory\n\t[101, 2, 1, 4]\n\t\"\"\"\n\tdef __init__(self, mem, ptr):\n\t\tsuper().__init__(mem, ptr, 1, 4)\n\t\tself.__first = self.get_val(1)\n\t\tself.__second = self.get_val(2)\n\t\tself.__res = mem[ptr+3]\n\n\tdef run(self):\n\t\tself.memory[self.__res] = self.__first + self.__second\n\t\treturn True\n\n\tdef params(self):\n\t\treturn {'noun':self.__first, 'verb':self.__second, 'result':self.__res}\n\n\tdef reads(self):\n\t\treturn [self.__first, self.__second]\n\n\tdef writes(self):\n\t\treturn self.__res\n\n\tdef op(self):\n\t\treturn \"+\"\n\n\tdef __str__(self):\n\t\treturn \"loc[%d] = %d + %d\"%(self.__res,self.__first,self.__second)\n\nclass Opcode2(Opcode):\n\t\"\"\"\n\t>>> o = Opcode2([2, 2, 3, 4, 99], 0)\n\t>>> o.run()\n\tTrue\n\t>>> o.memory\n\t[2, 2, 3, 4, 12]\n\t\"\"\"\n\tdef __init__(self, mem, ptr):\n\t\tsuper().__init__(mem, ptr, 2, 4)\n\t\tself.__first = self.get_val(1)\n\t\tself.__second = self.get_val(2)\n\t\tself.__res = mem[ptr+3]\n\n\tdef run(self):\n\t\tself.memory[self.__res] = self.__first * self.__second\n\t\treturn True\n\n\tdef params(self):\n\t\treturn {'noun':self.__first, 'verb':self.__second, 'result':self.__res}\n\n\tdef reads(self):\n\t\treturn [self.__first, self.__second]\n\n\tdef writes(self):\n\t\treturn self.__res\n\n\tdef op(self):\n\t\treturn 
\"*\"\n\n\tdef __str__(self):\n\t\treturn \"loc[%d] = %d * %d\"%(self.__res,self.__first,self.__second)\n\nclass Opcode99(Opcode):\n\t\"\"\"\n\t>>> o = Opcode99([99,12,3,4,5], 0)\n\t>>> o.run()\n\tFalse\n\t\"\"\"\n\tdef __init__(self, mem, ptr):\n\t\tsuper().__init__(mem, ptr, 99, 1)\n\n\tdef run(self):\n\t\treturn False\n\n\tdef params(self):\n\t\treturn {}\n\n\tdef reads(self):\n\t\treturn []\n\n\tdef writes(self):\n\t\treturn None\n\n\tdef op(self):\n\t\treturn \"HALT\"\n\n\tdef __str__(self):\n\t\treturn \"HALT\"\n\ndef default_ops():\n\treturn {1:Opcode1,2:Opcode2,99:Opcode99}\n\nclass Interpreter(object):\n\tdef __init__(self, input_code, ops=default_ops()):\n\t\tself.__memory = input_code\n\n\t\tself.__ops = ops\n\t\tself.__ptr = 0\n\t\tself.__running = True\n\t\tself.length = len(self.__memory)\n\n\tdef stepi(self):\n\t\to = None\n\t\tif self.__running:\n\t\t\to = self.next_op()\n\t\t\tself.__running = o.run()\n\t\t\tchk,val = o.set_ptr()\n\t\t\tif chk:\n\t\t\t\tself.__ptr = val\n\t\t\telse:\n\t\t\t\tself.__ptr += o.ptr_inc()\n\t\treturn o\n\n\tdef run(self):\n\t\twhile self.__running:\n\t\t\tself.stepi()\n\n\tdef inspect(self,loc):\n\t\treturn self.__memory[loc]\n\n\tdef next_op(self):\n\t\treturn self.op_at(self.__ptr)\n\n\tdef op_at(self, ptr):\n\t\treturn self.__ops[self.__memory[ptr] % 100](self.__memory, ptr)\n\n\tdef __str__(self):\n\t\tstrs = []\n\t\tfor i,v in enumerate(self.__memory):\n\t\t\tif i == self.__ptr:\n\t\t\t\tstrs.append(\"{:*>4}\".format(v))\n\t\t\telse:\n\t\t\t\tstrs.append(\"{:>4}\".format(v))\n\t\treturn \",\".join(strs) + \"\\n\" + \"Next:\\n\\t\" + str(self.next_op())\n\n\tdef poke(self,loc,val):\n\t\tself.__memory[loc] = val\n\n\tdef rebind(self,code,call):\n\t\tself.__ops[code] = call\n\n\tdef as_opcodes(self):\n\t\tops = [self.op_at(0)]\n\t\tptr = ops[-1].ptr_inc()\n\t\twhile ops[-1].op() != \"HALT\":\n\t\t\tops.append(self.op_at(ptr))\n\t\t\tptr += ops[-1].ptr_inc()\n\t\treturn ops\n\nclass ValueNode(object):\n\tdef 
__init__(self,val,tag=''):\n\t\tself.__val = val\n\t\tself.__tag = tag\n\n\tdef __str__(self):\n\t\treturn self.__tag + str(self.__val)\n\nclass OpNode(object):\n\tdef __init__(self,op,depends):\n\t\tself.__op = op\n\t\tself.__depends = depends\n\n\tdef __str__(self):\n\t\treturn \"(\" + self.__op.op().join([str(i) for i in self.__depends]) + \")\"\n\nclass OpcodeTreeBuilder(object):\n\tdef __init__(self, interp):\n\t\tself.__interpreter = interp\n\t\tself.__codes = interp.as_opcodes()\n\n\tdef construct_mappings(self):\n\t\tfor i in self.__codes:\n\t\t\tparams = i.params()\n\t\t\tif 'result' in params.keys():\n\t\t\t\tif params['result'] not in self.__writes_to.keys():\n\t\t\t\t\tself.__writes_to[params['result']] = []\n\t\t\t\tself.__writes_to[params['result']].append(i)\n\t\t\tif 'noun' in params.keys():\n\t\t\t\tif params['noun'] not in self.__reads_from.keys():\n\t\t\t\t\tself.__reads_from[params['noun']] = []\n\t\t\t\tself.__reads_from[params['noun']].append(i)\n\t\t\tif 'verb' in params.keys():\n\t\t\t\tif params['verb'] not in self.__reads_from.keys():\n\t\t\t\t\tself.__reads_from[params['verb']] = []\n\t\t\t\tself.__reads_from[params['verb']].append(i)\n\n\tdef construct_graph(self):\n\t\top = self.__interpreter.op_at(0)\n\t\treads = [ValueNode(self.__interpreter.inspect(i),tag=\"raw%d_\"%(i)) for i in op.reads()]\n\t\twrites = op.writes()\n\t\tbase = OpNode(op,reads)\n\t\tptr = op.ptr_inc()\n\t\tlast_write = {}\n\t\tif writes:\n\t\t\tlast_write[writes] = base\n\t\twhile op.op() != \"HALT\":\n\t\t\top = self.__interpreter.op_at(ptr)\n\t\t\tif op.op() == \"HALT\":\n\t\t\t\tbreak\n\t\t\tdepends = []\n\t\t\tfor i in op.reads():\n\t\t\t\tif i in last_write.keys():\n\t\t\t\t\tdepends.append(last_write[i])\n\t\t\t\telse:\n\t\t\t\t\tdepends.append(ValueNode(self.__interpreter.inspect(i)))\n\t\t\tbase = OpNode(op,depends)\n\t\t\tif op.writes():\n\t\t\t\tlast_write[op.writes()] = base\n\t\t\tptr += op.ptr_inc()\n\t\treturn base\n\nif __name__=='__main__':\n\timport 
doctest\n\tdoctest.testmod()\n\n#################################################\n\n#\ti = Interpreter(file_to_code(\"day2_input.txt\"))\n#\ti.run()\n#\ti.inspect(0)\n", "step-ids": [ 43, 44, 55, 57, 62 ] }
[ 43, 44, 55, 57, 62 ]
import sys
from PyQt5.QtWidgets import *
from PyQt5.QtGui import QIcon, QFont
from PyQt5.QtCore import QCoreApplication

import pymysql
import requests

from twisted.internet import reactor, defer
from scrapy.crawler import CrawlerRunner, CrawlerProcess
from scrapy.utils.project import get_project_settings
from spider.jump_300heroes.jump_300heroes.spiders.my_report import JumpReport
from scrapy.settings import Settings
from PyQt5.QtCore import *
from PyQt5.QtGui import *

from multiprocessing import Process


def db_handle():
    """Open and return a new pymysql connection to the local `heroes` database."""
    con = pymysql.connect(
        host='localhost',
        user='web',
        passwd='web',
        charset='utf8',
        database='heroes'
    )
    return con


class Example(QWidget):
    """Main window: crawls a player's 300 Heroes record and displays it.

    Layout: a name line-edit and search button, three raw-text panes, and a
    per-match table (double-click a match_id cell to open a detail pop-up).
    """

    class A(QWidget):
        """Placeholder pop-up window used as the match-detail dialog."""

        def __init__(self):
            super().__init__()
            self.initUI()

        def initUI(self):
            self.setGeometry(300, 300, 300, 220)
            self.setWindowTitle('Icon')
            self.setWindowIcon(QIcon('web.png'))
            self.show()

    def __init__(self):
        super().__init__()
        self.initUI()

    def initUI(self):
        """Build all widgets and arrange them in a grid layout."""
        self.qle = QLineEdit("蔽月八云")
        self.user = self.qle.text()
        self.para = "user={}".format(self.user)
        print(self.user, '1')

        btn = QPushButton('查询', self)
        btn.resize(btn.sizeHint())
        btn.clicked.connect(self.search)

        self.txt = QTextEdit()            # player summary pane
        self.battle = QTextEdit()         # raw player_data rows
        self.player_status = QTextEdit()  # raw per-match rows
        self.create_table()

        # NOTE: the action name must not be "Quit" or "Exit" -- with those
        # names the action fails to show (cause unknown, original author note).
        exitAction = QAction('Exit', self)
        exitAction.setShortcut('Ctrl+Q')
        exitAction.setStatusTip('application')
        exitAction.triggered.connect(qApp.quit)

        grid = QGridLayout()
        grid.setSpacing(10)
        grid.addWidget(self.qle, 1, 0)
        grid.addWidget(btn, 2, 0)
        grid.addWidget(self.txt, 3, 0)
        grid.addWidget(self.battle, 1, 1, 3, 1)
        grid.addWidget(self.player_status, 4, 0, 2, 2)
        grid.addWidget(self.battle_table, 6, 0, 2, 2)
        self.setLayout(grid)

        self.setGeometry(600, 600, 800, 600)
        self.center()
        self.setWindowTitle("战绩查询")
        self.show()

    def create_table(self):
        """Create the per-match table; its rows are filled later by search()."""
        self.battle_table = QTableWidget()
        # Fixed column set; the row count is set from the data in search().
        self.battle_table.setColumnCount(8)
        self.battle_table.setHorizontalHeaderLabels(
            ['match_id', 'head', 'date', 'time', 'kill_count', 'death', 'support', 'score'])
        # Alternating row colours and whole-row selection for readability.
        self.battle_table.setAlternatingRowColors(True)
        self.battle_table.setSelectionBehavior(QAbstractItemView.SelectRows)
        self.battle_table.resizeRowsToContents()
        self.battle_table.doubleClicked.connect(self.on_click)

    @pyqtSlot()
    def on_click(self):
        """Double-click handler: open the detail dialog for the clicked match."""
        currentQTableWidgetItem = self.battle_table.selectedItems()[0]
        # NOTE(review): this takes the text of the clicked cell; it is only a
        # valid match id when column 0 (match_id) was clicked -- confirm.
        match_id = currentQTableWidgetItem.text()
        print(match_id)
        self.showDialog(match_id)

    def showDialog(self, match_id):
        """Fetch the report for *match_id* and pop up the detail window.

        NOTE(review): the HTTP response is currently unused and the dialog is
        an empty placeholder; wiring the data into the dialog is unfinished.
        """
        data = requests.get('http://300report.jumpw.com/api/getmatch?id={}'.format(match_id))
        a = self.A()

    def searchd(self):
        # NOTE(review): dead code -- never connected to any signal, and
        # ``self.a`` does not exist (only the nested class ``Example.A``),
        # so calling this would raise AttributeError. Left flagged rather
        # than guessed at.
        if __name__ == '__main__':
            p = Process(target=self.a)
            p.start()
            p.join()

    def search(self):
        """Crawl fresh data for the entered player and refresh every view."""
        # Bug fix: re-read the line edit so the crawl uses the name the user
        # typed; previously the value captured at startup was used forever.
        self.user = self.qle.text()
        print(self.user)
        print(__name__)

        # Run the spider in-process and block until it finishes.
        # NOTE(review): reactor.run() cannot be restarted once stopped, so a
        # second click of the search button will fail -- the crawl should be
        # moved into a separate process.
        runner = CrawlerRunner(get_project_settings())
        runner.crawl('JumpReport', user=self.user)
        d = runner.join()
        d.addBoth(lambda _: reactor.stop())
        reactor.run()  # blocks until the crawl completes
        print("complete")

        name = self.qle.text()
        db = db_handle()
        with db as con:
            # Parameterized query: *name* is user input, so it must never be
            # spliced into the SQL text (injection / quoting bugs).
            sql = "select * from player where name = %s order by update_time"
            con.execute(sql, (name,))
            player = con.fetchone()

            if player:
                id, name, win, match_count, strength, level, update_time, rank = player
                text = "角色名: {}\n胜场: {}\n总场数: {}\n团分: {}\n团分排行: {}\n等级: {}\n更新时间: {}".format(
                    name, win, match_count, strength, rank, level, update_time)
                self.txt.setText(text)

            # Per-day history for this player.
            sql = "select * from player_data where name = %s order by date"
            con.execute(sql, (name,))
            player_data = con.fetchall()
            a = ""
            for data in player_data:
                a += str(data)
                a += "\n"
            self.battle.setText(str(a))

            # Every recorded match, newest first (no name filter in schema).
            sql = "select * from game_data order by match_id desc"
            con.execute(sql)
            game_data = con.fetchall()
            a = ""
            l = 0
            self.battle_table.setRowCount(len(game_data))
            for data in game_data:
                # data[0] is an internal row id; table columns start at data[1].
                a += str(data[1:])
                print(type(data))
                for i in range(self.battle_table.columnCount()):
                    item = QTableWidgetItem(str(data[i + 1]))
                    # Centre the value in its cell.
                    item.setTextAlignment(Qt.AlignHCenter | Qt.AlignVCenter)
                    self.battle_table.setItem(l, i, item)
                a += "\n"
                self.player_status.setText(str(a))
                l += 1

    def center(self):
        """Move the window to the centre of the available screen area."""
        qr = self.frameGeometry()
        cp = QDesktopWidget().availableGeometry().center()
        qr.moveCenter(cp)
        self.move(qr.topLeft())

    def closeEvent(self, event):
        """Ask for confirmation before letting the window close."""
        reply = QMessageBox.question(self, 'Message',
                                     "Quit?", QMessageBox.Yes |
                                     QMessageBox.No, QMessageBox.Yes)
        if reply == QMessageBox.Yes:
            event.accept()
        else:
            event.ignore()


class BatterReport(QWidget):
    """Stub widget for a future battle-report view (unfinished)."""

    def __init__(self):
        super().__init__()
        self.initUI()

    def initUI(self):
        self.txt = QTextEdit()


if __name__ == '__main__':
    app = QApplication(sys.argv)
    ex = Example()
    sys.exit(app.exec_())
normal
{ "blob_id": "889d465ceeac57a600b2fa3bd26632edcd90a655", "index": 2911, "step-1": "<mask token>\n\n\nclass Example(QWidget):\n\n\n class A(QWidget):\n\n def __init__(self):\n super().__init__()\n self.initUI()\n\n def initUI(self):\n self.setGeometry(300, 300, 300, 220)\n self.setWindowTitle('Icon')\n self.setWindowIcon(QIcon('web.png'))\n self.show()\n <mask token>\n <mask token>\n\n def create_table(self):\n self.battle_table = QTableWidget()\n self.battle_table.setColumnCount(8)\n self.battle_table.setHorizontalHeaderLabels(['match_id', 'head',\n 'date', 'time', 'kill_count', 'death', 'support', 'score'])\n self.battle_table.setAlternatingRowColors(True)\n self.battle_table.setSelectionBehavior(QAbstractItemView.SelectRows)\n self.battle_table.resizeRowsToContents()\n self.battle_table.doubleClicked.connect(self.on_click)\n <mask token>\n\n def showDialog(self, match_id):\n data = requests.get('http://300report.jumpw.com/api/getmatch?id={}'\n .format(match_id))\n a = self.A()\n <mask token>\n\n def search(self):\n print(self.user)\n print(__name__)\n runner = CrawlerRunner(get_project_settings())\n print('a')\n runner.crawl('JumpReport', user=self.user)\n print(self.user)\n d = runner.join()\n d.addBoth(lambda _: reactor.stop())\n reactor.run()\n print('complete')\n name = self.qle.text()\n db = db_handle()\n with db as con:\n sql = (\n \"select * from player where name = '{}' order by update_time\"\n .format(name))\n con.execute(sql)\n player = con.fetchone()\n if player:\n (id, name, win, match_count, strength, level, update_time, rank\n ) = player\n text = (\n '角色名: {}\\n胜场: {}\\n总场数: {}\\n团分: {}\\n团分排行: {}\\n等级: {}\\n更新时间: {}'\n .format(name, win, match_count, strength, rank, level,\n update_time))\n self.txt.setText(text)\n sql = (\"select * from player_data where name = '{}' order by date\"\n .format(name))\n con.execute(sql)\n player_data = con.fetchall()\n a = ''\n for data in player_data:\n a += str(data)\n a += '\\n'\n self.battle.setText(str(a))\n sql 
= 'select * from game_data order by match_id desc'\n con.execute(sql)\n game_data = con.fetchall()\n a = ''\n l = 0\n self.battle_table.setRowCount(len(game_data))\n for data in game_data:\n a += str(data[1:])\n print(type(data))\n for i in range(self.battle_table.columnCount()):\n item = QTableWidgetItem(str(data[i + 1]))\n item.setTextAlignment(Qt.AlignHCenter | Qt.AlignVCenter)\n self.battle_table.setItem(l, i, item)\n a += '\\n'\n self.player_status.setText(str(a))\n l += 1\n <mask token>\n <mask token>\n\n\nclass BatterReport(QWidget):\n\n def __init__(self):\n super().__init__()\n self.initUI()\n\n def initUI(self):\n self.txt = QTextEdit()\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\nclass Example(QWidget):\n\n\n class A(QWidget):\n\n def __init__(self):\n super().__init__()\n self.initUI()\n\n def initUI(self):\n self.setGeometry(300, 300, 300, 220)\n self.setWindowTitle('Icon')\n self.setWindowIcon(QIcon('web.png'))\n self.show()\n <mask token>\n\n def initUI(self):\n self.qle = QLineEdit('蔽月八云')\n self.user = self.qle.text()\n self.para = 'user={}'.format(self.user)\n print(self.user, '1')\n btn = QPushButton('查询', self)\n btn.resize(btn.sizeHint())\n btn.clicked.connect(self.search)\n self.txt = QTextEdit()\n self.battle = QTextEdit()\n self.player_status = QTextEdit()\n self.create_table()\n exitAction = QAction('Exit', self)\n exitAction.setShortcut('Ctrl+Q')\n exitAction.setStatusTip('application')\n exitAction.triggered.connect(qApp.quit)\n grid = QGridLayout()\n grid.setSpacing(10)\n grid.addWidget(self.qle, 1, 0)\n grid.addWidget(btn, 2, 0)\n grid.addWidget(self.txt, 3, 0)\n grid.addWidget(self.battle, 1, 1, 3, 1)\n grid.addWidget(self.player_status, 4, 0, 2, 2)\n grid.addWidget(self.battle_table, 6, 0, 2, 2)\n self.setLayout(grid)\n self.setGeometry(600, 600, 800, 600)\n self.center()\n self.setWindowTitle('战绩查询')\n self.show()\n\n def create_table(self):\n self.battle_table = QTableWidget()\n self.battle_table.setColumnCount(8)\n 
self.battle_table.setHorizontalHeaderLabels(['match_id', 'head',\n 'date', 'time', 'kill_count', 'death', 'support', 'score'])\n self.battle_table.setAlternatingRowColors(True)\n self.battle_table.setSelectionBehavior(QAbstractItemView.SelectRows)\n self.battle_table.resizeRowsToContents()\n self.battle_table.doubleClicked.connect(self.on_click)\n <mask token>\n\n def showDialog(self, match_id):\n data = requests.get('http://300report.jumpw.com/api/getmatch?id={}'\n .format(match_id))\n a = self.A()\n <mask token>\n\n def search(self):\n print(self.user)\n print(__name__)\n runner = CrawlerRunner(get_project_settings())\n print('a')\n runner.crawl('JumpReport', user=self.user)\n print(self.user)\n d = runner.join()\n d.addBoth(lambda _: reactor.stop())\n reactor.run()\n print('complete')\n name = self.qle.text()\n db = db_handle()\n with db as con:\n sql = (\n \"select * from player where name = '{}' order by update_time\"\n .format(name))\n con.execute(sql)\n player = con.fetchone()\n if player:\n (id, name, win, match_count, strength, level, update_time, rank\n ) = player\n text = (\n '角色名: {}\\n胜场: {}\\n总场数: {}\\n团分: {}\\n团分排行: {}\\n等级: {}\\n更新时间: {}'\n .format(name, win, match_count, strength, rank, level,\n update_time))\n self.txt.setText(text)\n sql = (\"select * from player_data where name = '{}' order by date\"\n .format(name))\n con.execute(sql)\n player_data = con.fetchall()\n a = ''\n for data in player_data:\n a += str(data)\n a += '\\n'\n self.battle.setText(str(a))\n sql = 'select * from game_data order by match_id desc'\n con.execute(sql)\n game_data = con.fetchall()\n a = ''\n l = 0\n self.battle_table.setRowCount(len(game_data))\n for data in game_data:\n a += str(data[1:])\n print(type(data))\n for i in range(self.battle_table.columnCount()):\n item = QTableWidgetItem(str(data[i + 1]))\n item.setTextAlignment(Qt.AlignHCenter | Qt.AlignVCenter)\n self.battle_table.setItem(l, i, item)\n a += '\\n'\n self.player_status.setText(str(a))\n l += 1\n 
<mask token>\n <mask token>\n\n\nclass BatterReport(QWidget):\n\n def __init__(self):\n super().__init__()\n self.initUI()\n\n def initUI(self):\n self.txt = QTextEdit()\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\nclass Example(QWidget):\n\n\n class A(QWidget):\n\n def __init__(self):\n super().__init__()\n self.initUI()\n\n def initUI(self):\n self.setGeometry(300, 300, 300, 220)\n self.setWindowTitle('Icon')\n self.setWindowIcon(QIcon('web.png'))\n self.show()\n\n def __init__(self):\n super().__init__()\n self.initUI()\n\n def initUI(self):\n self.qle = QLineEdit('蔽月八云')\n self.user = self.qle.text()\n self.para = 'user={}'.format(self.user)\n print(self.user, '1')\n btn = QPushButton('查询', self)\n btn.resize(btn.sizeHint())\n btn.clicked.connect(self.search)\n self.txt = QTextEdit()\n self.battle = QTextEdit()\n self.player_status = QTextEdit()\n self.create_table()\n exitAction = QAction('Exit', self)\n exitAction.setShortcut('Ctrl+Q')\n exitAction.setStatusTip('application')\n exitAction.triggered.connect(qApp.quit)\n grid = QGridLayout()\n grid.setSpacing(10)\n grid.addWidget(self.qle, 1, 0)\n grid.addWidget(btn, 2, 0)\n grid.addWidget(self.txt, 3, 0)\n grid.addWidget(self.battle, 1, 1, 3, 1)\n grid.addWidget(self.player_status, 4, 0, 2, 2)\n grid.addWidget(self.battle_table, 6, 0, 2, 2)\n self.setLayout(grid)\n self.setGeometry(600, 600, 800, 600)\n self.center()\n self.setWindowTitle('战绩查询')\n self.show()\n\n def create_table(self):\n self.battle_table = QTableWidget()\n self.battle_table.setColumnCount(8)\n self.battle_table.setHorizontalHeaderLabels(['match_id', 'head',\n 'date', 'time', 'kill_count', 'death', 'support', 'score'])\n self.battle_table.setAlternatingRowColors(True)\n self.battle_table.setSelectionBehavior(QAbstractItemView.SelectRows)\n self.battle_table.resizeRowsToContents()\n self.battle_table.doubleClicked.connect(self.on_click)\n\n @pyqtSlot()\n def on_click(self):\n currentQTableWidgetItem = 
self.battle_table.selectedItems()[0]\n match_id = currentQTableWidgetItem.text()\n print(match_id)\n self.showDialog(match_id)\n\n def showDialog(self, match_id):\n data = requests.get('http://300report.jumpw.com/api/getmatch?id={}'\n .format(match_id))\n a = self.A()\n <mask token>\n\n def search(self):\n print(self.user)\n print(__name__)\n runner = CrawlerRunner(get_project_settings())\n print('a')\n runner.crawl('JumpReport', user=self.user)\n print(self.user)\n d = runner.join()\n d.addBoth(lambda _: reactor.stop())\n reactor.run()\n print('complete')\n name = self.qle.text()\n db = db_handle()\n with db as con:\n sql = (\n \"select * from player where name = '{}' order by update_time\"\n .format(name))\n con.execute(sql)\n player = con.fetchone()\n if player:\n (id, name, win, match_count, strength, level, update_time, rank\n ) = player\n text = (\n '角色名: {}\\n胜场: {}\\n总场数: {}\\n团分: {}\\n团分排行: {}\\n等级: {}\\n更新时间: {}'\n .format(name, win, match_count, strength, rank, level,\n update_time))\n self.txt.setText(text)\n sql = (\"select * from player_data where name = '{}' order by date\"\n .format(name))\n con.execute(sql)\n player_data = con.fetchall()\n a = ''\n for data in player_data:\n a += str(data)\n a += '\\n'\n self.battle.setText(str(a))\n sql = 'select * from game_data order by match_id desc'\n con.execute(sql)\n game_data = con.fetchall()\n a = ''\n l = 0\n self.battle_table.setRowCount(len(game_data))\n for data in game_data:\n a += str(data[1:])\n print(type(data))\n for i in range(self.battle_table.columnCount()):\n item = QTableWidgetItem(str(data[i + 1]))\n item.setTextAlignment(Qt.AlignHCenter | Qt.AlignVCenter)\n self.battle_table.setItem(l, i, item)\n a += '\\n'\n self.player_status.setText(str(a))\n l += 1\n <mask token>\n\n def closeEvent(self, event):\n reply = QMessageBox.question(self, 'Message', 'Quit?', QMessageBox.\n Yes | QMessageBox.No, QMessageBox.Yes)\n if reply == QMessageBox.Yes:\n event.accept()\n else:\n 
event.ignore()\n\n\nclass BatterReport(QWidget):\n\n def __init__(self):\n super().__init__()\n self.initUI()\n\n def initUI(self):\n self.txt = QTextEdit()\n\n\n<mask token>\n", "step-4": "import sys\nfrom PyQt5.QtWidgets import *\nfrom PyQt5.QtGui import QIcon, QFont\nfrom PyQt5.QtCore import QCoreApplication\nimport pymysql\nimport requests\nfrom twisted.internet import reactor, defer\nfrom scrapy.crawler import CrawlerRunner, CrawlerProcess\nfrom scrapy.utils.project import get_project_settings\nfrom spider.jump_300heroes.jump_300heroes.spiders.my_report import JumpReport\nfrom scrapy.settings import Settings\nfrom PyQt5.QtCore import *\nfrom PyQt5.QtGui import *\nfrom multiprocessing import Process\n\n\ndef db_handle():\n con = pymysql.connect(host='localhost', user='web', passwd='web',\n charset='utf8', database='heroes')\n return con\n\n\nclass Example(QWidget):\n\n\n class A(QWidget):\n\n def __init__(self):\n super().__init__()\n self.initUI()\n\n def initUI(self):\n self.setGeometry(300, 300, 300, 220)\n self.setWindowTitle('Icon')\n self.setWindowIcon(QIcon('web.png'))\n self.show()\n\n def __init__(self):\n super().__init__()\n self.initUI()\n\n def initUI(self):\n self.qle = QLineEdit('蔽月八云')\n self.user = self.qle.text()\n self.para = 'user={}'.format(self.user)\n print(self.user, '1')\n btn = QPushButton('查询', self)\n btn.resize(btn.sizeHint())\n btn.clicked.connect(self.search)\n self.txt = QTextEdit()\n self.battle = QTextEdit()\n self.player_status = QTextEdit()\n self.create_table()\n exitAction = QAction('Exit', self)\n exitAction.setShortcut('Ctrl+Q')\n exitAction.setStatusTip('application')\n exitAction.triggered.connect(qApp.quit)\n grid = QGridLayout()\n grid.setSpacing(10)\n grid.addWidget(self.qle, 1, 0)\n grid.addWidget(btn, 2, 0)\n grid.addWidget(self.txt, 3, 0)\n grid.addWidget(self.battle, 1, 1, 3, 1)\n grid.addWidget(self.player_status, 4, 0, 2, 2)\n grid.addWidget(self.battle_table, 6, 0, 2, 2)\n self.setLayout(grid)\n 
self.setGeometry(600, 600, 800, 600)\n self.center()\n self.setWindowTitle('战绩查询')\n self.show()\n\n def create_table(self):\n self.battle_table = QTableWidget()\n self.battle_table.setColumnCount(8)\n self.battle_table.setHorizontalHeaderLabels(['match_id', 'head',\n 'date', 'time', 'kill_count', 'death', 'support', 'score'])\n self.battle_table.setAlternatingRowColors(True)\n self.battle_table.setSelectionBehavior(QAbstractItemView.SelectRows)\n self.battle_table.resizeRowsToContents()\n self.battle_table.doubleClicked.connect(self.on_click)\n\n @pyqtSlot()\n def on_click(self):\n currentQTableWidgetItem = self.battle_table.selectedItems()[0]\n match_id = currentQTableWidgetItem.text()\n print(match_id)\n self.showDialog(match_id)\n\n def showDialog(self, match_id):\n data = requests.get('http://300report.jumpw.com/api/getmatch?id={}'\n .format(match_id))\n a = self.A()\n\n def searchd(self):\n if __name__ == '__main__':\n p = Process(target=self.a)\n p.start()\n p.join()\n\n def search(self):\n print(self.user)\n print(__name__)\n runner = CrawlerRunner(get_project_settings())\n print('a')\n runner.crawl('JumpReport', user=self.user)\n print(self.user)\n d = runner.join()\n d.addBoth(lambda _: reactor.stop())\n reactor.run()\n print('complete')\n name = self.qle.text()\n db = db_handle()\n with db as con:\n sql = (\n \"select * from player where name = '{}' order by update_time\"\n .format(name))\n con.execute(sql)\n player = con.fetchone()\n if player:\n (id, name, win, match_count, strength, level, update_time, rank\n ) = player\n text = (\n '角色名: {}\\n胜场: {}\\n总场数: {}\\n团分: {}\\n团分排行: {}\\n等级: {}\\n更新时间: {}'\n .format(name, win, match_count, strength, rank, level,\n update_time))\n self.txt.setText(text)\n sql = (\"select * from player_data where name = '{}' order by date\"\n .format(name))\n con.execute(sql)\n player_data = con.fetchall()\n a = ''\n for data in player_data:\n a += str(data)\n a += '\\n'\n self.battle.setText(str(a))\n sql = 'select * from 
game_data order by match_id desc'\n con.execute(sql)\n game_data = con.fetchall()\n a = ''\n l = 0\n self.battle_table.setRowCount(len(game_data))\n for data in game_data:\n a += str(data[1:])\n print(type(data))\n for i in range(self.battle_table.columnCount()):\n item = QTableWidgetItem(str(data[i + 1]))\n item.setTextAlignment(Qt.AlignHCenter | Qt.AlignVCenter)\n self.battle_table.setItem(l, i, item)\n a += '\\n'\n self.player_status.setText(str(a))\n l += 1\n\n def center(self):\n qr = self.frameGeometry()\n cp = QDesktopWidget().availableGeometry().center()\n qr.moveCenter(cp)\n self.move(qr.topLeft())\n\n def closeEvent(self, event):\n reply = QMessageBox.question(self, 'Message', 'Quit?', QMessageBox.\n Yes | QMessageBox.No, QMessageBox.Yes)\n if reply == QMessageBox.Yes:\n event.accept()\n else:\n event.ignore()\n\n\nclass BatterReport(QWidget):\n\n def __init__(self):\n super().__init__()\n self.initUI()\n\n def initUI(self):\n self.txt = QTextEdit()\n\n\nif __name__ == '__main__':\n app = QApplication(sys.argv)\n ex = Example()\n sys.exit(app.exec_())\n", "step-5": "import sys\nfrom PyQt5.QtWidgets import *\nfrom PyQt5.QtGui import QIcon, QFont\nfrom PyQt5.QtCore import QCoreApplication\n\nimport pymysql\nimport requests\n\nfrom twisted.internet import reactor, defer\nfrom scrapy.crawler import CrawlerRunner, CrawlerProcess\nfrom scrapy.utils.project import get_project_settings\nfrom spider.jump_300heroes.jump_300heroes.spiders.my_report import JumpReport\nfrom scrapy.settings import Settings\nfrom PyQt5.QtCore import *\nfrom PyQt5.QtGui import *\n\nfrom multiprocessing import Process\n\n\n\n\ndef db_handle():\n\n con = pymysql.connect(\n host='localhost',\n user='web',\n passwd='web',\n charset='utf8',\n database='heroes'\n )\n return con\n\nclass Example(QWidget):\n\n class A(QWidget):\n\n def __init__(self):\n super().__init__()\n\n self.initUI()\n\n def initUI(self):\n self.setGeometry(300, 300, 300, 220)\n self.setWindowTitle('Icon')\n 
self.setWindowIcon(QIcon('web.png'))\n\n self.show()\n\n def __init__(self):\n super().__init__()\n\n self.initUI()\n\n def initUI(self):\n\n #QToolTip.setFont(QFont('SanSerif', 10))\n\n #self.setToolTip('This is a <b>QWidget</b> widget')\n\n #textEdit = QTextEdit()\n #self.setCentralWidget(textEdit)\n\n self.qle = QLineEdit(\"蔽月八云\")\n self.user = self.qle.text()\n self.para = \"user={}\".format(self.user)\n print(self.user, '1')\n btn = QPushButton('查询', self)\n #btn.setToolTip('This is a <b>QPushButton</b> widget')\n btn.resize(btn.sizeHint())\n btn.clicked.connect(self.search)\n\n self.txt = QTextEdit()\n #self.txt.textChanged.connect(self.adjustSize)\n\n self.battle = QTextEdit()\n\n self.player_status = QTextEdit()\n\n self.create_table()\n\n\n\n # 名称不能用Quit、Exit,用了就无法显示,原因不明\n exitAction = QAction('Exit', self)\n exitAction.setShortcut('Ctrl+Q')\n exitAction.setStatusTip('application')\n exitAction.triggered.connect(qApp.quit)\n\n #self.statusBar()\n\n #menubar = QMainWindow.menuBar()\n\n # Mac OS的状态栏显示不一样\n #menubar.setNativeMenuBar(False)\n\n #fileMenu = menubar.addMenu('&File')\n #fileMenu.addAction(exitAction)\n\n #toolbar = self.addToolBar('Exit')\n #toolbar.addAction(exitAction)\n\n grid = QGridLayout()\n grid.setSpacing(10)\n\n grid.addWidget(self.qle, 1, 0)\n grid.addWidget(btn, 2, 0)\n grid.addWidget(self.txt, 3, 0)\n grid.addWidget(self.battle, 1, 1, 3, 1)\n grid.addWidget(self.player_status, 4, 0, 2, 2)\n grid.addWidget(self.battle_table, 6, 0, 2, 2)\n\n self.setLayout(grid)\n\n self.setGeometry(600, 600, 800, 600)\n self.center()\n self.setWindowTitle(\"战绩查询\")\n\n self.show()\n\n def create_table(self):\n # 设置表\n self.battle_table = QTableWidget()\n # 表列数,行数在下方读取数据时,根据数据量建立\n self.battle_table.setColumnCount(8)\n # 设置表头\n self.battle_table.setHorizontalHeaderLabels(\n ['match_id', 'head', 'date', 'time', 'kill_count', 'death', 'support', 'score'])\n # 隔行变色\n self.battle_table.setAlternatingRowColors(True)\n # 整行选中\n 
self.battle_table.setSelectionBehavior(QAbstractItemView.SelectRows)\n # 将列调整到跟内容大小相匹配\n # self.battle_table.resizeColumnsToContents()\n # #将行大小调整到跟内容的大小相匹配\n self.battle_table.resizeRowsToContents()\n # 点击事件\n self.battle_table.doubleClicked.connect(self.on_click)\n\n @pyqtSlot()\n def on_click(self):\n currentQTableWidgetItem = self.battle_table.selectedItems()[0]\n # 点击的行包含的比赛id\n #match_id = self.battle_table.item(currentQTableWidgetItem.row(), 0).text()\n match_id = currentQTableWidgetItem.text()\n print(match_id)\n self.showDialog(match_id)\n\n def showDialog(self, match_id):\n\n data = requests.get('http://300report.jumpw.com/api/getmatch?id={}'.format(match_id))\n a = self.A()\n\n ## 启动爬虫,获取该场比赛所有人的数据\n #runner = CrawlerRunner(get_project_settings())\n #runner.crawl('JumpReport')\n #d = runner.join()\n #d.addBoth(lambda _: reactor.stop())\n #reactor.run() # 阻塞运行爬虫\n #\n #text, ok = QInputDialog.getText(self, 'Input Dialog',\n # 'Enter your name:')\n\n\n\n def searchd(self):\n if __name__ == '__main__':\n #print(user, '2')\n p = Process(target=self.a)\n p.start()\n p.join()\n\n def search(self):\n print(self.user)\n print(__name__)\n #print(user, '3')\n\n\n #process = CrawlerProcess(get_project_settings())\n #process.crawl('JumpReport')\n #process.start()\n #process.stop()\n #process.put()\n # 脚本执行爬虫代码\n runner = CrawlerRunner(get_project_settings())\n\n #def search(runner, keyword):\n # return runner.crawl(JumpReport, keyword)\n\n #runner = CrawlerProcess()\n #dfs = set()\n print('a')\n runner.crawl('JumpReport', user=self.user)\n print(self.user)\n d = runner.join()\n #dfs.add(d)\n #defer.DeferredList(dfs).addBoth(lambda _: reactor.stop())\n d.addBoth(lambda _: reactor.stop())\n #search(runner, \"abcd\")\n #search(runner, \"beat\")\n #runner.start()\n reactor.run() # 阻塞运行爬虫\n\n print(\"complete\")\n\n\n # runner = CrawlerRunner(get_project_settings())\n # dfs = set()\n # for domain in range(2):\n # d = runner.crawl('JumpReport')\n # dfs.add(d)\n #\n # 
defer.DeferredList(dfs).addBoth(lambda _: reactor.stop())\n # reactor.run() # the script will block here until all crawling jobs are finished\n\n # runner = CrawlerRunner(get_project_settings())\n #\n # @defer.inlineCallbacks\n # def crawl():\n # for domain in range(2):\n # yield runner.crawl('JumpReport')\n # reactor.stop()\n #\n # crawl()\n # reactor.run() # the script will block here until the last crawl call is finished\n\n # settings = Settings({'USER_AGENT': 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)'})\n # runner = CrawlerRunner(settings)\n # \n # d = runner.crawl(JumpReport)\n # d.addBoth(lambda _: reactor.stop())\n # reactor.run() # the script will block here until the crawling is finished\n\n\n # runner = CrawlerProcess(get_project_settings())\n # runner.crawl(JumpReport)\n # runner.start()\n\n name = self.qle.text()\n db = db_handle()\n with db as con:\n sql = \"select * from player where name = '{}' order by update_time\".format(name)\n con.execute(sql)\n player = con.fetchone()\n if player:\n id, name, win, match_count, strength, level, update_time, rank = player\n text = \"角色名: {}\\n胜场: {}\\n总场数: {}\\n团分: {}\\n团分排行: {}\\n等级: {}\\n更新时间: {}\".format(\n name, win, match_count, strength, rank, level, update_time)\n \n self.txt.setText(text)\n \n sql = \"select * from player_data where name = '{}' order by date\".format(name)\n con.execute(sql)\n player_data = con.fetchall()\n a = \"\"\n for data in player_data:\n a += str(data)\n a += \"\\n\"\n self.battle.setText(str(a))\n\n sql = \"select * from game_data order by match_id desc\"\n con.execute(sql)\n game_data = con.fetchall()\n a = \"\"\n l = 0\n self.battle_table.setRowCount(len(game_data))\n for data in game_data:\n a += str(data[1:])\n print(type(data))\n\n for i in range(self.battle_table.columnCount()):\n\n item = QTableWidgetItem(str(data[i + 1]))\n # 设置填入数据的排列位置(左右居中| 上下居中)\n item.setTextAlignment(Qt.AlignHCenter | Qt.AlignVCenter)\n self.battle_table.setItem(l, i, item)\n\n a += 
\"\\n\"\n self.player_status.setText(str(a))\n l += 1\n #for i in range(len(list(a))):\n # self.battle_table.setLayout(str(a))\n\n def center(self):\n\n qr = self.frameGeometry()\n cp = QDesktopWidget().availableGeometry().center()\n qr.moveCenter(cp)\n self.move(qr.topLeft())\n\n def closeEvent(self, event):\n\n reply = QMessageBox.question(self, 'Message', \"Quit?\", QMessageBox.Yes | QMessageBox.No, QMessageBox.Yes)\n\n if reply == QMessageBox.Yes:\n event.accept()\n else:\n event.ignore()\n\n\nclass BatterReport(QWidget):\n\n def __init__(self):\n super().__init__()\n\n self.initUI()\n\n def initUI(self):\n self.txt = QTextEdit()\n\n\nif __name__ == '__main__':\n\n app = QApplication(sys.argv)\n\n ex = Example()\n\n sys.exit(app.exec_())\n", "step-ids": [ 7, 8, 11, 16, 17 ] }
[ 7, 8, 11, 16, 17 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> class Solution(object): <|reserved_special_token_0|> <|reserved_special_token_1|> class Solution(object): def removeNthFromEnd(self, head, n): dummy = ListNode(-1) dummy.next = head first, second = dummy, dummy for i in range(n): first = first.next while first.next: first = first.next second = second.next second.next = second.next.next return dummy.next
flexible
{ "blob_id": "7e71c97070285b051b23448c755e3d41b2909dda", "index": 3884, "step-1": "<mask token>\n", "step-2": "class Solution(object):\n <mask token>\n", "step-3": "class Solution(object):\n\n def removeNthFromEnd(self, head, n):\n dummy = ListNode(-1)\n dummy.next = head\n first, second = dummy, dummy\n for i in range(n):\n first = first.next\n while first.next:\n first = first.next\n second = second.next\n second.next = second.next.next\n return dummy.next\n", "step-4": null, "step-5": null, "step-ids": [ 0, 1, 2 ] }
[ 0, 1, 2 ]
# -*- coding: utf-8 -*- import scrapy import json, time, sys, random, re, pyssdb from scrapy.utils.project import get_project_settings from spider.items import GoodsSalesItem goods_list = [] '''获取店铺内产品信息''' class PddMallGoodsSpider(scrapy.Spider): name = 'pdd_mall_goods' mall_id_hash = 'pdd_mall_id_hash' hash_num = 0 ssdb_client = '' process_nums = 1 limit = 100 def __init__(self, hash_num = 0, process_nums = 1): self.ssdb_client = pyssdb.Client(get_project_settings().get('SSDB_HOST'), 8888) self.hash_num = int(hash_num) ##当前脚本号 self.process_nums = int(process_nums) ##脚本总数 self.pageSize = 500 ##每次抓取的产品数 最大只返回500 def start_requests(self): mall_nums = self.limit * int(self.process_nums) ##一次查询的数量 is_end = False start_mall_id = '' ##起始查询的店铺key while not is_end: mall_ids = self.ssdb_client.hkeys(self.mall_id_hash, start_mall_id, '', mall_nums) if not mall_ids: ##没有数据返回 is_end = True continue for mall_id in mall_ids: mall_id = int( mall_id.decode('utf-8') ) start_mall_id = mall_id if mall_id % self.process_nums != self.hash_num: continue goods_list=[] page = 1 headers = self.make_headers() url = 'http://apiv4.yangkeduo.com/api/turing/mall/query_cat_goods?category_id=0&type=0&sort_type=_sales&mall_id='+str(mall_id)+'&page_no='+str(page)+'&page_size=500' meta = {'page':page, 'mall_id':mall_id, 'goods_list':goods_list} yield scrapy.Request(url, meta=meta, callback=self.parse, headers=headers) def parse(self, response): pass goods_list=response.meta['goods_list'] ##产品集合 mall_id = response.meta['mall_id'] ##店铺ID page = response.meta['page'] ##每返回一次页面数据 记录页数 mall_goods = response.body.decode('utf-8') ##bytes转换为str mall_goods = json.loads(mall_goods) goods_len = len(mall_goods['goods_list']) if goods_len > 0: goods_list = goods_list + mall_goods['goods_list'] ##合并产品列表 if goods_len > self.pageSize - 100: page += 1 ##继续采集下一页面 url = 
'http://apiv4.yangkeduo.com/api/turing/mall/query_cat_goods?category_id=0&type=0&sort_type=_sales&mall_id='+str(mall_id)+'&page_no='+str(page)+'&page_size=500' meta = {'page':page, 'mall_id':mall_id, 'goods_list':goods_list} headers = self.make_headers() yield scrapy.Request(url, meta=meta, callback=self.parse, headers=headers) else: if goods_list: item = GoodsSalesItem() item['goods_list'] = goods_list item['mall_id'] = mall_id yield item '''生成headers头信息''' def make_headers(self): chrome_version = str(random.randint(59,63))+'.0.'+str(random.randint(1000,3200))+'.94' headers = { "Host":"yangkeduo.com", "Accept":"text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8", "Accept-Language":"zh-CN,zh;q=0.9,en;q=0.8", "Accept-Encoding":"gzip, deflate", "Host":"yangkeduo.com", "Referer":"http://yangkeduo.com/goods.html?goods_id=442573047&from_subject_id=935&is_spike=0&refer_page_name=subject&refer_page_id=subject_1515726808272_1M143fWqjQ&refer_page_sn=10026", "Connection":"keep-alive", 'User-Agent':'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/'+chrome_version+' Safari/537.36', } ip = str(random.randint(100, 200))+'.'+str(random.randint(1, 255))+'.'+str(random.randint(1, 255))+'.'+str(random.randint(1, 255)) headers['CLIENT-IP'] = ip headers['X-FORWARDED-FOR']= ip return headers
normal
{ "blob_id": "f33190df35a6b0b91c4dd2d6a58291451d06e29a", "index": 3529, "step-1": "<mask token>\n\n\nclass PddMallGoodsSpider(scrapy.Spider):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def start_requests(self):\n mall_nums = self.limit * int(self.process_nums)\n is_end = False\n start_mall_id = ''\n while not is_end:\n mall_ids = self.ssdb_client.hkeys(self.mall_id_hash,\n start_mall_id, '', mall_nums)\n if not mall_ids:\n is_end = True\n continue\n for mall_id in mall_ids:\n mall_id = int(mall_id.decode('utf-8'))\n start_mall_id = mall_id\n if mall_id % self.process_nums != self.hash_num:\n continue\n goods_list = []\n page = 1\n headers = self.make_headers()\n url = (\n 'http://apiv4.yangkeduo.com/api/turing/mall/query_cat_goods?category_id=0&type=0&sort_type=_sales&mall_id='\n + str(mall_id) + '&page_no=' + str(page) +\n '&page_size=500')\n meta = {'page': page, 'mall_id': mall_id, 'goods_list':\n goods_list}\n yield scrapy.Request(url, meta=meta, callback=self.parse,\n headers=headers)\n <mask token>\n <mask token>\n\n def make_headers(self):\n chrome_version = str(random.randint(59, 63)) + '.0.' + str(random.\n randint(1000, 3200)) + '.94'\n headers = {'Host': 'yangkeduo.com', 'Accept':\n 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8'\n , 'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',\n 'Accept-Encoding': 'gzip, deflate', 'Host': 'yangkeduo.com',\n 'Referer':\n 'http://yangkeduo.com/goods.html?goods_id=442573047&from_subject_id=935&is_spike=0&refer_page_name=subject&refer_page_id=subject_1515726808272_1M143fWqjQ&refer_page_sn=10026'\n , 'Connection': 'keep-alive', 'User-Agent': \n 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/'\n + chrome_version + ' Safari/537.36'}\n ip = str(random.randint(100, 200)) + '.' + str(random.randint(1, 255)\n ) + '.' + str(random.randint(1, 255)) + '.' 
+ str(random.\n randint(1, 255))\n headers['CLIENT-IP'] = ip\n headers['X-FORWARDED-FOR'] = ip\n return headers\n", "step-2": "<mask token>\n\n\nclass PddMallGoodsSpider(scrapy.Spider):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __init__(self, hash_num=0, process_nums=1):\n self.ssdb_client = pyssdb.Client(get_project_settings().get(\n 'SSDB_HOST'), 8888)\n self.hash_num = int(hash_num)\n self.process_nums = int(process_nums)\n self.pageSize = 500\n\n def start_requests(self):\n mall_nums = self.limit * int(self.process_nums)\n is_end = False\n start_mall_id = ''\n while not is_end:\n mall_ids = self.ssdb_client.hkeys(self.mall_id_hash,\n start_mall_id, '', mall_nums)\n if not mall_ids:\n is_end = True\n continue\n for mall_id in mall_ids:\n mall_id = int(mall_id.decode('utf-8'))\n start_mall_id = mall_id\n if mall_id % self.process_nums != self.hash_num:\n continue\n goods_list = []\n page = 1\n headers = self.make_headers()\n url = (\n 'http://apiv4.yangkeduo.com/api/turing/mall/query_cat_goods?category_id=0&type=0&sort_type=_sales&mall_id='\n + str(mall_id) + '&page_no=' + str(page) +\n '&page_size=500')\n meta = {'page': page, 'mall_id': mall_id, 'goods_list':\n goods_list}\n yield scrapy.Request(url, meta=meta, callback=self.parse,\n headers=headers)\n <mask token>\n <mask token>\n\n def make_headers(self):\n chrome_version = str(random.randint(59, 63)) + '.0.' 
+ str(random.\n randint(1000, 3200)) + '.94'\n headers = {'Host': 'yangkeduo.com', 'Accept':\n 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8'\n , 'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',\n 'Accept-Encoding': 'gzip, deflate', 'Host': 'yangkeduo.com',\n 'Referer':\n 'http://yangkeduo.com/goods.html?goods_id=442573047&from_subject_id=935&is_spike=0&refer_page_name=subject&refer_page_id=subject_1515726808272_1M143fWqjQ&refer_page_sn=10026'\n , 'Connection': 'keep-alive', 'User-Agent': \n 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/'\n + chrome_version + ' Safari/537.36'}\n ip = str(random.randint(100, 200)) + '.' + str(random.randint(1, 255)\n ) + '.' + str(random.randint(1, 255)) + '.' + str(random.\n randint(1, 255))\n headers['CLIENT-IP'] = ip\n headers['X-FORWARDED-FOR'] = ip\n return headers\n", "step-3": "<mask token>\n\n\nclass PddMallGoodsSpider(scrapy.Spider):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __init__(self, hash_num=0, process_nums=1):\n self.ssdb_client = pyssdb.Client(get_project_settings().get(\n 'SSDB_HOST'), 8888)\n self.hash_num = int(hash_num)\n self.process_nums = int(process_nums)\n self.pageSize = 500\n\n def start_requests(self):\n mall_nums = self.limit * int(self.process_nums)\n is_end = False\n start_mall_id = ''\n while not is_end:\n mall_ids = self.ssdb_client.hkeys(self.mall_id_hash,\n start_mall_id, '', mall_nums)\n if not mall_ids:\n is_end = True\n continue\n for mall_id in mall_ids:\n mall_id = int(mall_id.decode('utf-8'))\n start_mall_id = mall_id\n if mall_id % self.process_nums != self.hash_num:\n continue\n goods_list = []\n page = 1\n headers = self.make_headers()\n url = (\n 'http://apiv4.yangkeduo.com/api/turing/mall/query_cat_goods?category_id=0&type=0&sort_type=_sales&mall_id='\n + str(mall_id) + '&page_no=' + str(page) +\n '&page_size=500')\n meta = {'page': page, 'mall_id': 
mall_id, 'goods_list':\n goods_list}\n yield scrapy.Request(url, meta=meta, callback=self.parse,\n headers=headers)\n\n def parse(self, response):\n pass\n goods_list = response.meta['goods_list']\n mall_id = response.meta['mall_id']\n page = response.meta['page']\n mall_goods = response.body.decode('utf-8')\n mall_goods = json.loads(mall_goods)\n goods_len = len(mall_goods['goods_list'])\n if goods_len > 0:\n goods_list = goods_list + mall_goods['goods_list']\n if goods_len > self.pageSize - 100:\n page += 1\n url = (\n 'http://apiv4.yangkeduo.com/api/turing/mall/query_cat_goods?category_id=0&type=0&sort_type=_sales&mall_id='\n + str(mall_id) + '&page_no=' + str(page) + '&page_size=500')\n meta = {'page': page, 'mall_id': mall_id, 'goods_list': goods_list}\n headers = self.make_headers()\n yield scrapy.Request(url, meta=meta, callback=self.parse,\n headers=headers)\n elif goods_list:\n item = GoodsSalesItem()\n item['goods_list'] = goods_list\n item['mall_id'] = mall_id\n yield item\n <mask token>\n\n def make_headers(self):\n chrome_version = str(random.randint(59, 63)) + '.0.' + str(random.\n randint(1000, 3200)) + '.94'\n headers = {'Host': 'yangkeduo.com', 'Accept':\n 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8'\n , 'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',\n 'Accept-Encoding': 'gzip, deflate', 'Host': 'yangkeduo.com',\n 'Referer':\n 'http://yangkeduo.com/goods.html?goods_id=442573047&from_subject_id=935&is_spike=0&refer_page_name=subject&refer_page_id=subject_1515726808272_1M143fWqjQ&refer_page_sn=10026'\n , 'Connection': 'keep-alive', 'User-Agent': \n 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/'\n + chrome_version + ' Safari/537.36'}\n ip = str(random.randint(100, 200)) + '.' + str(random.randint(1, 255)\n ) + '.' + str(random.randint(1, 255)) + '.' 
+ str(random.\n randint(1, 255))\n headers['CLIENT-IP'] = ip\n headers['X-FORWARDED-FOR'] = ip\n return headers\n", "step-4": "import scrapy\nimport json, time, sys, random, re, pyssdb\nfrom scrapy.utils.project import get_project_settings\nfrom spider.items import GoodsSalesItem\ngoods_list = []\n<mask token>\n\n\nclass PddMallGoodsSpider(scrapy.Spider):\n name = 'pdd_mall_goods'\n mall_id_hash = 'pdd_mall_id_hash'\n hash_num = 0\n ssdb_client = ''\n process_nums = 1\n limit = 100\n\n def __init__(self, hash_num=0, process_nums=1):\n self.ssdb_client = pyssdb.Client(get_project_settings().get(\n 'SSDB_HOST'), 8888)\n self.hash_num = int(hash_num)\n self.process_nums = int(process_nums)\n self.pageSize = 500\n\n def start_requests(self):\n mall_nums = self.limit * int(self.process_nums)\n is_end = False\n start_mall_id = ''\n while not is_end:\n mall_ids = self.ssdb_client.hkeys(self.mall_id_hash,\n start_mall_id, '', mall_nums)\n if not mall_ids:\n is_end = True\n continue\n for mall_id in mall_ids:\n mall_id = int(mall_id.decode('utf-8'))\n start_mall_id = mall_id\n if mall_id % self.process_nums != self.hash_num:\n continue\n goods_list = []\n page = 1\n headers = self.make_headers()\n url = (\n 'http://apiv4.yangkeduo.com/api/turing/mall/query_cat_goods?category_id=0&type=0&sort_type=_sales&mall_id='\n + str(mall_id) + '&page_no=' + str(page) +\n '&page_size=500')\n meta = {'page': page, 'mall_id': mall_id, 'goods_list':\n goods_list}\n yield scrapy.Request(url, meta=meta, callback=self.parse,\n headers=headers)\n\n def parse(self, response):\n pass\n goods_list = response.meta['goods_list']\n mall_id = response.meta['mall_id']\n page = response.meta['page']\n mall_goods = response.body.decode('utf-8')\n mall_goods = json.loads(mall_goods)\n goods_len = len(mall_goods['goods_list'])\n if goods_len > 0:\n goods_list = goods_list + mall_goods['goods_list']\n if goods_len > self.pageSize - 100:\n page += 1\n url = (\n 
'http://apiv4.yangkeduo.com/api/turing/mall/query_cat_goods?category_id=0&type=0&sort_type=_sales&mall_id='\n + str(mall_id) + '&page_no=' + str(page) + '&page_size=500')\n meta = {'page': page, 'mall_id': mall_id, 'goods_list': goods_list}\n headers = self.make_headers()\n yield scrapy.Request(url, meta=meta, callback=self.parse,\n headers=headers)\n elif goods_list:\n item = GoodsSalesItem()\n item['goods_list'] = goods_list\n item['mall_id'] = mall_id\n yield item\n \"\"\"生成headers头信息\"\"\"\n\n def make_headers(self):\n chrome_version = str(random.randint(59, 63)) + '.0.' + str(random.\n randint(1000, 3200)) + '.94'\n headers = {'Host': 'yangkeduo.com', 'Accept':\n 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8'\n , 'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',\n 'Accept-Encoding': 'gzip, deflate', 'Host': 'yangkeduo.com',\n 'Referer':\n 'http://yangkeduo.com/goods.html?goods_id=442573047&from_subject_id=935&is_spike=0&refer_page_name=subject&refer_page_id=subject_1515726808272_1M143fWqjQ&refer_page_sn=10026'\n , 'Connection': 'keep-alive', 'User-Agent': \n 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/'\n + chrome_version + ' Safari/537.36'}\n ip = str(random.randint(100, 200)) + '.' + str(random.randint(1, 255)\n ) + '.' + str(random.randint(1, 255)) + '.' 
+ str(random.\n randint(1, 255))\n headers['CLIENT-IP'] = ip\n headers['X-FORWARDED-FOR'] = ip\n return headers\n", "step-5": "# -*- coding: utf-8 -*-\r\nimport scrapy\r\nimport json, time, sys, random, re, pyssdb\r\n\r\nfrom scrapy.utils.project import get_project_settings\r\n\r\nfrom spider.items import GoodsSalesItem\r\n\r\ngoods_list = []\r\n'''获取店铺内产品信息'''\r\nclass PddMallGoodsSpider(scrapy.Spider):\r\n\tname = 'pdd_mall_goods'\r\n\tmall_id_hash \t= 'pdd_mall_id_hash'\r\n\thash_num \t\t= 0\r\n\tssdb_client = ''\r\n\tprocess_nums \t= 1\r\n\tlimit\t\t\t= 100\r\n\r\n\tdef __init__(self, hash_num = 0, process_nums = 1):\r\n\t\tself.ssdb_client = pyssdb.Client(get_project_settings().get('SSDB_HOST'), 8888)\r\n\t\tself.hash_num = int(hash_num) ##当前脚本号\r\n\t\tself.process_nums = int(process_nums) ##脚本总数\r\n\t\tself.pageSize = 500 ##每次抓取的产品数 最大只返回500\r\n\r\n\tdef start_requests(self):\r\n\t\tmall_nums \t\t= \tself.limit * int(self.process_nums) ##一次查询的数量\r\n\r\n\t\tis_end \t\t\t=\tFalse\r\n\t\tstart_mall_id \t=\t'' ##起始查询的店铺key\r\n\t\twhile not is_end:\r\n\t\t\tmall_ids \t=\tself.ssdb_client.hkeys(self.mall_id_hash, start_mall_id, '', mall_nums)\r\n\t\t\t\r\n\t\t\tif not mall_ids: ##没有数据返回\r\n\t\t\t\tis_end \t=\tTrue\r\n\t\t\t\tcontinue\r\n\r\n\t\t\tfor mall_id in mall_ids:\r\n\t\t\t\tmall_id = int( mall_id.decode('utf-8') )\r\n\t\t\t\tstart_mall_id = mall_id\r\n\r\n\t\t\t\tif mall_id % self.process_nums != self.hash_num:\r\n\t\t\t\t\tcontinue\r\n\t\t\t\t\t\r\n\t\t\t\tgoods_list=[]\r\n\t\t\t\tpage = 1\r\n\r\n\t\t\t\theaders = self.make_headers()\r\n\t\t\t\turl = 'http://apiv4.yangkeduo.com/api/turing/mall/query_cat_goods?category_id=0&type=0&sort_type=_sales&mall_id='+str(mall_id)+'&page_no='+str(page)+'&page_size=500'\r\n\t\t\t\tmeta = {'page':page, 'mall_id':mall_id, 'goods_list':goods_list}\r\n\t\t\t\tyield scrapy.Request(url, meta=meta, callback=self.parse, headers=headers)\r\n\t\t\t\r\n\tdef parse(self, 
response):\r\n\t\tpass\r\n\t\tgoods_list=response.meta['goods_list'] ##产品集合\r\n\t\tmall_id = response.meta['mall_id'] ##店铺ID\r\n\t\tpage \t = response.meta['page'] ##每返回一次页面数据 记录页数\r\n\r\n\t\tmall_goods = response.body.decode('utf-8') ##bytes转换为str\r\n\t\tmall_goods = json.loads(mall_goods)\r\n\r\n\t\tgoods_len = len(mall_goods['goods_list'])\r\n\r\n\t\tif goods_len > 0:\r\n\t\t\tgoods_list = goods_list + mall_goods['goods_list'] ##合并产品列表\r\n\r\n\t\tif goods_len > self.pageSize - 100:\r\n\t\t\tpage += 1\r\n\t\t\t##继续采集下一页面\r\n\t\t\turl = 'http://apiv4.yangkeduo.com/api/turing/mall/query_cat_goods?category_id=0&type=0&sort_type=_sales&mall_id='+str(mall_id)+'&page_no='+str(page)+'&page_size=500'\r\n\t\t\tmeta = {'page':page, 'mall_id':mall_id, 'goods_list':goods_list}\r\n\t\t\theaders = self.make_headers()\r\n\t\t\tyield scrapy.Request(url, meta=meta, callback=self.parse, headers=headers)\r\n\t\telse:\r\n\t\t\tif goods_list:\r\n\t\t\t\titem = GoodsSalesItem()\r\n\t\t\t\titem['goods_list'] = goods_list\r\n\t\t\t\titem['mall_id'] = mall_id\r\n\t\t\t\tyield item\r\n\r\n\t'''生成headers头信息'''\r\n\tdef make_headers(self):\r\n\t\tchrome_version = str(random.randint(59,63))+'.0.'+str(random.randint(1000,3200))+'.94'\r\n\t\theaders = {\r\n\t\t\t\"Host\":\"yangkeduo.com\",\r\n\t\t\t\"Accept\":\"text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8\",\r\n\t\t\t\"Accept-Language\":\"zh-CN,zh;q=0.9,en;q=0.8\",\r\n\t\t\t\"Accept-Encoding\":\"gzip, deflate\",\r\n\t\t\t\"Host\":\"yangkeduo.com\",\r\n\t\t\t\"Referer\":\"http://yangkeduo.com/goods.html?goods_id=442573047&from_subject_id=935&is_spike=0&refer_page_name=subject&refer_page_id=subject_1515726808272_1M143fWqjQ&refer_page_sn=10026\",\r\n\t\t\t\"Connection\":\"keep-alive\",\r\n\t\t\t'User-Agent':'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/'+chrome_version+' Safari/537.36',\r\n\t\t}\r\n\t\t\r\n\t\tip = str(random.randint(100, 
200))+'.'+str(random.randint(1, 255))+'.'+str(random.randint(1, 255))+'.'+str(random.randint(1, 255))\r\n\t\theaders['CLIENT-IP'] \t=\tip\r\n\t\theaders['X-FORWARDED-FOR']=\tip\r\n\t\treturn headers", "step-ids": [ 3, 4, 5, 9, 10 ] }
[ 3, 4, 5, 9, 10 ]
<|reserved_special_token_0|> def is_mango_seller(name): return name[-1] == 'm' def search_mango_seller(name): search_queue = deque() searched = [] global graph search_queue += graph[name] while search_queue: person = search_queue.popleft() if not person in searched: if is_mango_seller(person): print('%s is a mango seller' % person) return True else: search_queue += graph[person] searched.append(person) return False <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> def is_mango_seller(name): return name[-1] == 'm' def search_mango_seller(name): search_queue = deque() searched = [] global graph search_queue += graph[name] while search_queue: person = search_queue.popleft() if not person in searched: if is_mango_seller(person): print('%s is a mango seller' % person) return True else: search_queue += graph[person] searched.append(person) return False search_mango_seller('you') <|reserved_special_token_1|> <|reserved_special_token_0|> graph = {} graph['you'] = ['alice', 'bob', 'claire'] graph['bob'] = ['anuj', 'peggy'] graph['alice'] = ['peggy'] graph['claire'] = ['thom', 'jonny'] graph['anuj'] = [] graph['peggy'] = [] graph['thom'] = [] graph['jonny'] = [] def is_mango_seller(name): return name[-1] == 'm' def search_mango_seller(name): search_queue = deque() searched = [] global graph search_queue += graph[name] while search_queue: person = search_queue.popleft() if not person in searched: if is_mango_seller(person): print('%s is a mango seller' % person) return True else: search_queue += graph[person] searched.append(person) return False search_mango_seller('you') <|reserved_special_token_1|> from collections import deque graph = {} graph['you'] = ['alice', 'bob', 'claire'] graph['bob'] = ['anuj', 'peggy'] graph['alice'] = ['peggy'] graph['claire'] = ['thom', 'jonny'] graph['anuj'] = [] graph['peggy'] = [] graph['thom'] = [] graph['jonny'] = [] def is_mango_seller(name): return name[-1] == 'm' def search_mango_seller(name): 
search_queue = deque() searched = [] global graph search_queue += graph[name] while search_queue: person = search_queue.popleft() if not person in searched: if is_mango_seller(person): print('%s is a mango seller' % person) return True else: search_queue += graph[person] searched.append(person) return False search_mango_seller('you') <|reserved_special_token_1|> #!/usr/bin/env python # -*- coding: utf-8 -*- # @Author: WuTian # @Date : 2018/5/3 # @Contact : jsj0804wt@126.com # @Desc :使用广度优先搜索查找芒果商 from collections import deque graph = {} graph["you"] = ["alice", "bob", "claire"] graph["bob"] = ["anuj", "peggy"] graph["alice"] = ["peggy"] graph["claire"] = ["thom", "jonny"] graph["anuj"] = [] graph["peggy"] = [] graph["thom"] = [] graph["jonny"] = [] def is_mango_seller(name): return name[-1] == "m" def search_mango_seller(name): search_queue = deque() searched = [] global graph search_queue += graph[name] while search_queue: person = search_queue.popleft() if not person in searched: if is_mango_seller(person): print("%s is a mango seller" % person) return True else: search_queue += graph[person] searched.append(person) return False search_mango_seller("you")
flexible
{ "blob_id": "e881fcfce933d8f3bafcbaab039ddcf98827bf5e", "index": 4244, "step-1": "<mask token>\n\n\ndef is_mango_seller(name):\n return name[-1] == 'm'\n\n\ndef search_mango_seller(name):\n search_queue = deque()\n searched = []\n global graph\n search_queue += graph[name]\n while search_queue:\n person = search_queue.popleft()\n if not person in searched:\n if is_mango_seller(person):\n print('%s is a mango seller' % person)\n return True\n else:\n search_queue += graph[person]\n searched.append(person)\n return False\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\ndef is_mango_seller(name):\n return name[-1] == 'm'\n\n\ndef search_mango_seller(name):\n search_queue = deque()\n searched = []\n global graph\n search_queue += graph[name]\n while search_queue:\n person = search_queue.popleft()\n if not person in searched:\n if is_mango_seller(person):\n print('%s is a mango seller' % person)\n return True\n else:\n search_queue += graph[person]\n searched.append(person)\n return False\n\n\nsearch_mango_seller('you')\n", "step-3": "<mask token>\ngraph = {}\ngraph['you'] = ['alice', 'bob', 'claire']\ngraph['bob'] = ['anuj', 'peggy']\ngraph['alice'] = ['peggy']\ngraph['claire'] = ['thom', 'jonny']\ngraph['anuj'] = []\ngraph['peggy'] = []\ngraph['thom'] = []\ngraph['jonny'] = []\n\n\ndef is_mango_seller(name):\n return name[-1] == 'm'\n\n\ndef search_mango_seller(name):\n search_queue = deque()\n searched = []\n global graph\n search_queue += graph[name]\n while search_queue:\n person = search_queue.popleft()\n if not person in searched:\n if is_mango_seller(person):\n print('%s is a mango seller' % person)\n return True\n else:\n search_queue += graph[person]\n searched.append(person)\n return False\n\n\nsearch_mango_seller('you')\n", "step-4": "from collections import deque\ngraph = {}\ngraph['you'] = ['alice', 'bob', 'claire']\ngraph['bob'] = ['anuj', 'peggy']\ngraph['alice'] = ['peggy']\ngraph['claire'] = ['thom', 'jonny']\ngraph['anuj'] = []\ngraph['peggy'] = 
[]\ngraph['thom'] = []\ngraph['jonny'] = []\n\n\ndef is_mango_seller(name):\n return name[-1] == 'm'\n\n\ndef search_mango_seller(name):\n search_queue = deque()\n searched = []\n global graph\n search_queue += graph[name]\n while search_queue:\n person = search_queue.popleft()\n if not person in searched:\n if is_mango_seller(person):\n print('%s is a mango seller' % person)\n return True\n else:\n search_queue += graph[person]\n searched.append(person)\n return False\n\n\nsearch_mango_seller('you')\n", "step-5": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n# @Author: WuTian\n# @Date : 2018/5/3\n# @Contact : jsj0804wt@126.com\n# @Desc :使用广度优先搜索查找芒果商\nfrom collections import deque\n\ngraph = {}\ngraph[\"you\"] = [\"alice\", \"bob\", \"claire\"]\ngraph[\"bob\"] = [\"anuj\", \"peggy\"]\ngraph[\"alice\"] = [\"peggy\"]\ngraph[\"claire\"] = [\"thom\", \"jonny\"]\ngraph[\"anuj\"] = []\ngraph[\"peggy\"] = []\ngraph[\"thom\"] = []\ngraph[\"jonny\"] = []\n\n\ndef is_mango_seller(name):\n return name[-1] == \"m\"\n\n\ndef search_mango_seller(name):\n search_queue = deque()\n searched = []\n global graph\n search_queue += graph[name]\n while search_queue:\n person = search_queue.popleft()\n if not person in searched:\n if is_mango_seller(person):\n print(\"%s is a mango seller\" % person)\n return True\n else:\n search_queue += graph[person]\n searched.append(person)\n return False\n\nsearch_mango_seller(\"you\")\n", "step-ids": [ 2, 3, 4, 5, 6 ] }
[ 2, 3, 4, 5, 6 ]
#ABC114 A - クイズ print("ABC" if input()=="1" else "chokudai")
normal
{ "blob_id": "14d31a4b7491a7f7a64cd151e79c23546e4a3cd2", "index": 7683, "step-1": "<mask token>\n", "step-2": "print('ABC' if input() == '1' else 'chokudai')\n", "step-3": "#ABC114 A - クイズ\nprint(\"ABC\" if input()==\"1\" else \"chokudai\")\n", "step-4": null, "step-5": null, "step-ids": [ 0, 1, 2 ] }
[ 0, 1, 2 ]
# Generated by Django 3.2 on 2021-04-21 13:21 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('rate', '0003_auto_20210421_1316'), ] operations = [ migrations.AlterField( model_name='song', name='overall_rating', field=models.FloatField(default=0), ), migrations.AlterField( model_name='song', name='rating_count', field=models.FloatField(default=0), ), ]
normal
{ "blob_id": "d46cda5354640e1c87432d39a2e949d6db034edc", "index": 6413, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n", "step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('rate', '0003_auto_20210421_1316')]\n operations = [migrations.AlterField(model_name='song', name=\n 'overall_rating', field=models.FloatField(default=0)), migrations.\n AlterField(model_name='song', name='rating_count', field=models.\n FloatField(default=0))]\n", "step-4": "from django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('rate', '0003_auto_20210421_1316')]\n operations = [migrations.AlterField(model_name='song', name=\n 'overall_rating', field=models.FloatField(default=0)), migrations.\n AlterField(model_name='song', name='rating_count', field=models.\n FloatField(default=0))]\n", "step-5": "# Generated by Django 3.2 on 2021-04-21 13:21\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('rate', '0003_auto_20210421_1316'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='song',\n name='overall_rating',\n field=models.FloatField(default=0),\n ),\n migrations.AlterField(\n model_name='song',\n name='rating_count',\n field=models.FloatField(default=0),\n ),\n ]\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
#!/usr/bin/python #encoding=utf8 import sys import tushare as ts def local_main(): if len(sys.argv) != 2: print sys.argv[0], " [stock id]" return stock_id = sys.argv[1] df = ts.get_hist_data(stock_id) df.to_excel(stock_id + '_his.xlsx', sheet_name = stock_id) if __name__ == '__main__': local_main()
normal
{ "blob_id": "81a53d08ab36e85dd49cf1f3d9c22c1f18605149", "index": 6233, "step-1": "#!/usr/bin/python\n#encoding=utf8\n\nimport sys\nimport tushare as ts\n\ndef local_main():\n if len(sys.argv) != 2:\n print sys.argv[0], \" [stock id]\"\n return\n\n stock_id = sys.argv[1]\n df = ts.get_hist_data(stock_id)\n df.to_excel(stock_id + '_his.xlsx', sheet_name = stock_id)\n\nif __name__ == '__main__':\n local_main()", "step-2": null, "step-3": null, "step-4": null, "step-5": null, "step-ids": [ 0 ] }
[ 0 ]
<|reserved_special_token_0|> class UIMainWindow(object): <|reserved_special_token_0|> def retranslateUI(self): _translate = QtCore.QCoreApplication.translate self.main_window.setWindowTitle(_translate('main_window', 'SentiCompare')) self.add_button.setText(_translate('main_window', 'Add')) self.delete_button.setText(_translate('main_window', 'Delete')) self.input_output_box.setTabText(self.input_output_box.indexOf(self .select_files_tab), _translate('main_window', 'Select Files')) self.input_output_box.setTabText(self.input_output_box.indexOf(self .manual_input_tab), _translate('main_window', 'Manual Input')) self.input_output_box.setTabText(self.input_output_box.indexOf(self .results_tab), _translate('main_window', 'Results')) self.run_button.setText(_translate('main_window', 'Run')) self.quit_button.setText(_translate('main_window', 'Quit')) self.pronoun_checkbox.setText(_translate('main_window', 'Pronoun Usage')) self.lexical_checkbox.setText(_translate('main_window', 'Lexical')) self.rule_based_checkbox.setText(_translate('main_window', 'Rule Based')) self.machine_learning_checkbox.setText(_translate('main_window', 'Machine Learning')) self.branding_label.setText(_translate('main_window', 'SentiCompare')) def showWindow(self): self.main_window.show() def selectFiles(self): file_dialog = FileDialog(self.main_window) file_dialog.setFilters(['Text files (*.txt)']) file_dialog.setDefaultFilterIndex = 0 file_dialog.setDefaultDirectory(os.path.expanduser('~')) file_dialog.exec() if file_dialog.getPath() == '': return elif file_dialog.getFilename()[2] == '': for file in os.listdir(file_dialog.getPath()): if file.endswith('.txt') and not file.startswith('.'): file_path = os.path.join(file_dialog.getPath(), file) if file_path not in self.selected_files: self.selected_files[file] = file_path item = QStandardItem(file) item.setCheckable(True) self.file_view_model.appendRow(item) elif file_dialog.getPath() not in self.selected_files: 
self.selected_files[file_dialog.getFilename()[1] ] = file_dialog.getPath() item = QStandardItem(file_dialog.getFilename()[1]) item.setCheckable(True) self.file_view_model.appendRow(item) <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class UIMainWindow(object): def __init__(self): font = QtGui.QFont() font.setFamily('Myriad Pro') font.setPointSize(14) self.main_window = QtWidgets.QWidget() self.main_window.setFont(font) self.main_window.setObjectName('main_window') self.main_window.setWindowModality(QtCore.Qt.WindowModal) self.main_window.resize(450, 460) size_policy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed) size_policy.setHorizontalStretch(0) size_policy.setVerticalStretch(0) size_policy.setHeightForWidth(self.main_window.sizePolicy(). hasHeightForWidth()) self.main_window.setSizePolicy(size_policy) self.main_window.setMinimumSize(QtCore.QSize(450, 460)) self.main_window.setMaximumSize(QtCore.QSize(450, 460)) self.main_window.setBaseSize(QtCore.QSize(450, 460)) self.branding_icon = QtWidgets.QLabel(self.main_window) self.branding_icon.setGeometry(QtCore.QRect(20, 5, 90, 90)) self.branding_icon.setText('') self.branding_icon.setPixmap(QtGui.QPixmap( '../images/senticompare_logo.png')) self.branding_icon.setAlignment(QtCore.Qt.AlignJustify | QtCore.Qt. 
AlignVCenter) self.branding_icon.setObjectName('branding_icon') self.branding_label = QtWidgets.QLabel(self.main_window) self.branding_label.setGeometry(QtCore.QRect(110, 5, 330, 90)) palette = QtGui.QPalette() brush = QtGui.QBrush(QtGui.QColor(81, 108, 146)) brush.setStyle(QtCore.Qt.SolidPattern) palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush) brush = QtGui.QBrush(QtGui.QColor(0, 0, 0)) brush.setStyle(QtCore.Qt.SolidPattern) palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush) brush = QtGui.QBrush(QtGui.QColor(81, 108, 146)) brush.setStyle(QtCore.Qt.SolidPattern) palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush) brush = QtGui.QBrush(QtGui.QColor(0, 0, 0)) brush.setStyle(QtCore.Qt.SolidPattern) palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush) brush = QtGui.QBrush(QtGui.QColor(127, 127, 127)) brush.setStyle(QtCore.Qt.SolidPattern) palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush) brush = QtGui.QBrush(QtGui.QColor(127, 127, 127)) brush.setStyle(QtCore.Qt.SolidPattern) palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush) self.branding_label.setPalette(palette) font = QtGui.QFont() font.setFamily('Optima') font.setPointSize(50) self.branding_label.setFont(font) self.branding_label.setObjectName('branding_label') self.horizontal_layout_widget_1 = QtWidgets.QWidget(self.main_window) self.horizontal_layout_widget_1.setGeometry(QtCore.QRect(10, 410, 430, 50)) self.horizontal_layout_widget_1.setObjectName( 'horizontal_layout_widget_1') self.horizontal_layout_1 = QtWidgets.QHBoxLayout(self. 
horizontal_layout_widget_1) self.horizontal_layout_1.setContentsMargins(0, 0, 0, 0) self.horizontal_layout_1.setObjectName('horizontal_layout_1') self.run_button = QtWidgets.QPushButton(self.horizontal_layout_widget_1 ) self.run_button.setObjectName('run_button') self.run_button.clicked.connect(self.run) self.horizontal_layout_1.addWidget(self.run_button) self.quit_button = QtWidgets.QPushButton(self. horizontal_layout_widget_1) self.quit_button.setObjectName('quit_button') self.quit_button.clicked.connect(self.main_window.close) self.horizontal_layout_1.addWidget(self.quit_button) self.select_files_tab = QtWidgets.QWidget() self.select_files_tab.setObjectName('select_files_tab') self.horizontal_layout_widget_2 = QtWidgets.QWidget(self. select_files_tab) self.horizontal_layout_widget_2.setGeometry(QtCore.QRect(10, 230, 230, 50)) self.horizontal_layout_widget_2.setObjectName( 'horizontal_layout_widget_2') self.horizontal_layout_2 = QtWidgets.QHBoxLayout(self. horizontal_layout_widget_2) self.horizontal_layout_2.setContentsMargins(0, 0, 0, 0) self.horizontal_layout_2.setObjectName('horizontal_layout_2') font.setFamily('Myriad Pro') font.setPointSize(12) self.input_output_box = QtWidgets.QTabWidget(self.main_window) self.input_output_box.setGeometry(QtCore.QRect(10, 100, 260, 300)) self.input_output_box.setFont(font) self.input_output_box.setCursor(QtGui.QCursor(QtCore.Qt. 
PointingHandCursor)) self.input_output_box.setTabPosition(QtWidgets.QTabWidget.North) self.input_output_box.setTabShape(QtWidgets.QTabWidget.Rounded) self.input_output_box.setTabsClosable(False) self.input_output_box.setObjectName('input_output_box') self.file_view = QtWidgets.QListView(self.select_files_tab) self.file_view.setGeometry(QtCore.QRect(10, 10, 235, 210)) self.file_view.setObjectName('file_view') self.file_view_model = QStandardItemModel(self.file_view) self.file_view.setModel(self.file_view_model) self.file_view.show() self.input_output_box.addTab(self.select_files_tab, '') self.add_button = QtWidgets.QPushButton(self.horizontal_layout_widget_2 ) self.add_button.setFont(font) self.add_button.setObjectName('add_button') self.add_button.clicked.connect(self.selectFiles) self.horizontal_layout_2.addWidget(self.add_button) self.delete_button = QtWidgets.QPushButton(self. horizontal_layout_widget_2) self.delete_button.setFont(font) self.delete_button.setObjectName('delete_button') self.delete_button.clicked.connect(self.removeFiles) self.horizontal_layout_2.addWidget(self.delete_button) self.manual_input_tab = QtWidgets.QWidget() self.manual_input_tab.setObjectName('manual_input_tab') self.text_input = QtWidgets.QTextEdit(self.manual_input_tab) self.text_input.setGeometry(QtCore.QRect(10, 10, 235, 250)) self.text_input.setObjectName('text_input') self.input_output_box.addTab(self.manual_input_tab, '') self.results_tab = QtWidgets.QWidget() self.results_tab.setObjectName('results_tab') self.results_scroll_box = QtWidgets.QScrollArea(self.results_tab) self.results_scroll_box.setGeometry(QtCore.QRect(10, 10, 235, 250)) self.results_scroll_box.setWidgetResizable(True) self.results_scroll_box.setObjectName('results_scroll_box') self.results_content = QtWidgets.QWidget() self.results_content.setGeometry(QtCore.QRect(0, 0, 230, 250)) self.results_content.setObjectName('results_content') self.results_scroll_box.setWidget(self.results_content) 
self.results_content_text = QtWidgets.QTextEdit(self.results_content) self.results_content_text.setGeometry(QtCore.QRect(-1, -1, 235, 250)) self.results_content_text.setReadOnly(True) self.results_content_text.setObjectName('results_content_text') self.input_output_box.addTab(self.results_tab, '') self.input_output_box.setTabEnabled(2, False) font.setPointSize(14) self.group_box_1 = QtWidgets.QGroupBox(self.main_window) self.group_box_1.setGeometry(QtCore.QRect(280, 110, 160, 140)) self.group_box_1.setFont(font) self.group_box_1.setTitle('') self.group_box_1.setAlignment(QtCore.Qt.AlignCenter) self.group_box_1.setFlat(False) self.group_box_1.setCheckable(False) self.group_box_1.setObjectName('group_box_1') self.vertical_layout_widget_1 = QtWidgets.QWidget(self.group_box_1) self.vertical_layout_widget_1.setGeometry(QtCore.QRect(9, 0, 141, 141)) self.vertical_layout_widget_1.setObjectName('vertical_layout_widget_1') self.vertical_layout_1 = QtWidgets.QVBoxLayout(self. vertical_layout_widget_1) self.vertical_layout_1.setContentsMargins(0, 0, 0, 0) self.vertical_layout_1.setObjectName('vertical_layout_1') self.pronoun_checkbox = QtWidgets.QCheckBox(self. vertical_layout_widget_1) self.pronoun_checkbox.setFont(font) self.pronoun_checkbox.setObjectName('pronoun_checkbox') self.vertical_layout_1.addWidget(self.pronoun_checkbox) self.lexical_checkbox = QtWidgets.QCheckBox(self. vertical_layout_widget_1) self.lexical_checkbox.setFont(font) self.lexical_checkbox.setObjectName('lexical_checkbox') self.vertical_layout_1.addWidget(self.lexical_checkbox) self.rule_based_checkbox = QtWidgets.QCheckBox(self. vertical_layout_widget_1) self.rule_based_checkbox.setFont(font) self.rule_based_checkbox.setObjectName('rule_based_checkbox') self.vertical_layout_1.addWidget(self.rule_based_checkbox) self.machine_learning_checkbox = QtWidgets.QCheckBox(self. 
vertical_layout_widget_1) self.machine_learning_checkbox.setFont(font) self.machine_learning_checkbox.setObjectName( 'machine_learning_checkbox') self.vertical_layout_1.addWidget(self.machine_learning_checkbox) self.help_scroll_box = QtWidgets.QScrollArea(self.main_window) self.help_scroll_box.setGeometry(QtCore.QRect(280, 260, 160, 140)) self.help_scroll_box.setFrameShape(QtWidgets.QFrame.StyledPanel) self.help_scroll_box.setFrameShadow(QtWidgets.QFrame.Sunken) self.help_scroll_box.setWidgetResizable(True) self.help_scroll_box.setObjectName('help_scroll_box') self.help_content = QtWidgets.QWidget() self.help_content.setGeometry(QtCore.QRect(0, 0, 158, 138)) self.help_content.setObjectName('help_content') self.help_scroll_box.setWidget(self.help_content) self.selected_files = {} self.input_output_box.setCurrentIndex(0) self.retranslateUI() QtCore.QMetaObject.connectSlotsByName(self.main_window) def retranslateUI(self): _translate = QtCore.QCoreApplication.translate self.main_window.setWindowTitle(_translate('main_window', 'SentiCompare')) self.add_button.setText(_translate('main_window', 'Add')) self.delete_button.setText(_translate('main_window', 'Delete')) self.input_output_box.setTabText(self.input_output_box.indexOf(self .select_files_tab), _translate('main_window', 'Select Files')) self.input_output_box.setTabText(self.input_output_box.indexOf(self .manual_input_tab), _translate('main_window', 'Manual Input')) self.input_output_box.setTabText(self.input_output_box.indexOf(self .results_tab), _translate('main_window', 'Results')) self.run_button.setText(_translate('main_window', 'Run')) self.quit_button.setText(_translate('main_window', 'Quit')) self.pronoun_checkbox.setText(_translate('main_window', 'Pronoun Usage')) self.lexical_checkbox.setText(_translate('main_window', 'Lexical')) self.rule_based_checkbox.setText(_translate('main_window', 'Rule Based')) self.machine_learning_checkbox.setText(_translate('main_window', 'Machine Learning')) 
self.branding_label.setText(_translate('main_window', 'SentiCompare')) def showWindow(self): self.main_window.show() def selectFiles(self): file_dialog = FileDialog(self.main_window) file_dialog.setFilters(['Text files (*.txt)']) file_dialog.setDefaultFilterIndex = 0 file_dialog.setDefaultDirectory(os.path.expanduser('~')) file_dialog.exec() if file_dialog.getPath() == '': return elif file_dialog.getFilename()[2] == '': for file in os.listdir(file_dialog.getPath()): if file.endswith('.txt') and not file.startswith('.'): file_path = os.path.join(file_dialog.getPath(), file) if file_path not in self.selected_files: self.selected_files[file] = file_path item = QStandardItem(file) item.setCheckable(True) self.file_view_model.appendRow(item) elif file_dialog.getPath() not in self.selected_files: self.selected_files[file_dialog.getFilename()[1] ] = file_dialog.getPath() item = QStandardItem(file_dialog.getFilename()[1]) item.setCheckable(True) self.file_view_model.appendRow(item) <|reserved_special_token_0|> def run(self): if not (self.pronoun_checkbox.isChecked() or self.lexical_checkbox. isChecked() or self.rule_based_checkbox.isChecked() or self. machine_learning_checkbox.isChecked()): message_box = QMessageBox() message_box.setIcon(QMessageBox.Warning) message_box.setWindowTitle('Missing Parameters') message_box.setText( "You haven't selected any methods of sentiment analysis. Please select at least one " + 'method from the list of options.') message_box.exec_() return if self.input_output_box.currentIndex() == 2: message_box = QMessageBox() message_box.setIcon(QMessageBox.Warning) message_box.setWindowTitle('Select Input') message_box.setText( 'You must be on the "Select Files" page or the "Manual Input" page to run ' + 'an analysis. Please select one of those pages and try again.') message_box.exec_() return else: progress_bar = QtWidgets.QProgressDialog( 'Running Sentiment Analysis...', 'Cancel', 0, 100, self. 
main_window) progress_bar.setValue(0) progress_bar.setCancelButton(None) progress_bar.setWindowModality(QtCore.Qt.WindowModal) progress_bar.resize(400, 50) progress_bar.show() if self.input_output_box.currentIndex() == 0: sentiment_analyzer = SentimentAnalyzer(self.selected_files, progress_bar, pronoun=self.pronoun_checkbox.isChecked(), lexical=self.lexical_checkbox.isChecked(), rule_based= self.rule_based_checkbox.isChecked(), machine_learning= self.machine_learning_checkbox.isChecked()) else: sentiment_analyzer = SentimentAnalyzer(self.text_input. toPlainText(), progress_bar, pronoun=self. pronoun_checkbox.isChecked(), lexical=self. lexical_checkbox.isChecked(), rule_based=self. rule_based_checkbox.isChecked(), machine_learning=self. machine_learning_checkbox.isChecked()) results = sentiment_analyzer.runAnalyses() progress_bar.close() if results: self.results_content_text.setText(results) self.input_output_box.setTabEnabled(2, True) self.input_output_box.setCurrentIndex(2) else: message_box = QMessageBox() message_box.setIcon(QMessageBox.Warning) message_box.setWindowTitle('Missing Input') message_box.setText( "You haven't added any input to analyze. Please select one or more files or " + 'input some data manually.') message_box.exec_() return <|reserved_special_token_1|> <|reserved_special_token_0|> class UIMainWindow(object): def __init__(self): font = QtGui.QFont() font.setFamily('Myriad Pro') font.setPointSize(14) self.main_window = QtWidgets.QWidget() self.main_window.setFont(font) self.main_window.setObjectName('main_window') self.main_window.setWindowModality(QtCore.Qt.WindowModal) self.main_window.resize(450, 460) size_policy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed) size_policy.setHorizontalStretch(0) size_policy.setVerticalStretch(0) size_policy.setHeightForWidth(self.main_window.sizePolicy(). 
hasHeightForWidth()) self.main_window.setSizePolicy(size_policy) self.main_window.setMinimumSize(QtCore.QSize(450, 460)) self.main_window.setMaximumSize(QtCore.QSize(450, 460)) self.main_window.setBaseSize(QtCore.QSize(450, 460)) self.branding_icon = QtWidgets.QLabel(self.main_window) self.branding_icon.setGeometry(QtCore.QRect(20, 5, 90, 90)) self.branding_icon.setText('') self.branding_icon.setPixmap(QtGui.QPixmap( '../images/senticompare_logo.png')) self.branding_icon.setAlignment(QtCore.Qt.AlignJustify | QtCore.Qt. AlignVCenter) self.branding_icon.setObjectName('branding_icon') self.branding_label = QtWidgets.QLabel(self.main_window) self.branding_label.setGeometry(QtCore.QRect(110, 5, 330, 90)) palette = QtGui.QPalette() brush = QtGui.QBrush(QtGui.QColor(81, 108, 146)) brush.setStyle(QtCore.Qt.SolidPattern) palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush) brush = QtGui.QBrush(QtGui.QColor(0, 0, 0)) brush.setStyle(QtCore.Qt.SolidPattern) palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush) brush = QtGui.QBrush(QtGui.QColor(81, 108, 146)) brush.setStyle(QtCore.Qt.SolidPattern) palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush) brush = QtGui.QBrush(QtGui.QColor(0, 0, 0)) brush.setStyle(QtCore.Qt.SolidPattern) palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush) brush = QtGui.QBrush(QtGui.QColor(127, 127, 127)) brush.setStyle(QtCore.Qt.SolidPattern) palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush) brush = QtGui.QBrush(QtGui.QColor(127, 127, 127)) brush.setStyle(QtCore.Qt.SolidPattern) palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush) self.branding_label.setPalette(palette) font = QtGui.QFont() font.setFamily('Optima') font.setPointSize(50) self.branding_label.setFont(font) self.branding_label.setObjectName('branding_label') self.horizontal_layout_widget_1 = QtWidgets.QWidget(self.main_window) 
self.horizontal_layout_widget_1.setGeometry(QtCore.QRect(10, 410, 430, 50)) self.horizontal_layout_widget_1.setObjectName( 'horizontal_layout_widget_1') self.horizontal_layout_1 = QtWidgets.QHBoxLayout(self. horizontal_layout_widget_1) self.horizontal_layout_1.setContentsMargins(0, 0, 0, 0) self.horizontal_layout_1.setObjectName('horizontal_layout_1') self.run_button = QtWidgets.QPushButton(self.horizontal_layout_widget_1 ) self.run_button.setObjectName('run_button') self.run_button.clicked.connect(self.run) self.horizontal_layout_1.addWidget(self.run_button) self.quit_button = QtWidgets.QPushButton(self. horizontal_layout_widget_1) self.quit_button.setObjectName('quit_button') self.quit_button.clicked.connect(self.main_window.close) self.horizontal_layout_1.addWidget(self.quit_button) self.select_files_tab = QtWidgets.QWidget() self.select_files_tab.setObjectName('select_files_tab') self.horizontal_layout_widget_2 = QtWidgets.QWidget(self. select_files_tab) self.horizontal_layout_widget_2.setGeometry(QtCore.QRect(10, 230, 230, 50)) self.horizontal_layout_widget_2.setObjectName( 'horizontal_layout_widget_2') self.horizontal_layout_2 = QtWidgets.QHBoxLayout(self. horizontal_layout_widget_2) self.horizontal_layout_2.setContentsMargins(0, 0, 0, 0) self.horizontal_layout_2.setObjectName('horizontal_layout_2') font.setFamily('Myriad Pro') font.setPointSize(12) self.input_output_box = QtWidgets.QTabWidget(self.main_window) self.input_output_box.setGeometry(QtCore.QRect(10, 100, 260, 300)) self.input_output_box.setFont(font) self.input_output_box.setCursor(QtGui.QCursor(QtCore.Qt. 
PointingHandCursor)) self.input_output_box.setTabPosition(QtWidgets.QTabWidget.North) self.input_output_box.setTabShape(QtWidgets.QTabWidget.Rounded) self.input_output_box.setTabsClosable(False) self.input_output_box.setObjectName('input_output_box') self.file_view = QtWidgets.QListView(self.select_files_tab) self.file_view.setGeometry(QtCore.QRect(10, 10, 235, 210)) self.file_view.setObjectName('file_view') self.file_view_model = QStandardItemModel(self.file_view) self.file_view.setModel(self.file_view_model) self.file_view.show() self.input_output_box.addTab(self.select_files_tab, '') self.add_button = QtWidgets.QPushButton(self.horizontal_layout_widget_2 ) self.add_button.setFont(font) self.add_button.setObjectName('add_button') self.add_button.clicked.connect(self.selectFiles) self.horizontal_layout_2.addWidget(self.add_button) self.delete_button = QtWidgets.QPushButton(self. horizontal_layout_widget_2) self.delete_button.setFont(font) self.delete_button.setObjectName('delete_button') self.delete_button.clicked.connect(self.removeFiles) self.horizontal_layout_2.addWidget(self.delete_button) self.manual_input_tab = QtWidgets.QWidget() self.manual_input_tab.setObjectName('manual_input_tab') self.text_input = QtWidgets.QTextEdit(self.manual_input_tab) self.text_input.setGeometry(QtCore.QRect(10, 10, 235, 250)) self.text_input.setObjectName('text_input') self.input_output_box.addTab(self.manual_input_tab, '') self.results_tab = QtWidgets.QWidget() self.results_tab.setObjectName('results_tab') self.results_scroll_box = QtWidgets.QScrollArea(self.results_tab) self.results_scroll_box.setGeometry(QtCore.QRect(10, 10, 235, 250)) self.results_scroll_box.setWidgetResizable(True) self.results_scroll_box.setObjectName('results_scroll_box') self.results_content = QtWidgets.QWidget() self.results_content.setGeometry(QtCore.QRect(0, 0, 230, 250)) self.results_content.setObjectName('results_content') self.results_scroll_box.setWidget(self.results_content) 
self.results_content_text = QtWidgets.QTextEdit(self.results_content) self.results_content_text.setGeometry(QtCore.QRect(-1, -1, 235, 250)) self.results_content_text.setReadOnly(True) self.results_content_text.setObjectName('results_content_text') self.input_output_box.addTab(self.results_tab, '') self.input_output_box.setTabEnabled(2, False) font.setPointSize(14) self.group_box_1 = QtWidgets.QGroupBox(self.main_window) self.group_box_1.setGeometry(QtCore.QRect(280, 110, 160, 140)) self.group_box_1.setFont(font) self.group_box_1.setTitle('') self.group_box_1.setAlignment(QtCore.Qt.AlignCenter) self.group_box_1.setFlat(False) self.group_box_1.setCheckable(False) self.group_box_1.setObjectName('group_box_1') self.vertical_layout_widget_1 = QtWidgets.QWidget(self.group_box_1) self.vertical_layout_widget_1.setGeometry(QtCore.QRect(9, 0, 141, 141)) self.vertical_layout_widget_1.setObjectName('vertical_layout_widget_1') self.vertical_layout_1 = QtWidgets.QVBoxLayout(self. vertical_layout_widget_1) self.vertical_layout_1.setContentsMargins(0, 0, 0, 0) self.vertical_layout_1.setObjectName('vertical_layout_1') self.pronoun_checkbox = QtWidgets.QCheckBox(self. vertical_layout_widget_1) self.pronoun_checkbox.setFont(font) self.pronoun_checkbox.setObjectName('pronoun_checkbox') self.vertical_layout_1.addWidget(self.pronoun_checkbox) self.lexical_checkbox = QtWidgets.QCheckBox(self. vertical_layout_widget_1) self.lexical_checkbox.setFont(font) self.lexical_checkbox.setObjectName('lexical_checkbox') self.vertical_layout_1.addWidget(self.lexical_checkbox) self.rule_based_checkbox = QtWidgets.QCheckBox(self. vertical_layout_widget_1) self.rule_based_checkbox.setFont(font) self.rule_based_checkbox.setObjectName('rule_based_checkbox') self.vertical_layout_1.addWidget(self.rule_based_checkbox) self.machine_learning_checkbox = QtWidgets.QCheckBox(self. 
vertical_layout_widget_1) self.machine_learning_checkbox.setFont(font) self.machine_learning_checkbox.setObjectName( 'machine_learning_checkbox') self.vertical_layout_1.addWidget(self.machine_learning_checkbox) self.help_scroll_box = QtWidgets.QScrollArea(self.main_window) self.help_scroll_box.setGeometry(QtCore.QRect(280, 260, 160, 140)) self.help_scroll_box.setFrameShape(QtWidgets.QFrame.StyledPanel) self.help_scroll_box.setFrameShadow(QtWidgets.QFrame.Sunken) self.help_scroll_box.setWidgetResizable(True) self.help_scroll_box.setObjectName('help_scroll_box') self.help_content = QtWidgets.QWidget() self.help_content.setGeometry(QtCore.QRect(0, 0, 158, 138)) self.help_content.setObjectName('help_content') self.help_scroll_box.setWidget(self.help_content) self.selected_files = {} self.input_output_box.setCurrentIndex(0) self.retranslateUI() QtCore.QMetaObject.connectSlotsByName(self.main_window) def retranslateUI(self): _translate = QtCore.QCoreApplication.translate self.main_window.setWindowTitle(_translate('main_window', 'SentiCompare')) self.add_button.setText(_translate('main_window', 'Add')) self.delete_button.setText(_translate('main_window', 'Delete')) self.input_output_box.setTabText(self.input_output_box.indexOf(self .select_files_tab), _translate('main_window', 'Select Files')) self.input_output_box.setTabText(self.input_output_box.indexOf(self .manual_input_tab), _translate('main_window', 'Manual Input')) self.input_output_box.setTabText(self.input_output_box.indexOf(self .results_tab), _translate('main_window', 'Results')) self.run_button.setText(_translate('main_window', 'Run')) self.quit_button.setText(_translate('main_window', 'Quit')) self.pronoun_checkbox.setText(_translate('main_window', 'Pronoun Usage')) self.lexical_checkbox.setText(_translate('main_window', 'Lexical')) self.rule_based_checkbox.setText(_translate('main_window', 'Rule Based')) self.machine_learning_checkbox.setText(_translate('main_window', 'Machine Learning')) 
self.branding_label.setText(_translate('main_window', 'SentiCompare')) def showWindow(self): self.main_window.show() def selectFiles(self): file_dialog = FileDialog(self.main_window) file_dialog.setFilters(['Text files (*.txt)']) file_dialog.setDefaultFilterIndex = 0 file_dialog.setDefaultDirectory(os.path.expanduser('~')) file_dialog.exec() if file_dialog.getPath() == '': return elif file_dialog.getFilename()[2] == '': for file in os.listdir(file_dialog.getPath()): if file.endswith('.txt') and not file.startswith('.'): file_path = os.path.join(file_dialog.getPath(), file) if file_path not in self.selected_files: self.selected_files[file] = file_path item = QStandardItem(file) item.setCheckable(True) self.file_view_model.appendRow(item) elif file_dialog.getPath() not in self.selected_files: self.selected_files[file_dialog.getFilename()[1] ] = file_dialog.getPath() item = QStandardItem(file_dialog.getFilename()[1]) item.setCheckable(True) self.file_view_model.appendRow(item) def removeFiles(self): for i in range(self.file_view_model.rowCount() - 1, -1, -1): if self.file_view_model.item(i).checkState(): filename = self.file_view_model.item(i).text() del self.selected_files[filename] self.file_view_model.removeRow(i) def run(self): if not (self.pronoun_checkbox.isChecked() or self.lexical_checkbox. isChecked() or self.rule_based_checkbox.isChecked() or self. machine_learning_checkbox.isChecked()): message_box = QMessageBox() message_box.setIcon(QMessageBox.Warning) message_box.setWindowTitle('Missing Parameters') message_box.setText( "You haven't selected any methods of sentiment analysis. Please select at least one " + 'method from the list of options.') message_box.exec_() return if self.input_output_box.currentIndex() == 2: message_box = QMessageBox() message_box.setIcon(QMessageBox.Warning) message_box.setWindowTitle('Select Input') message_box.setText( 'You must be on the "Select Files" page or the "Manual Input" page to run ' + 'an analysis. 
Please select one of those pages and try again.') message_box.exec_() return else: progress_bar = QtWidgets.QProgressDialog( 'Running Sentiment Analysis...', 'Cancel', 0, 100, self. main_window) progress_bar.setValue(0) progress_bar.setCancelButton(None) progress_bar.setWindowModality(QtCore.Qt.WindowModal) progress_bar.resize(400, 50) progress_bar.show() if self.input_output_box.currentIndex() == 0: sentiment_analyzer = SentimentAnalyzer(self.selected_files, progress_bar, pronoun=self.pronoun_checkbox.isChecked(), lexical=self.lexical_checkbox.isChecked(), rule_based= self.rule_based_checkbox.isChecked(), machine_learning= self.machine_learning_checkbox.isChecked()) else: sentiment_analyzer = SentimentAnalyzer(self.text_input. toPlainText(), progress_bar, pronoun=self. pronoun_checkbox.isChecked(), lexical=self. lexical_checkbox.isChecked(), rule_based=self. rule_based_checkbox.isChecked(), machine_learning=self. machine_learning_checkbox.isChecked()) results = sentiment_analyzer.runAnalyses() progress_bar.close() if results: self.results_content_text.setText(results) self.input_output_box.setTabEnabled(2, True) self.input_output_box.setCurrentIndex(2) else: message_box = QMessageBox() message_box.setIcon(QMessageBox.Warning) message_box.setWindowTitle('Missing Input') message_box.setText( "You haven't added any input to analyze. 
Please select one or more files or " + 'input some data manually.') message_box.exec_() return <|reserved_special_token_1|> import os from PyQt5 import QtCore, QtGui, QtWidgets from PyQt5.QtGui import QStandardItem, QStandardItemModel from PyQt5.QtWidgets import QMessageBox from src import FileDialog, SentimentAnalyzer class UIMainWindow(object): def __init__(self): font = QtGui.QFont() font.setFamily('Myriad Pro') font.setPointSize(14) self.main_window = QtWidgets.QWidget() self.main_window.setFont(font) self.main_window.setObjectName('main_window') self.main_window.setWindowModality(QtCore.Qt.WindowModal) self.main_window.resize(450, 460) size_policy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed) size_policy.setHorizontalStretch(0) size_policy.setVerticalStretch(0) size_policy.setHeightForWidth(self.main_window.sizePolicy(). hasHeightForWidth()) self.main_window.setSizePolicy(size_policy) self.main_window.setMinimumSize(QtCore.QSize(450, 460)) self.main_window.setMaximumSize(QtCore.QSize(450, 460)) self.main_window.setBaseSize(QtCore.QSize(450, 460)) self.branding_icon = QtWidgets.QLabel(self.main_window) self.branding_icon.setGeometry(QtCore.QRect(20, 5, 90, 90)) self.branding_icon.setText('') self.branding_icon.setPixmap(QtGui.QPixmap( '../images/senticompare_logo.png')) self.branding_icon.setAlignment(QtCore.Qt.AlignJustify | QtCore.Qt. 
AlignVCenter) self.branding_icon.setObjectName('branding_icon') self.branding_label = QtWidgets.QLabel(self.main_window) self.branding_label.setGeometry(QtCore.QRect(110, 5, 330, 90)) palette = QtGui.QPalette() brush = QtGui.QBrush(QtGui.QColor(81, 108, 146)) brush.setStyle(QtCore.Qt.SolidPattern) palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush) brush = QtGui.QBrush(QtGui.QColor(0, 0, 0)) brush.setStyle(QtCore.Qt.SolidPattern) palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush) brush = QtGui.QBrush(QtGui.QColor(81, 108, 146)) brush.setStyle(QtCore.Qt.SolidPattern) palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush) brush = QtGui.QBrush(QtGui.QColor(0, 0, 0)) brush.setStyle(QtCore.Qt.SolidPattern) palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush) brush = QtGui.QBrush(QtGui.QColor(127, 127, 127)) brush.setStyle(QtCore.Qt.SolidPattern) palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush) brush = QtGui.QBrush(QtGui.QColor(127, 127, 127)) brush.setStyle(QtCore.Qt.SolidPattern) palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush) self.branding_label.setPalette(palette) font = QtGui.QFont() font.setFamily('Optima') font.setPointSize(50) self.branding_label.setFont(font) self.branding_label.setObjectName('branding_label') self.horizontal_layout_widget_1 = QtWidgets.QWidget(self.main_window) self.horizontal_layout_widget_1.setGeometry(QtCore.QRect(10, 410, 430, 50)) self.horizontal_layout_widget_1.setObjectName( 'horizontal_layout_widget_1') self.horizontal_layout_1 = QtWidgets.QHBoxLayout(self. 
horizontal_layout_widget_1) self.horizontal_layout_1.setContentsMargins(0, 0, 0, 0) self.horizontal_layout_1.setObjectName('horizontal_layout_1') self.run_button = QtWidgets.QPushButton(self.horizontal_layout_widget_1 ) self.run_button.setObjectName('run_button') self.run_button.clicked.connect(self.run) self.horizontal_layout_1.addWidget(self.run_button) self.quit_button = QtWidgets.QPushButton(self. horizontal_layout_widget_1) self.quit_button.setObjectName('quit_button') self.quit_button.clicked.connect(self.main_window.close) self.horizontal_layout_1.addWidget(self.quit_button) self.select_files_tab = QtWidgets.QWidget() self.select_files_tab.setObjectName('select_files_tab') self.horizontal_layout_widget_2 = QtWidgets.QWidget(self. select_files_tab) self.horizontal_layout_widget_2.setGeometry(QtCore.QRect(10, 230, 230, 50)) self.horizontal_layout_widget_2.setObjectName( 'horizontal_layout_widget_2') self.horizontal_layout_2 = QtWidgets.QHBoxLayout(self. horizontal_layout_widget_2) self.horizontal_layout_2.setContentsMargins(0, 0, 0, 0) self.horizontal_layout_2.setObjectName('horizontal_layout_2') font.setFamily('Myriad Pro') font.setPointSize(12) self.input_output_box = QtWidgets.QTabWidget(self.main_window) self.input_output_box.setGeometry(QtCore.QRect(10, 100, 260, 300)) self.input_output_box.setFont(font) self.input_output_box.setCursor(QtGui.QCursor(QtCore.Qt. 
PointingHandCursor)) self.input_output_box.setTabPosition(QtWidgets.QTabWidget.North) self.input_output_box.setTabShape(QtWidgets.QTabWidget.Rounded) self.input_output_box.setTabsClosable(False) self.input_output_box.setObjectName('input_output_box') self.file_view = QtWidgets.QListView(self.select_files_tab) self.file_view.setGeometry(QtCore.QRect(10, 10, 235, 210)) self.file_view.setObjectName('file_view') self.file_view_model = QStandardItemModel(self.file_view) self.file_view.setModel(self.file_view_model) self.file_view.show() self.input_output_box.addTab(self.select_files_tab, '') self.add_button = QtWidgets.QPushButton(self.horizontal_layout_widget_2 ) self.add_button.setFont(font) self.add_button.setObjectName('add_button') self.add_button.clicked.connect(self.selectFiles) self.horizontal_layout_2.addWidget(self.add_button) self.delete_button = QtWidgets.QPushButton(self. horizontal_layout_widget_2) self.delete_button.setFont(font) self.delete_button.setObjectName('delete_button') self.delete_button.clicked.connect(self.removeFiles) self.horizontal_layout_2.addWidget(self.delete_button) self.manual_input_tab = QtWidgets.QWidget() self.manual_input_tab.setObjectName('manual_input_tab') self.text_input = QtWidgets.QTextEdit(self.manual_input_tab) self.text_input.setGeometry(QtCore.QRect(10, 10, 235, 250)) self.text_input.setObjectName('text_input') self.input_output_box.addTab(self.manual_input_tab, '') self.results_tab = QtWidgets.QWidget() self.results_tab.setObjectName('results_tab') self.results_scroll_box = QtWidgets.QScrollArea(self.results_tab) self.results_scroll_box.setGeometry(QtCore.QRect(10, 10, 235, 250)) self.results_scroll_box.setWidgetResizable(True) self.results_scroll_box.setObjectName('results_scroll_box') self.results_content = QtWidgets.QWidget() self.results_content.setGeometry(QtCore.QRect(0, 0, 230, 250)) self.results_content.setObjectName('results_content') self.results_scroll_box.setWidget(self.results_content) 
self.results_content_text = QtWidgets.QTextEdit(self.results_content) self.results_content_text.setGeometry(QtCore.QRect(-1, -1, 235, 250)) self.results_content_text.setReadOnly(True) self.results_content_text.setObjectName('results_content_text') self.input_output_box.addTab(self.results_tab, '') self.input_output_box.setTabEnabled(2, False) font.setPointSize(14) self.group_box_1 = QtWidgets.QGroupBox(self.main_window) self.group_box_1.setGeometry(QtCore.QRect(280, 110, 160, 140)) self.group_box_1.setFont(font) self.group_box_1.setTitle('') self.group_box_1.setAlignment(QtCore.Qt.AlignCenter) self.group_box_1.setFlat(False) self.group_box_1.setCheckable(False) self.group_box_1.setObjectName('group_box_1') self.vertical_layout_widget_1 = QtWidgets.QWidget(self.group_box_1) self.vertical_layout_widget_1.setGeometry(QtCore.QRect(9, 0, 141, 141)) self.vertical_layout_widget_1.setObjectName('vertical_layout_widget_1') self.vertical_layout_1 = QtWidgets.QVBoxLayout(self. vertical_layout_widget_1) self.vertical_layout_1.setContentsMargins(0, 0, 0, 0) self.vertical_layout_1.setObjectName('vertical_layout_1') self.pronoun_checkbox = QtWidgets.QCheckBox(self. vertical_layout_widget_1) self.pronoun_checkbox.setFont(font) self.pronoun_checkbox.setObjectName('pronoun_checkbox') self.vertical_layout_1.addWidget(self.pronoun_checkbox) self.lexical_checkbox = QtWidgets.QCheckBox(self. vertical_layout_widget_1) self.lexical_checkbox.setFont(font) self.lexical_checkbox.setObjectName('lexical_checkbox') self.vertical_layout_1.addWidget(self.lexical_checkbox) self.rule_based_checkbox = QtWidgets.QCheckBox(self. vertical_layout_widget_1) self.rule_based_checkbox.setFont(font) self.rule_based_checkbox.setObjectName('rule_based_checkbox') self.vertical_layout_1.addWidget(self.rule_based_checkbox) self.machine_learning_checkbox = QtWidgets.QCheckBox(self. 
vertical_layout_widget_1) self.machine_learning_checkbox.setFont(font) self.machine_learning_checkbox.setObjectName( 'machine_learning_checkbox') self.vertical_layout_1.addWidget(self.machine_learning_checkbox) self.help_scroll_box = QtWidgets.QScrollArea(self.main_window) self.help_scroll_box.setGeometry(QtCore.QRect(280, 260, 160, 140)) self.help_scroll_box.setFrameShape(QtWidgets.QFrame.StyledPanel) self.help_scroll_box.setFrameShadow(QtWidgets.QFrame.Sunken) self.help_scroll_box.setWidgetResizable(True) self.help_scroll_box.setObjectName('help_scroll_box') self.help_content = QtWidgets.QWidget() self.help_content.setGeometry(QtCore.QRect(0, 0, 158, 138)) self.help_content.setObjectName('help_content') self.help_scroll_box.setWidget(self.help_content) self.selected_files = {} self.input_output_box.setCurrentIndex(0) self.retranslateUI() QtCore.QMetaObject.connectSlotsByName(self.main_window) def retranslateUI(self): _translate = QtCore.QCoreApplication.translate self.main_window.setWindowTitle(_translate('main_window', 'SentiCompare')) self.add_button.setText(_translate('main_window', 'Add')) self.delete_button.setText(_translate('main_window', 'Delete')) self.input_output_box.setTabText(self.input_output_box.indexOf(self .select_files_tab), _translate('main_window', 'Select Files')) self.input_output_box.setTabText(self.input_output_box.indexOf(self .manual_input_tab), _translate('main_window', 'Manual Input')) self.input_output_box.setTabText(self.input_output_box.indexOf(self .results_tab), _translate('main_window', 'Results')) self.run_button.setText(_translate('main_window', 'Run')) self.quit_button.setText(_translate('main_window', 'Quit')) self.pronoun_checkbox.setText(_translate('main_window', 'Pronoun Usage')) self.lexical_checkbox.setText(_translate('main_window', 'Lexical')) self.rule_based_checkbox.setText(_translate('main_window', 'Rule Based')) self.machine_learning_checkbox.setText(_translate('main_window', 'Machine Learning')) 
self.branding_label.setText(_translate('main_window', 'SentiCompare')) def showWindow(self): self.main_window.show() def selectFiles(self): file_dialog = FileDialog(self.main_window) file_dialog.setFilters(['Text files (*.txt)']) file_dialog.setDefaultFilterIndex = 0 file_dialog.setDefaultDirectory(os.path.expanduser('~')) file_dialog.exec() if file_dialog.getPath() == '': return elif file_dialog.getFilename()[2] == '': for file in os.listdir(file_dialog.getPath()): if file.endswith('.txt') and not file.startswith('.'): file_path = os.path.join(file_dialog.getPath(), file) if file_path not in self.selected_files: self.selected_files[file] = file_path item = QStandardItem(file) item.setCheckable(True) self.file_view_model.appendRow(item) elif file_dialog.getPath() not in self.selected_files: self.selected_files[file_dialog.getFilename()[1] ] = file_dialog.getPath() item = QStandardItem(file_dialog.getFilename()[1]) item.setCheckable(True) self.file_view_model.appendRow(item) def removeFiles(self): for i in range(self.file_view_model.rowCount() - 1, -1, -1): if self.file_view_model.item(i).checkState(): filename = self.file_view_model.item(i).text() del self.selected_files[filename] self.file_view_model.removeRow(i) def run(self): if not (self.pronoun_checkbox.isChecked() or self.lexical_checkbox. isChecked() or self.rule_based_checkbox.isChecked() or self. machine_learning_checkbox.isChecked()): message_box = QMessageBox() message_box.setIcon(QMessageBox.Warning) message_box.setWindowTitle('Missing Parameters') message_box.setText( "You haven't selected any methods of sentiment analysis. Please select at least one " + 'method from the list of options.') message_box.exec_() return if self.input_output_box.currentIndex() == 2: message_box = QMessageBox() message_box.setIcon(QMessageBox.Warning) message_box.setWindowTitle('Select Input') message_box.setText( 'You must be on the "Select Files" page or the "Manual Input" page to run ' + 'an analysis. 
Please select one of those pages and try again.') message_box.exec_() return else: progress_bar = QtWidgets.QProgressDialog( 'Running Sentiment Analysis...', 'Cancel', 0, 100, self. main_window) progress_bar.setValue(0) progress_bar.setCancelButton(None) progress_bar.setWindowModality(QtCore.Qt.WindowModal) progress_bar.resize(400, 50) progress_bar.show() if self.input_output_box.currentIndex() == 0: sentiment_analyzer = SentimentAnalyzer(self.selected_files, progress_bar, pronoun=self.pronoun_checkbox.isChecked(), lexical=self.lexical_checkbox.isChecked(), rule_based= self.rule_based_checkbox.isChecked(), machine_learning= self.machine_learning_checkbox.isChecked()) else: sentiment_analyzer = SentimentAnalyzer(self.text_input. toPlainText(), progress_bar, pronoun=self. pronoun_checkbox.isChecked(), lexical=self. lexical_checkbox.isChecked(), rule_based=self. rule_based_checkbox.isChecked(), machine_learning=self. machine_learning_checkbox.isChecked()) results = sentiment_analyzer.runAnalyses() progress_bar.close() if results: self.results_content_text.setText(results) self.input_output_box.setTabEnabled(2, True) self.input_output_box.setCurrentIndex(2) else: message_box = QMessageBox() message_box.setIcon(QMessageBox.Warning) message_box.setWindowTitle('Missing Input') message_box.setText( "You haven't added any input to analyze. 
Please select one or more files or " + 'input some data manually.') message_box.exec_() return <|reserved_special_token_1|> # ================================================== # # MAIN WINDOW # # ================================================== # # Author: Brady Hammond # # Created: 11/21/2017 # # Last Edited: N/A # # Last Edited By: N/A # # ================================================== # #                     FILE SETUP                     # # ================================================== # # Import statements import os from PyQt5 import QtCore, QtGui, QtWidgets from PyQt5.QtGui import QStandardItem, QStandardItemModel from PyQt5.QtWidgets import QMessageBox from src import FileDialog, SentimentAnalyzer # ================================================== # #                 CLASS DEFINITION               # # ================================================== # # UIMainWindow class definition class UIMainWindow(object): # Define __init__ function def __init__(self): # Create main window font = QtGui.QFont() font.setFamily("Myriad Pro") font.setPointSize(14) self.main_window = QtWidgets.QWidget() self.main_window.setFont(font) self.main_window.setObjectName("main_window") self.main_window.setWindowModality(QtCore.Qt.WindowModal) self.main_window.resize(450, 460) size_policy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed) size_policy.setHorizontalStretch(0) size_policy.setVerticalStretch(0) size_policy.setHeightForWidth(self.main_window.sizePolicy().hasHeightForWidth()) self.main_window.setSizePolicy(size_policy) self.main_window.setMinimumSize(QtCore.QSize(450, 460)) self.main_window.setMaximumSize(QtCore.QSize(450, 460)) self.main_window.setBaseSize(QtCore.QSize(450, 460)) # Create branding icon self.branding_icon = QtWidgets.QLabel(self.main_window) self.branding_icon.setGeometry(QtCore.QRect(20, 5, 90, 90)) self.branding_icon.setText("") self.branding_icon.setPixmap(QtGui.QPixmap("../images/senticompare_logo.png")) 
self.branding_icon.setAlignment(QtCore.Qt.AlignJustify | QtCore.Qt.AlignVCenter) self.branding_icon.setObjectName("branding_icon") # Create branding label self.branding_label = QtWidgets.QLabel(self.main_window) self.branding_label.setGeometry(QtCore.QRect(110, 5, 330, 90)) palette = QtGui.QPalette() brush = QtGui.QBrush(QtGui.QColor(81, 108, 146)) brush.setStyle(QtCore.Qt.SolidPattern) palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush) brush = QtGui.QBrush(QtGui.QColor(0, 0, 0)) brush.setStyle(QtCore.Qt.SolidPattern) palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush) brush = QtGui.QBrush(QtGui.QColor(81, 108, 146)) brush.setStyle(QtCore.Qt.SolidPattern) palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush) brush = QtGui.QBrush(QtGui.QColor(0, 0, 0)) brush.setStyle(QtCore.Qt.SolidPattern) palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush) brush = QtGui.QBrush(QtGui.QColor(127, 127, 127)) brush.setStyle(QtCore.Qt.SolidPattern) palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush) brush = QtGui.QBrush(QtGui.QColor(127, 127, 127)) brush.setStyle(QtCore.Qt.SolidPattern) palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush) self.branding_label.setPalette(palette) font = QtGui.QFont() font.setFamily("Optima") font.setPointSize(50) self.branding_label.setFont(font) self.branding_label.setObjectName("branding_label") # Create first horizontal layout self.horizontal_layout_widget_1 = QtWidgets.QWidget(self.main_window) self.horizontal_layout_widget_1.setGeometry(QtCore.QRect(10, 410, 430, 50)) self.horizontal_layout_widget_1.setObjectName("horizontal_layout_widget_1") self.horizontal_layout_1 = QtWidgets.QHBoxLayout(self.horizontal_layout_widget_1) self.horizontal_layout_1.setContentsMargins(0, 0, 0, 0) self.horizontal_layout_1.setObjectName("horizontal_layout_1") # Create run button self.run_button = QtWidgets.QPushButton(self.horizontal_layout_widget_1) 
self.run_button.setObjectName("run_button") self.run_button.clicked.connect(self.run) # Add run button to first horizontal layout self.horizontal_layout_1.addWidget(self.run_button) # Create quit button self.quit_button = QtWidgets.QPushButton(self.horizontal_layout_widget_1) self.quit_button.setObjectName("quit_button") self.quit_button.clicked.connect(self.main_window.close) # Add quit button to first horizontal layout self.horizontal_layout_1.addWidget(self.quit_button) # Create file selection tab self.select_files_tab = QtWidgets.QWidget() self.select_files_tab.setObjectName("select_files_tab") # Create second horizontal layout self.horizontal_layout_widget_2 = QtWidgets.QWidget(self.select_files_tab) self.horizontal_layout_widget_2.setGeometry(QtCore.QRect(10, 230, 230, 50)) self.horizontal_layout_widget_2.setObjectName("horizontal_layout_widget_2") self.horizontal_layout_2 = QtWidgets.QHBoxLayout(self.horizontal_layout_widget_2) self.horizontal_layout_2.setContentsMargins(0, 0, 0, 0) self.horizontal_layout_2.setObjectName("horizontal_layout_2") # Create input/output tab window font.setFamily("Myriad Pro") font.setPointSize(12) self.input_output_box = QtWidgets.QTabWidget(self.main_window) self.input_output_box.setGeometry(QtCore.QRect(10, 100, 260, 300)) self.input_output_box.setFont(font) self.input_output_box.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor)) self.input_output_box.setTabPosition(QtWidgets.QTabWidget.North) self.input_output_box.setTabShape(QtWidgets.QTabWidget.Rounded) self.input_output_box.setTabsClosable(False) self.input_output_box.setObjectName("input_output_box") # Create file view self.file_view = QtWidgets.QListView(self.select_files_tab) self.file_view.setGeometry(QtCore.QRect(10, 10, 235, 210)) self.file_view.setObjectName("file_view") # Create file view model self.file_view_model = QStandardItemModel(self.file_view) # Add file view model to file view self.file_view.setModel(self.file_view_model) # Show file view 
self.file_view.show() # Add file selection tab to input/output tab window self.input_output_box.addTab(self.select_files_tab, "") # Create add button self.add_button = QtWidgets.QPushButton(self.horizontal_layout_widget_2) self.add_button.setFont(font) self.add_button.setObjectName("add_button") self.add_button.clicked.connect(self.selectFiles) # Add add button to second horizontal layout self.horizontal_layout_2.addWidget(self.add_button) # Create delete button self.delete_button = QtWidgets.QPushButton(self.horizontal_layout_widget_2) self.delete_button.setFont(font) self.delete_button.setObjectName("delete_button") self.delete_button.clicked.connect(self.removeFiles) # Add delete button to second horizontal layout self.horizontal_layout_2.addWidget(self.delete_button) # Create manual input tab self.manual_input_tab = QtWidgets.QWidget() self.manual_input_tab.setObjectName("manual_input_tab") # Create text input self.text_input = QtWidgets.QTextEdit(self.manual_input_tab) self.text_input.setGeometry(QtCore.QRect(10, 10, 235, 250)) self.text_input.setObjectName("text_input") # Add text input to manual input tab self.input_output_box.addTab(self.manual_input_tab, "") # Create results tab self.results_tab = QtWidgets.QWidget() self.results_tab.setObjectName("results_tab") # Create results scroll box self.results_scroll_box = QtWidgets.QScrollArea(self.results_tab) self.results_scroll_box.setGeometry(QtCore.QRect(10, 10, 235, 250)) self.results_scroll_box.setWidgetResizable(True) self.results_scroll_box.setObjectName("results_scroll_box") # Create results content self.results_content = QtWidgets.QWidget() self.results_content.setGeometry(QtCore.QRect(0, 0, 230, 250)) self.results_content.setObjectName("results_content") self.results_scroll_box.setWidget(self.results_content) # Create results content text self.results_content_text = QtWidgets.QTextEdit(self.results_content) self.results_content_text.setGeometry(QtCore.QRect(-1, -1, 235, 250)) 
self.results_content_text.setReadOnly(True) self.results_content_text.setObjectName("results_content_text") # Add results tab to input/output tab window self.input_output_box.addTab(self.results_tab, "") # Disable results tab self.input_output_box.setTabEnabled(2, False) # Create first group box font.setPointSize(14) self.group_box_1 = QtWidgets.QGroupBox(self.main_window) self.group_box_1.setGeometry(QtCore.QRect(280, 110, 160, 140)) self.group_box_1.setFont(font) self.group_box_1.setTitle("") self.group_box_1.setAlignment(QtCore.Qt.AlignCenter) self.group_box_1.setFlat(False) self.group_box_1.setCheckable(False) self.group_box_1.setObjectName("group_box_1") # Create first vertical layout self.vertical_layout_widget_1 = QtWidgets.QWidget(self.group_box_1) self.vertical_layout_widget_1.setGeometry(QtCore.QRect(9, 0, 141, 141)) self.vertical_layout_widget_1.setObjectName("vertical_layout_widget_1") self.vertical_layout_1 = QtWidgets.QVBoxLayout(self.vertical_layout_widget_1) self.vertical_layout_1.setContentsMargins(0, 0, 0, 0) self.vertical_layout_1.setObjectName("vertical_layout_1") # Create pronoun checkbox self.pronoun_checkbox = QtWidgets.QCheckBox(self.vertical_layout_widget_1) self.pronoun_checkbox.setFont(font) self.pronoun_checkbox.setObjectName("pronoun_checkbox") # Add pronoun checkbox to first vertical layout self.vertical_layout_1.addWidget(self.pronoun_checkbox) # Create lexical checkbox self.lexical_checkbox = QtWidgets.QCheckBox(self.vertical_layout_widget_1) self.lexical_checkbox.setFont(font) self.lexical_checkbox.setObjectName("lexical_checkbox") # Add lexical checkbox to first vertical layout self.vertical_layout_1.addWidget(self.lexical_checkbox) # Create rule based checkbox self.rule_based_checkbox = QtWidgets.QCheckBox(self.vertical_layout_widget_1) self.rule_based_checkbox.setFont(font) self.rule_based_checkbox.setObjectName("rule_based_checkbox") # Add rule_based checkbox to first vertical layout 
self.vertical_layout_1.addWidget(self.rule_based_checkbox) # Create machine learning checkbox self.machine_learning_checkbox = QtWidgets.QCheckBox(self.vertical_layout_widget_1) self.machine_learning_checkbox.setFont(font) self.machine_learning_checkbox.setObjectName("machine_learning_checkbox") # Add machine learning checkbox to first vertical layout self.vertical_layout_1.addWidget(self.machine_learning_checkbox) # Create help scroll box self.help_scroll_box = QtWidgets.QScrollArea(self.main_window) self.help_scroll_box.setGeometry(QtCore.QRect(280, 260, 160, 140)) self.help_scroll_box.setFrameShape(QtWidgets.QFrame.StyledPanel) self.help_scroll_box.setFrameShadow(QtWidgets.QFrame.Sunken) self.help_scroll_box.setWidgetResizable(True) self.help_scroll_box.setObjectName("help_scroll_box") # Create help content self.help_content = QtWidgets.QWidget() self.help_content.setGeometry(QtCore.QRect(0, 0, 158, 138)) self.help_content.setObjectName("help_content") self.help_scroll_box.setWidget(self.help_content) # Create selected files variable self.selected_files = {} # Set current tab self.input_output_box.setCurrentIndex(0) # Retranslate UI self.retranslateUI() # Connect UI slots QtCore.QMetaObject.connectSlotsByName(self.main_window) # ============================================== # # Define retranslateUI function def retranslateUI(self): # Add text to ui elements _translate = QtCore.QCoreApplication.translate self.main_window.setWindowTitle(_translate("main_window", "SentiCompare")) self.add_button.setText(_translate("main_window", "Add")) self.delete_button.setText(_translate("main_window", "Delete")) self.input_output_box.setTabText(self.input_output_box.indexOf(self.select_files_tab), _translate("main_window", "Select Files")) self.input_output_box.setTabText(self.input_output_box.indexOf(self.manual_input_tab), _translate("main_window", "Manual Input")) self.input_output_box.setTabText(self.input_output_box.indexOf(self.results_tab), _translate("main_window", 
"Results")) self.run_button.setText(_translate("main_window", "Run")) self.quit_button.setText(_translate("main_window", "Quit")) self.pronoun_checkbox.setText(_translate("main_window", "Pronoun Usage")) self.lexical_checkbox.setText(_translate("main_window", "Lexical")) self.rule_based_checkbox.setText(_translate("main_window", "Rule Based")) self.machine_learning_checkbox.setText(_translate("main_window", "Machine Learning")) self.branding_label.setText(_translate("main_window", "SentiCompare")) # ============================================== # # Define showWindow function def showWindow(self): self.main_window.show() # ============================================== # # Define selectFiles function def selectFiles(self): # Create file dialog file_dialog = FileDialog(self.main_window) file_dialog.setFilters(["Text files (*.txt)"]) file_dialog.setDefaultFilterIndex = 0 file_dialog.setDefaultDirectory(os.path.expanduser('~')) file_dialog.exec() # Return if nothing was selected if file_dialog.getPath() == '': return # Add files from selected directory to file list elif file_dialog.getFilename()[2] == '': for file in os.listdir(file_dialog.getPath()): if file.endswith('.txt') and not file.startswith('.'): file_path = os.path.join(file_dialog.getPath(), file) if file_path not in self.selected_files: self.selected_files[file] = file_path item = QStandardItem(file) item.setCheckable(True) self.file_view_model.appendRow(item) # Add selected file to list else: if file_dialog.getPath() not in self.selected_files: self.selected_files[file_dialog.getFilename()[1]] = file_dialog.getPath() item = QStandardItem(file_dialog.getFilename()[1]) item.setCheckable(True) self.file_view_model.appendRow(item) # ============================================== # # Define removeFiles function def removeFiles(self): # Remove all checked files for i in range(self.file_view_model.rowCount() - 1, -1, -1): if self.file_view_model.item(i).checkState(): filename = 
self.file_view_model.item(i).text() del self.selected_files[filename] self.file_view_model.removeRow(i) # ============================================== # # Define run function def run(self): # Check if an analysis method is selected if not (self.pronoun_checkbox.isChecked() or self.lexical_checkbox.isChecked() or self.rule_based_checkbox.isChecked() or self.machine_learning_checkbox.isChecked()): # Create and show an error message message_box = QMessageBox() message_box.setIcon(QMessageBox.Warning) message_box.setWindowTitle("Missing Parameters") message_box.setText("You haven't selected any methods of sentiment analysis. Please select at least one " + "method from the list of options.") message_box.exec_() return # Check if the current tab is valid if self.input_output_box.currentIndex() == 2: # Create and show error message message_box = QMessageBox() message_box.setIcon(QMessageBox.Warning) message_box.setWindowTitle("Select Input") message_box.setText("You must be on the \"Select Files\" page or the \"Manual Input\" page to run " + "an analysis. 
Please select one of those pages and try again.") message_box.exec_() return else: progress_bar = QtWidgets.QProgressDialog("Running Sentiment Analysis...", "Cancel", 0, 100, self.main_window) progress_bar.setValue(0) progress_bar.setCancelButton(None) progress_bar.setWindowModality(QtCore.Qt.WindowModal) progress_bar.resize(400, 50) progress_bar.show() # Analyze selected files if self.input_output_box.currentIndex() == 0: sentiment_analyzer = SentimentAnalyzer(self.selected_files, progress_bar, pronoun=self.pronoun_checkbox.isChecked(), lexical=self.lexical_checkbox.isChecked(), rule_based=self.rule_based_checkbox.isChecked(), machine_learning=self.machine_learning_checkbox.isChecked()) # Analyze manual input else: sentiment_analyzer = SentimentAnalyzer(self.text_input.toPlainText(), progress_bar, pronoun=self.pronoun_checkbox.isChecked(), lexical=self.lexical_checkbox.isChecked(), rule_based=self.rule_based_checkbox.isChecked(), machine_learning=self.machine_learning_checkbox.isChecked()) results = sentiment_analyzer.runAnalyses() progress_bar.close() if results: self.results_content_text.setText(results) self.input_output_box.setTabEnabled(2, True) self.input_output_box.setCurrentIndex(2) else: message_box = QMessageBox() message_box.setIcon(QMessageBox.Warning) message_box.setWindowTitle("Missing Input") message_box.setText("You haven't added any input to analyze. Please select one or more files or " + "input some data manually.") message_box.exec_() return # ================================================== # # EOF # # ================================================== #
flexible
{ "blob_id": "a555226b14223dca688d10b811eb36fb229360ce", "index": 2457, "step-1": "<mask token>\n\n\nclass UIMainWindow(object):\n <mask token>\n\n def retranslateUI(self):\n _translate = QtCore.QCoreApplication.translate\n self.main_window.setWindowTitle(_translate('main_window',\n 'SentiCompare'))\n self.add_button.setText(_translate('main_window', 'Add'))\n self.delete_button.setText(_translate('main_window', 'Delete'))\n self.input_output_box.setTabText(self.input_output_box.indexOf(self\n .select_files_tab), _translate('main_window', 'Select Files'))\n self.input_output_box.setTabText(self.input_output_box.indexOf(self\n .manual_input_tab), _translate('main_window', 'Manual Input'))\n self.input_output_box.setTabText(self.input_output_box.indexOf(self\n .results_tab), _translate('main_window', 'Results'))\n self.run_button.setText(_translate('main_window', 'Run'))\n self.quit_button.setText(_translate('main_window', 'Quit'))\n self.pronoun_checkbox.setText(_translate('main_window',\n 'Pronoun Usage'))\n self.lexical_checkbox.setText(_translate('main_window', 'Lexical'))\n self.rule_based_checkbox.setText(_translate('main_window',\n 'Rule Based'))\n self.machine_learning_checkbox.setText(_translate('main_window',\n 'Machine Learning'))\n self.branding_label.setText(_translate('main_window', 'SentiCompare'))\n\n def showWindow(self):\n self.main_window.show()\n\n def selectFiles(self):\n file_dialog = FileDialog(self.main_window)\n file_dialog.setFilters(['Text files (*.txt)'])\n file_dialog.setDefaultFilterIndex = 0\n file_dialog.setDefaultDirectory(os.path.expanduser('~'))\n file_dialog.exec()\n if file_dialog.getPath() == '':\n return\n elif file_dialog.getFilename()[2] == '':\n for file in os.listdir(file_dialog.getPath()):\n if file.endswith('.txt') and not file.startswith('.'):\n file_path = os.path.join(file_dialog.getPath(), file)\n if file_path not in self.selected_files:\n self.selected_files[file] = file_path\n item = QStandardItem(file)\n 
item.setCheckable(True)\n self.file_view_model.appendRow(item)\n elif file_dialog.getPath() not in self.selected_files:\n self.selected_files[file_dialog.getFilename()[1]\n ] = file_dialog.getPath()\n item = QStandardItem(file_dialog.getFilename()[1])\n item.setCheckable(True)\n self.file_view_model.appendRow(item)\n <mask token>\n <mask token>\n", "step-2": "<mask token>\n\n\nclass UIMainWindow(object):\n\n def __init__(self):\n font = QtGui.QFont()\n font.setFamily('Myriad Pro')\n font.setPointSize(14)\n self.main_window = QtWidgets.QWidget()\n self.main_window.setFont(font)\n self.main_window.setObjectName('main_window')\n self.main_window.setWindowModality(QtCore.Qt.WindowModal)\n self.main_window.resize(450, 460)\n size_policy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed,\n QtWidgets.QSizePolicy.Fixed)\n size_policy.setHorizontalStretch(0)\n size_policy.setVerticalStretch(0)\n size_policy.setHeightForWidth(self.main_window.sizePolicy().\n hasHeightForWidth())\n self.main_window.setSizePolicy(size_policy)\n self.main_window.setMinimumSize(QtCore.QSize(450, 460))\n self.main_window.setMaximumSize(QtCore.QSize(450, 460))\n self.main_window.setBaseSize(QtCore.QSize(450, 460))\n self.branding_icon = QtWidgets.QLabel(self.main_window)\n self.branding_icon.setGeometry(QtCore.QRect(20, 5, 90, 90))\n self.branding_icon.setText('')\n self.branding_icon.setPixmap(QtGui.QPixmap(\n '../images/senticompare_logo.png'))\n self.branding_icon.setAlignment(QtCore.Qt.AlignJustify | QtCore.Qt.\n AlignVCenter)\n self.branding_icon.setObjectName('branding_icon')\n self.branding_label = QtWidgets.QLabel(self.main_window)\n self.branding_label.setGeometry(QtCore.QRect(110, 5, 330, 90))\n palette = QtGui.QPalette()\n brush = QtGui.QBrush(QtGui.QColor(81, 108, 146))\n brush.setStyle(QtCore.Qt.SolidPattern)\n palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText,\n brush)\n brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))\n brush.setStyle(QtCore.Qt.SolidPattern)\n 
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)\n brush = QtGui.QBrush(QtGui.QColor(81, 108, 146))\n brush.setStyle(QtCore.Qt.SolidPattern)\n palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText,\n brush)\n brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))\n brush.setStyle(QtCore.Qt.SolidPattern)\n palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)\n brush = QtGui.QBrush(QtGui.QColor(127, 127, 127))\n brush.setStyle(QtCore.Qt.SolidPattern)\n palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText,\n brush)\n brush = QtGui.QBrush(QtGui.QColor(127, 127, 127))\n brush.setStyle(QtCore.Qt.SolidPattern)\n palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)\n self.branding_label.setPalette(palette)\n font = QtGui.QFont()\n font.setFamily('Optima')\n font.setPointSize(50)\n self.branding_label.setFont(font)\n self.branding_label.setObjectName('branding_label')\n self.horizontal_layout_widget_1 = QtWidgets.QWidget(self.main_window)\n self.horizontal_layout_widget_1.setGeometry(QtCore.QRect(10, 410, \n 430, 50))\n self.horizontal_layout_widget_1.setObjectName(\n 'horizontal_layout_widget_1')\n self.horizontal_layout_1 = QtWidgets.QHBoxLayout(self.\n horizontal_layout_widget_1)\n self.horizontal_layout_1.setContentsMargins(0, 0, 0, 0)\n self.horizontal_layout_1.setObjectName('horizontal_layout_1')\n self.run_button = QtWidgets.QPushButton(self.horizontal_layout_widget_1\n )\n self.run_button.setObjectName('run_button')\n self.run_button.clicked.connect(self.run)\n self.horizontal_layout_1.addWidget(self.run_button)\n self.quit_button = QtWidgets.QPushButton(self.\n horizontal_layout_widget_1)\n self.quit_button.setObjectName('quit_button')\n self.quit_button.clicked.connect(self.main_window.close)\n self.horizontal_layout_1.addWidget(self.quit_button)\n self.select_files_tab = QtWidgets.QWidget()\n self.select_files_tab.setObjectName('select_files_tab')\n self.horizontal_layout_widget_2 = 
QtWidgets.QWidget(self.\n select_files_tab)\n self.horizontal_layout_widget_2.setGeometry(QtCore.QRect(10, 230, \n 230, 50))\n self.horizontal_layout_widget_2.setObjectName(\n 'horizontal_layout_widget_2')\n self.horizontal_layout_2 = QtWidgets.QHBoxLayout(self.\n horizontal_layout_widget_2)\n self.horizontal_layout_2.setContentsMargins(0, 0, 0, 0)\n self.horizontal_layout_2.setObjectName('horizontal_layout_2')\n font.setFamily('Myriad Pro')\n font.setPointSize(12)\n self.input_output_box = QtWidgets.QTabWidget(self.main_window)\n self.input_output_box.setGeometry(QtCore.QRect(10, 100, 260, 300))\n self.input_output_box.setFont(font)\n self.input_output_box.setCursor(QtGui.QCursor(QtCore.Qt.\n PointingHandCursor))\n self.input_output_box.setTabPosition(QtWidgets.QTabWidget.North)\n self.input_output_box.setTabShape(QtWidgets.QTabWidget.Rounded)\n self.input_output_box.setTabsClosable(False)\n self.input_output_box.setObjectName('input_output_box')\n self.file_view = QtWidgets.QListView(self.select_files_tab)\n self.file_view.setGeometry(QtCore.QRect(10, 10, 235, 210))\n self.file_view.setObjectName('file_view')\n self.file_view_model = QStandardItemModel(self.file_view)\n self.file_view.setModel(self.file_view_model)\n self.file_view.show()\n self.input_output_box.addTab(self.select_files_tab, '')\n self.add_button = QtWidgets.QPushButton(self.horizontal_layout_widget_2\n )\n self.add_button.setFont(font)\n self.add_button.setObjectName('add_button')\n self.add_button.clicked.connect(self.selectFiles)\n self.horizontal_layout_2.addWidget(self.add_button)\n self.delete_button = QtWidgets.QPushButton(self.\n horizontal_layout_widget_2)\n self.delete_button.setFont(font)\n self.delete_button.setObjectName('delete_button')\n self.delete_button.clicked.connect(self.removeFiles)\n self.horizontal_layout_2.addWidget(self.delete_button)\n self.manual_input_tab = QtWidgets.QWidget()\n self.manual_input_tab.setObjectName('manual_input_tab')\n self.text_input = 
QtWidgets.QTextEdit(self.manual_input_tab)\n self.text_input.setGeometry(QtCore.QRect(10, 10, 235, 250))\n self.text_input.setObjectName('text_input')\n self.input_output_box.addTab(self.manual_input_tab, '')\n self.results_tab = QtWidgets.QWidget()\n self.results_tab.setObjectName('results_tab')\n self.results_scroll_box = QtWidgets.QScrollArea(self.results_tab)\n self.results_scroll_box.setGeometry(QtCore.QRect(10, 10, 235, 250))\n self.results_scroll_box.setWidgetResizable(True)\n self.results_scroll_box.setObjectName('results_scroll_box')\n self.results_content = QtWidgets.QWidget()\n self.results_content.setGeometry(QtCore.QRect(0, 0, 230, 250))\n self.results_content.setObjectName('results_content')\n self.results_scroll_box.setWidget(self.results_content)\n self.results_content_text = QtWidgets.QTextEdit(self.results_content)\n self.results_content_text.setGeometry(QtCore.QRect(-1, -1, 235, 250))\n self.results_content_text.setReadOnly(True)\n self.results_content_text.setObjectName('results_content_text')\n self.input_output_box.addTab(self.results_tab, '')\n self.input_output_box.setTabEnabled(2, False)\n font.setPointSize(14)\n self.group_box_1 = QtWidgets.QGroupBox(self.main_window)\n self.group_box_1.setGeometry(QtCore.QRect(280, 110, 160, 140))\n self.group_box_1.setFont(font)\n self.group_box_1.setTitle('')\n self.group_box_1.setAlignment(QtCore.Qt.AlignCenter)\n self.group_box_1.setFlat(False)\n self.group_box_1.setCheckable(False)\n self.group_box_1.setObjectName('group_box_1')\n self.vertical_layout_widget_1 = QtWidgets.QWidget(self.group_box_1)\n self.vertical_layout_widget_1.setGeometry(QtCore.QRect(9, 0, 141, 141))\n self.vertical_layout_widget_1.setObjectName('vertical_layout_widget_1')\n self.vertical_layout_1 = QtWidgets.QVBoxLayout(self.\n vertical_layout_widget_1)\n self.vertical_layout_1.setContentsMargins(0, 0, 0, 0)\n self.vertical_layout_1.setObjectName('vertical_layout_1')\n self.pronoun_checkbox = QtWidgets.QCheckBox(self.\n 
vertical_layout_widget_1)\n self.pronoun_checkbox.setFont(font)\n self.pronoun_checkbox.setObjectName('pronoun_checkbox')\n self.vertical_layout_1.addWidget(self.pronoun_checkbox)\n self.lexical_checkbox = QtWidgets.QCheckBox(self.\n vertical_layout_widget_1)\n self.lexical_checkbox.setFont(font)\n self.lexical_checkbox.setObjectName('lexical_checkbox')\n self.vertical_layout_1.addWidget(self.lexical_checkbox)\n self.rule_based_checkbox = QtWidgets.QCheckBox(self.\n vertical_layout_widget_1)\n self.rule_based_checkbox.setFont(font)\n self.rule_based_checkbox.setObjectName('rule_based_checkbox')\n self.vertical_layout_1.addWidget(self.rule_based_checkbox)\n self.machine_learning_checkbox = QtWidgets.QCheckBox(self.\n vertical_layout_widget_1)\n self.machine_learning_checkbox.setFont(font)\n self.machine_learning_checkbox.setObjectName(\n 'machine_learning_checkbox')\n self.vertical_layout_1.addWidget(self.machine_learning_checkbox)\n self.help_scroll_box = QtWidgets.QScrollArea(self.main_window)\n self.help_scroll_box.setGeometry(QtCore.QRect(280, 260, 160, 140))\n self.help_scroll_box.setFrameShape(QtWidgets.QFrame.StyledPanel)\n self.help_scroll_box.setFrameShadow(QtWidgets.QFrame.Sunken)\n self.help_scroll_box.setWidgetResizable(True)\n self.help_scroll_box.setObjectName('help_scroll_box')\n self.help_content = QtWidgets.QWidget()\n self.help_content.setGeometry(QtCore.QRect(0, 0, 158, 138))\n self.help_content.setObjectName('help_content')\n self.help_scroll_box.setWidget(self.help_content)\n self.selected_files = {}\n self.input_output_box.setCurrentIndex(0)\n self.retranslateUI()\n QtCore.QMetaObject.connectSlotsByName(self.main_window)\n\n def retranslateUI(self):\n _translate = QtCore.QCoreApplication.translate\n self.main_window.setWindowTitle(_translate('main_window',\n 'SentiCompare'))\n self.add_button.setText(_translate('main_window', 'Add'))\n self.delete_button.setText(_translate('main_window', 'Delete'))\n 
self.input_output_box.setTabText(self.input_output_box.indexOf(self\n .select_files_tab), _translate('main_window', 'Select Files'))\n self.input_output_box.setTabText(self.input_output_box.indexOf(self\n .manual_input_tab), _translate('main_window', 'Manual Input'))\n self.input_output_box.setTabText(self.input_output_box.indexOf(self\n .results_tab), _translate('main_window', 'Results'))\n self.run_button.setText(_translate('main_window', 'Run'))\n self.quit_button.setText(_translate('main_window', 'Quit'))\n self.pronoun_checkbox.setText(_translate('main_window',\n 'Pronoun Usage'))\n self.lexical_checkbox.setText(_translate('main_window', 'Lexical'))\n self.rule_based_checkbox.setText(_translate('main_window',\n 'Rule Based'))\n self.machine_learning_checkbox.setText(_translate('main_window',\n 'Machine Learning'))\n self.branding_label.setText(_translate('main_window', 'SentiCompare'))\n\n def showWindow(self):\n self.main_window.show()\n\n def selectFiles(self):\n file_dialog = FileDialog(self.main_window)\n file_dialog.setFilters(['Text files (*.txt)'])\n file_dialog.setDefaultFilterIndex = 0\n file_dialog.setDefaultDirectory(os.path.expanduser('~'))\n file_dialog.exec()\n if file_dialog.getPath() == '':\n return\n elif file_dialog.getFilename()[2] == '':\n for file in os.listdir(file_dialog.getPath()):\n if file.endswith('.txt') and not file.startswith('.'):\n file_path = os.path.join(file_dialog.getPath(), file)\n if file_path not in self.selected_files:\n self.selected_files[file] = file_path\n item = QStandardItem(file)\n item.setCheckable(True)\n self.file_view_model.appendRow(item)\n elif file_dialog.getPath() not in self.selected_files:\n self.selected_files[file_dialog.getFilename()[1]\n ] = file_dialog.getPath()\n item = QStandardItem(file_dialog.getFilename()[1])\n item.setCheckable(True)\n self.file_view_model.appendRow(item)\n <mask token>\n\n def run(self):\n if not (self.pronoun_checkbox.isChecked() or self.lexical_checkbox.\n isChecked() or 
self.rule_based_checkbox.isChecked() or self.\n machine_learning_checkbox.isChecked()):\n message_box = QMessageBox()\n message_box.setIcon(QMessageBox.Warning)\n message_box.setWindowTitle('Missing Parameters')\n message_box.setText(\n \"You haven't selected any methods of sentiment analysis. Please select at least one \"\n + 'method from the list of options.')\n message_box.exec_()\n return\n if self.input_output_box.currentIndex() == 2:\n message_box = QMessageBox()\n message_box.setIcon(QMessageBox.Warning)\n message_box.setWindowTitle('Select Input')\n message_box.setText(\n 'You must be on the \"Select Files\" page or the \"Manual Input\" page to run '\n +\n 'an analysis. Please select one of those pages and try again.')\n message_box.exec_()\n return\n else:\n progress_bar = QtWidgets.QProgressDialog(\n 'Running Sentiment Analysis...', 'Cancel', 0, 100, self.\n main_window)\n progress_bar.setValue(0)\n progress_bar.setCancelButton(None)\n progress_bar.setWindowModality(QtCore.Qt.WindowModal)\n progress_bar.resize(400, 50)\n progress_bar.show()\n if self.input_output_box.currentIndex() == 0:\n sentiment_analyzer = SentimentAnalyzer(self.selected_files,\n progress_bar, pronoun=self.pronoun_checkbox.isChecked(),\n lexical=self.lexical_checkbox.isChecked(), rule_based=\n self.rule_based_checkbox.isChecked(), machine_learning=\n self.machine_learning_checkbox.isChecked())\n else:\n sentiment_analyzer = SentimentAnalyzer(self.text_input.\n toPlainText(), progress_bar, pronoun=self.\n pronoun_checkbox.isChecked(), lexical=self.\n lexical_checkbox.isChecked(), rule_based=self.\n rule_based_checkbox.isChecked(), machine_learning=self.\n machine_learning_checkbox.isChecked())\n results = sentiment_analyzer.runAnalyses()\n progress_bar.close()\n if results:\n self.results_content_text.setText(results)\n self.input_output_box.setTabEnabled(2, True)\n self.input_output_box.setCurrentIndex(2)\n else:\n message_box = QMessageBox()\n 
message_box.setIcon(QMessageBox.Warning)\n message_box.setWindowTitle('Missing Input')\n message_box.setText(\n \"You haven't added any input to analyze. Please select one or more files or \"\n + 'input some data manually.')\n message_box.exec_()\n return\n", "step-3": "<mask token>\n\n\nclass UIMainWindow(object):\n\n def __init__(self):\n font = QtGui.QFont()\n font.setFamily('Myriad Pro')\n font.setPointSize(14)\n self.main_window = QtWidgets.QWidget()\n self.main_window.setFont(font)\n self.main_window.setObjectName('main_window')\n self.main_window.setWindowModality(QtCore.Qt.WindowModal)\n self.main_window.resize(450, 460)\n size_policy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed,\n QtWidgets.QSizePolicy.Fixed)\n size_policy.setHorizontalStretch(0)\n size_policy.setVerticalStretch(0)\n size_policy.setHeightForWidth(self.main_window.sizePolicy().\n hasHeightForWidth())\n self.main_window.setSizePolicy(size_policy)\n self.main_window.setMinimumSize(QtCore.QSize(450, 460))\n self.main_window.setMaximumSize(QtCore.QSize(450, 460))\n self.main_window.setBaseSize(QtCore.QSize(450, 460))\n self.branding_icon = QtWidgets.QLabel(self.main_window)\n self.branding_icon.setGeometry(QtCore.QRect(20, 5, 90, 90))\n self.branding_icon.setText('')\n self.branding_icon.setPixmap(QtGui.QPixmap(\n '../images/senticompare_logo.png'))\n self.branding_icon.setAlignment(QtCore.Qt.AlignJustify | QtCore.Qt.\n AlignVCenter)\n self.branding_icon.setObjectName('branding_icon')\n self.branding_label = QtWidgets.QLabel(self.main_window)\n self.branding_label.setGeometry(QtCore.QRect(110, 5, 330, 90))\n palette = QtGui.QPalette()\n brush = QtGui.QBrush(QtGui.QColor(81, 108, 146))\n brush.setStyle(QtCore.Qt.SolidPattern)\n palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText,\n brush)\n brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))\n brush.setStyle(QtCore.Qt.SolidPattern)\n palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)\n brush = 
QtGui.QBrush(QtGui.QColor(81, 108, 146))\n brush.setStyle(QtCore.Qt.SolidPattern)\n palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText,\n brush)\n brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))\n brush.setStyle(QtCore.Qt.SolidPattern)\n palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)\n brush = QtGui.QBrush(QtGui.QColor(127, 127, 127))\n brush.setStyle(QtCore.Qt.SolidPattern)\n palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText,\n brush)\n brush = QtGui.QBrush(QtGui.QColor(127, 127, 127))\n brush.setStyle(QtCore.Qt.SolidPattern)\n palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)\n self.branding_label.setPalette(palette)\n font = QtGui.QFont()\n font.setFamily('Optima')\n font.setPointSize(50)\n self.branding_label.setFont(font)\n self.branding_label.setObjectName('branding_label')\n self.horizontal_layout_widget_1 = QtWidgets.QWidget(self.main_window)\n self.horizontal_layout_widget_1.setGeometry(QtCore.QRect(10, 410, \n 430, 50))\n self.horizontal_layout_widget_1.setObjectName(\n 'horizontal_layout_widget_1')\n self.horizontal_layout_1 = QtWidgets.QHBoxLayout(self.\n horizontal_layout_widget_1)\n self.horizontal_layout_1.setContentsMargins(0, 0, 0, 0)\n self.horizontal_layout_1.setObjectName('horizontal_layout_1')\n self.run_button = QtWidgets.QPushButton(self.horizontal_layout_widget_1\n )\n self.run_button.setObjectName('run_button')\n self.run_button.clicked.connect(self.run)\n self.horizontal_layout_1.addWidget(self.run_button)\n self.quit_button = QtWidgets.QPushButton(self.\n horizontal_layout_widget_1)\n self.quit_button.setObjectName('quit_button')\n self.quit_button.clicked.connect(self.main_window.close)\n self.horizontal_layout_1.addWidget(self.quit_button)\n self.select_files_tab = QtWidgets.QWidget()\n self.select_files_tab.setObjectName('select_files_tab')\n self.horizontal_layout_widget_2 = QtWidgets.QWidget(self.\n select_files_tab)\n 
self.horizontal_layout_widget_2.setGeometry(QtCore.QRect(10, 230, \n 230, 50))\n self.horizontal_layout_widget_2.setObjectName(\n 'horizontal_layout_widget_2')\n self.horizontal_layout_2 = QtWidgets.QHBoxLayout(self.\n horizontal_layout_widget_2)\n self.horizontal_layout_2.setContentsMargins(0, 0, 0, 0)\n self.horizontal_layout_2.setObjectName('horizontal_layout_2')\n font.setFamily('Myriad Pro')\n font.setPointSize(12)\n self.input_output_box = QtWidgets.QTabWidget(self.main_window)\n self.input_output_box.setGeometry(QtCore.QRect(10, 100, 260, 300))\n self.input_output_box.setFont(font)\n self.input_output_box.setCursor(QtGui.QCursor(QtCore.Qt.\n PointingHandCursor))\n self.input_output_box.setTabPosition(QtWidgets.QTabWidget.North)\n self.input_output_box.setTabShape(QtWidgets.QTabWidget.Rounded)\n self.input_output_box.setTabsClosable(False)\n self.input_output_box.setObjectName('input_output_box')\n self.file_view = QtWidgets.QListView(self.select_files_tab)\n self.file_view.setGeometry(QtCore.QRect(10, 10, 235, 210))\n self.file_view.setObjectName('file_view')\n self.file_view_model = QStandardItemModel(self.file_view)\n self.file_view.setModel(self.file_view_model)\n self.file_view.show()\n self.input_output_box.addTab(self.select_files_tab, '')\n self.add_button = QtWidgets.QPushButton(self.horizontal_layout_widget_2\n )\n self.add_button.setFont(font)\n self.add_button.setObjectName('add_button')\n self.add_button.clicked.connect(self.selectFiles)\n self.horizontal_layout_2.addWidget(self.add_button)\n self.delete_button = QtWidgets.QPushButton(self.\n horizontal_layout_widget_2)\n self.delete_button.setFont(font)\n self.delete_button.setObjectName('delete_button')\n self.delete_button.clicked.connect(self.removeFiles)\n self.horizontal_layout_2.addWidget(self.delete_button)\n self.manual_input_tab = QtWidgets.QWidget()\n self.manual_input_tab.setObjectName('manual_input_tab')\n self.text_input = QtWidgets.QTextEdit(self.manual_input_tab)\n 
self.text_input.setGeometry(QtCore.QRect(10, 10, 235, 250))\n self.text_input.setObjectName('text_input')\n self.input_output_box.addTab(self.manual_input_tab, '')\n self.results_tab = QtWidgets.QWidget()\n self.results_tab.setObjectName('results_tab')\n self.results_scroll_box = QtWidgets.QScrollArea(self.results_tab)\n self.results_scroll_box.setGeometry(QtCore.QRect(10, 10, 235, 250))\n self.results_scroll_box.setWidgetResizable(True)\n self.results_scroll_box.setObjectName('results_scroll_box')\n self.results_content = QtWidgets.QWidget()\n self.results_content.setGeometry(QtCore.QRect(0, 0, 230, 250))\n self.results_content.setObjectName('results_content')\n self.results_scroll_box.setWidget(self.results_content)\n self.results_content_text = QtWidgets.QTextEdit(self.results_content)\n self.results_content_text.setGeometry(QtCore.QRect(-1, -1, 235, 250))\n self.results_content_text.setReadOnly(True)\n self.results_content_text.setObjectName('results_content_text')\n self.input_output_box.addTab(self.results_tab, '')\n self.input_output_box.setTabEnabled(2, False)\n font.setPointSize(14)\n self.group_box_1 = QtWidgets.QGroupBox(self.main_window)\n self.group_box_1.setGeometry(QtCore.QRect(280, 110, 160, 140))\n self.group_box_1.setFont(font)\n self.group_box_1.setTitle('')\n self.group_box_1.setAlignment(QtCore.Qt.AlignCenter)\n self.group_box_1.setFlat(False)\n self.group_box_1.setCheckable(False)\n self.group_box_1.setObjectName('group_box_1')\n self.vertical_layout_widget_1 = QtWidgets.QWidget(self.group_box_1)\n self.vertical_layout_widget_1.setGeometry(QtCore.QRect(9, 0, 141, 141))\n self.vertical_layout_widget_1.setObjectName('vertical_layout_widget_1')\n self.vertical_layout_1 = QtWidgets.QVBoxLayout(self.\n vertical_layout_widget_1)\n self.vertical_layout_1.setContentsMargins(0, 0, 0, 0)\n self.vertical_layout_1.setObjectName('vertical_layout_1')\n self.pronoun_checkbox = QtWidgets.QCheckBox(self.\n vertical_layout_widget_1)\n 
self.pronoun_checkbox.setFont(font)\n self.pronoun_checkbox.setObjectName('pronoun_checkbox')\n self.vertical_layout_1.addWidget(self.pronoun_checkbox)\n self.lexical_checkbox = QtWidgets.QCheckBox(self.\n vertical_layout_widget_1)\n self.lexical_checkbox.setFont(font)\n self.lexical_checkbox.setObjectName('lexical_checkbox')\n self.vertical_layout_1.addWidget(self.lexical_checkbox)\n self.rule_based_checkbox = QtWidgets.QCheckBox(self.\n vertical_layout_widget_1)\n self.rule_based_checkbox.setFont(font)\n self.rule_based_checkbox.setObjectName('rule_based_checkbox')\n self.vertical_layout_1.addWidget(self.rule_based_checkbox)\n self.machine_learning_checkbox = QtWidgets.QCheckBox(self.\n vertical_layout_widget_1)\n self.machine_learning_checkbox.setFont(font)\n self.machine_learning_checkbox.setObjectName(\n 'machine_learning_checkbox')\n self.vertical_layout_1.addWidget(self.machine_learning_checkbox)\n self.help_scroll_box = QtWidgets.QScrollArea(self.main_window)\n self.help_scroll_box.setGeometry(QtCore.QRect(280, 260, 160, 140))\n self.help_scroll_box.setFrameShape(QtWidgets.QFrame.StyledPanel)\n self.help_scroll_box.setFrameShadow(QtWidgets.QFrame.Sunken)\n self.help_scroll_box.setWidgetResizable(True)\n self.help_scroll_box.setObjectName('help_scroll_box')\n self.help_content = QtWidgets.QWidget()\n self.help_content.setGeometry(QtCore.QRect(0, 0, 158, 138))\n self.help_content.setObjectName('help_content')\n self.help_scroll_box.setWidget(self.help_content)\n self.selected_files = {}\n self.input_output_box.setCurrentIndex(0)\n self.retranslateUI()\n QtCore.QMetaObject.connectSlotsByName(self.main_window)\n\n def retranslateUI(self):\n _translate = QtCore.QCoreApplication.translate\n self.main_window.setWindowTitle(_translate('main_window',\n 'SentiCompare'))\n self.add_button.setText(_translate('main_window', 'Add'))\n self.delete_button.setText(_translate('main_window', 'Delete'))\n self.input_output_box.setTabText(self.input_output_box.indexOf(self\n 
.select_files_tab), _translate('main_window', 'Select Files'))\n self.input_output_box.setTabText(self.input_output_box.indexOf(self\n .manual_input_tab), _translate('main_window', 'Manual Input'))\n self.input_output_box.setTabText(self.input_output_box.indexOf(self\n .results_tab), _translate('main_window', 'Results'))\n self.run_button.setText(_translate('main_window', 'Run'))\n self.quit_button.setText(_translate('main_window', 'Quit'))\n self.pronoun_checkbox.setText(_translate('main_window',\n 'Pronoun Usage'))\n self.lexical_checkbox.setText(_translate('main_window', 'Lexical'))\n self.rule_based_checkbox.setText(_translate('main_window',\n 'Rule Based'))\n self.machine_learning_checkbox.setText(_translate('main_window',\n 'Machine Learning'))\n self.branding_label.setText(_translate('main_window', 'SentiCompare'))\n\n def showWindow(self):\n self.main_window.show()\n\n def selectFiles(self):\n file_dialog = FileDialog(self.main_window)\n file_dialog.setFilters(['Text files (*.txt)'])\n file_dialog.setDefaultFilterIndex = 0\n file_dialog.setDefaultDirectory(os.path.expanduser('~'))\n file_dialog.exec()\n if file_dialog.getPath() == '':\n return\n elif file_dialog.getFilename()[2] == '':\n for file in os.listdir(file_dialog.getPath()):\n if file.endswith('.txt') and not file.startswith('.'):\n file_path = os.path.join(file_dialog.getPath(), file)\n if file_path not in self.selected_files:\n self.selected_files[file] = file_path\n item = QStandardItem(file)\n item.setCheckable(True)\n self.file_view_model.appendRow(item)\n elif file_dialog.getPath() not in self.selected_files:\n self.selected_files[file_dialog.getFilename()[1]\n ] = file_dialog.getPath()\n item = QStandardItem(file_dialog.getFilename()[1])\n item.setCheckable(True)\n self.file_view_model.appendRow(item)\n\n def removeFiles(self):\n for i in range(self.file_view_model.rowCount() - 1, -1, -1):\n if self.file_view_model.item(i).checkState():\n filename = self.file_view_model.item(i).text()\n del 
self.selected_files[filename]\n self.file_view_model.removeRow(i)\n\n def run(self):\n if not (self.pronoun_checkbox.isChecked() or self.lexical_checkbox.\n isChecked() or self.rule_based_checkbox.isChecked() or self.\n machine_learning_checkbox.isChecked()):\n message_box = QMessageBox()\n message_box.setIcon(QMessageBox.Warning)\n message_box.setWindowTitle('Missing Parameters')\n message_box.setText(\n \"You haven't selected any methods of sentiment analysis. Please select at least one \"\n + 'method from the list of options.')\n message_box.exec_()\n return\n if self.input_output_box.currentIndex() == 2:\n message_box = QMessageBox()\n message_box.setIcon(QMessageBox.Warning)\n message_box.setWindowTitle('Select Input')\n message_box.setText(\n 'You must be on the \"Select Files\" page or the \"Manual Input\" page to run '\n +\n 'an analysis. Please select one of those pages and try again.')\n message_box.exec_()\n return\n else:\n progress_bar = QtWidgets.QProgressDialog(\n 'Running Sentiment Analysis...', 'Cancel', 0, 100, self.\n main_window)\n progress_bar.setValue(0)\n progress_bar.setCancelButton(None)\n progress_bar.setWindowModality(QtCore.Qt.WindowModal)\n progress_bar.resize(400, 50)\n progress_bar.show()\n if self.input_output_box.currentIndex() == 0:\n sentiment_analyzer = SentimentAnalyzer(self.selected_files,\n progress_bar, pronoun=self.pronoun_checkbox.isChecked(),\n lexical=self.lexical_checkbox.isChecked(), rule_based=\n self.rule_based_checkbox.isChecked(), machine_learning=\n self.machine_learning_checkbox.isChecked())\n else:\n sentiment_analyzer = SentimentAnalyzer(self.text_input.\n toPlainText(), progress_bar, pronoun=self.\n pronoun_checkbox.isChecked(), lexical=self.\n lexical_checkbox.isChecked(), rule_based=self.\n rule_based_checkbox.isChecked(), machine_learning=self.\n machine_learning_checkbox.isChecked())\n results = sentiment_analyzer.runAnalyses()\n progress_bar.close()\n if results:\n 
self.results_content_text.setText(results)\n self.input_output_box.setTabEnabled(2, True)\n self.input_output_box.setCurrentIndex(2)\n else:\n message_box = QMessageBox()\n message_box.setIcon(QMessageBox.Warning)\n message_box.setWindowTitle('Missing Input')\n message_box.setText(\n \"You haven't added any input to analyze. Please select one or more files or \"\n + 'input some data manually.')\n message_box.exec_()\n return\n", "step-4": "import os\nfrom PyQt5 import QtCore, QtGui, QtWidgets\nfrom PyQt5.QtGui import QStandardItem, QStandardItemModel\nfrom PyQt5.QtWidgets import QMessageBox\nfrom src import FileDialog, SentimentAnalyzer\n\n\nclass UIMainWindow(object):\n\n def __init__(self):\n font = QtGui.QFont()\n font.setFamily('Myriad Pro')\n font.setPointSize(14)\n self.main_window = QtWidgets.QWidget()\n self.main_window.setFont(font)\n self.main_window.setObjectName('main_window')\n self.main_window.setWindowModality(QtCore.Qt.WindowModal)\n self.main_window.resize(450, 460)\n size_policy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed,\n QtWidgets.QSizePolicy.Fixed)\n size_policy.setHorizontalStretch(0)\n size_policy.setVerticalStretch(0)\n size_policy.setHeightForWidth(self.main_window.sizePolicy().\n hasHeightForWidth())\n self.main_window.setSizePolicy(size_policy)\n self.main_window.setMinimumSize(QtCore.QSize(450, 460))\n self.main_window.setMaximumSize(QtCore.QSize(450, 460))\n self.main_window.setBaseSize(QtCore.QSize(450, 460))\n self.branding_icon = QtWidgets.QLabel(self.main_window)\n self.branding_icon.setGeometry(QtCore.QRect(20, 5, 90, 90))\n self.branding_icon.setText('')\n self.branding_icon.setPixmap(QtGui.QPixmap(\n '../images/senticompare_logo.png'))\n self.branding_icon.setAlignment(QtCore.Qt.AlignJustify | QtCore.Qt.\n AlignVCenter)\n self.branding_icon.setObjectName('branding_icon')\n self.branding_label = QtWidgets.QLabel(self.main_window)\n self.branding_label.setGeometry(QtCore.QRect(110, 5, 330, 90))\n palette = 
QtGui.QPalette()\n brush = QtGui.QBrush(QtGui.QColor(81, 108, 146))\n brush.setStyle(QtCore.Qt.SolidPattern)\n palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText,\n brush)\n brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))\n brush.setStyle(QtCore.Qt.SolidPattern)\n palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)\n brush = QtGui.QBrush(QtGui.QColor(81, 108, 146))\n brush.setStyle(QtCore.Qt.SolidPattern)\n palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText,\n brush)\n brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))\n brush.setStyle(QtCore.Qt.SolidPattern)\n palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)\n brush = QtGui.QBrush(QtGui.QColor(127, 127, 127))\n brush.setStyle(QtCore.Qt.SolidPattern)\n palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText,\n brush)\n brush = QtGui.QBrush(QtGui.QColor(127, 127, 127))\n brush.setStyle(QtCore.Qt.SolidPattern)\n palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)\n self.branding_label.setPalette(palette)\n font = QtGui.QFont()\n font.setFamily('Optima')\n font.setPointSize(50)\n self.branding_label.setFont(font)\n self.branding_label.setObjectName('branding_label')\n self.horizontal_layout_widget_1 = QtWidgets.QWidget(self.main_window)\n self.horizontal_layout_widget_1.setGeometry(QtCore.QRect(10, 410, \n 430, 50))\n self.horizontal_layout_widget_1.setObjectName(\n 'horizontal_layout_widget_1')\n self.horizontal_layout_1 = QtWidgets.QHBoxLayout(self.\n horizontal_layout_widget_1)\n self.horizontal_layout_1.setContentsMargins(0, 0, 0, 0)\n self.horizontal_layout_1.setObjectName('horizontal_layout_1')\n self.run_button = QtWidgets.QPushButton(self.horizontal_layout_widget_1\n )\n self.run_button.setObjectName('run_button')\n self.run_button.clicked.connect(self.run)\n self.horizontal_layout_1.addWidget(self.run_button)\n self.quit_button = QtWidgets.QPushButton(self.\n horizontal_layout_widget_1)\n 
self.quit_button.setObjectName('quit_button')\n self.quit_button.clicked.connect(self.main_window.close)\n self.horizontal_layout_1.addWidget(self.quit_button)\n self.select_files_tab = QtWidgets.QWidget()\n self.select_files_tab.setObjectName('select_files_tab')\n self.horizontal_layout_widget_2 = QtWidgets.QWidget(self.\n select_files_tab)\n self.horizontal_layout_widget_2.setGeometry(QtCore.QRect(10, 230, \n 230, 50))\n self.horizontal_layout_widget_2.setObjectName(\n 'horizontal_layout_widget_2')\n self.horizontal_layout_2 = QtWidgets.QHBoxLayout(self.\n horizontal_layout_widget_2)\n self.horizontal_layout_2.setContentsMargins(0, 0, 0, 0)\n self.horizontal_layout_2.setObjectName('horizontal_layout_2')\n font.setFamily('Myriad Pro')\n font.setPointSize(12)\n self.input_output_box = QtWidgets.QTabWidget(self.main_window)\n self.input_output_box.setGeometry(QtCore.QRect(10, 100, 260, 300))\n self.input_output_box.setFont(font)\n self.input_output_box.setCursor(QtGui.QCursor(QtCore.Qt.\n PointingHandCursor))\n self.input_output_box.setTabPosition(QtWidgets.QTabWidget.North)\n self.input_output_box.setTabShape(QtWidgets.QTabWidget.Rounded)\n self.input_output_box.setTabsClosable(False)\n self.input_output_box.setObjectName('input_output_box')\n self.file_view = QtWidgets.QListView(self.select_files_tab)\n self.file_view.setGeometry(QtCore.QRect(10, 10, 235, 210))\n self.file_view.setObjectName('file_view')\n self.file_view_model = QStandardItemModel(self.file_view)\n self.file_view.setModel(self.file_view_model)\n self.file_view.show()\n self.input_output_box.addTab(self.select_files_tab, '')\n self.add_button = QtWidgets.QPushButton(self.horizontal_layout_widget_2\n )\n self.add_button.setFont(font)\n self.add_button.setObjectName('add_button')\n self.add_button.clicked.connect(self.selectFiles)\n self.horizontal_layout_2.addWidget(self.add_button)\n self.delete_button = QtWidgets.QPushButton(self.\n horizontal_layout_widget_2)\n self.delete_button.setFont(font)\n 
self.delete_button.setObjectName('delete_button')\n self.delete_button.clicked.connect(self.removeFiles)\n self.horizontal_layout_2.addWidget(self.delete_button)\n self.manual_input_tab = QtWidgets.QWidget()\n self.manual_input_tab.setObjectName('manual_input_tab')\n self.text_input = QtWidgets.QTextEdit(self.manual_input_tab)\n self.text_input.setGeometry(QtCore.QRect(10, 10, 235, 250))\n self.text_input.setObjectName('text_input')\n self.input_output_box.addTab(self.manual_input_tab, '')\n self.results_tab = QtWidgets.QWidget()\n self.results_tab.setObjectName('results_tab')\n self.results_scroll_box = QtWidgets.QScrollArea(self.results_tab)\n self.results_scroll_box.setGeometry(QtCore.QRect(10, 10, 235, 250))\n self.results_scroll_box.setWidgetResizable(True)\n self.results_scroll_box.setObjectName('results_scroll_box')\n self.results_content = QtWidgets.QWidget()\n self.results_content.setGeometry(QtCore.QRect(0, 0, 230, 250))\n self.results_content.setObjectName('results_content')\n self.results_scroll_box.setWidget(self.results_content)\n self.results_content_text = QtWidgets.QTextEdit(self.results_content)\n self.results_content_text.setGeometry(QtCore.QRect(-1, -1, 235, 250))\n self.results_content_text.setReadOnly(True)\n self.results_content_text.setObjectName('results_content_text')\n self.input_output_box.addTab(self.results_tab, '')\n self.input_output_box.setTabEnabled(2, False)\n font.setPointSize(14)\n self.group_box_1 = QtWidgets.QGroupBox(self.main_window)\n self.group_box_1.setGeometry(QtCore.QRect(280, 110, 160, 140))\n self.group_box_1.setFont(font)\n self.group_box_1.setTitle('')\n self.group_box_1.setAlignment(QtCore.Qt.AlignCenter)\n self.group_box_1.setFlat(False)\n self.group_box_1.setCheckable(False)\n self.group_box_1.setObjectName('group_box_1')\n self.vertical_layout_widget_1 = QtWidgets.QWidget(self.group_box_1)\n self.vertical_layout_widget_1.setGeometry(QtCore.QRect(9, 0, 141, 141))\n 
self.vertical_layout_widget_1.setObjectName('vertical_layout_widget_1')\n self.vertical_layout_1 = QtWidgets.QVBoxLayout(self.\n vertical_layout_widget_1)\n self.vertical_layout_1.setContentsMargins(0, 0, 0, 0)\n self.vertical_layout_1.setObjectName('vertical_layout_1')\n self.pronoun_checkbox = QtWidgets.QCheckBox(self.\n vertical_layout_widget_1)\n self.pronoun_checkbox.setFont(font)\n self.pronoun_checkbox.setObjectName('pronoun_checkbox')\n self.vertical_layout_1.addWidget(self.pronoun_checkbox)\n self.lexical_checkbox = QtWidgets.QCheckBox(self.\n vertical_layout_widget_1)\n self.lexical_checkbox.setFont(font)\n self.lexical_checkbox.setObjectName('lexical_checkbox')\n self.vertical_layout_1.addWidget(self.lexical_checkbox)\n self.rule_based_checkbox = QtWidgets.QCheckBox(self.\n vertical_layout_widget_1)\n self.rule_based_checkbox.setFont(font)\n self.rule_based_checkbox.setObjectName('rule_based_checkbox')\n self.vertical_layout_1.addWidget(self.rule_based_checkbox)\n self.machine_learning_checkbox = QtWidgets.QCheckBox(self.\n vertical_layout_widget_1)\n self.machine_learning_checkbox.setFont(font)\n self.machine_learning_checkbox.setObjectName(\n 'machine_learning_checkbox')\n self.vertical_layout_1.addWidget(self.machine_learning_checkbox)\n self.help_scroll_box = QtWidgets.QScrollArea(self.main_window)\n self.help_scroll_box.setGeometry(QtCore.QRect(280, 260, 160, 140))\n self.help_scroll_box.setFrameShape(QtWidgets.QFrame.StyledPanel)\n self.help_scroll_box.setFrameShadow(QtWidgets.QFrame.Sunken)\n self.help_scroll_box.setWidgetResizable(True)\n self.help_scroll_box.setObjectName('help_scroll_box')\n self.help_content = QtWidgets.QWidget()\n self.help_content.setGeometry(QtCore.QRect(0, 0, 158, 138))\n self.help_content.setObjectName('help_content')\n self.help_scroll_box.setWidget(self.help_content)\n self.selected_files = {}\n self.input_output_box.setCurrentIndex(0)\n self.retranslateUI()\n QtCore.QMetaObject.connectSlotsByName(self.main_window)\n\n 
def retranslateUI(self):\n _translate = QtCore.QCoreApplication.translate\n self.main_window.setWindowTitle(_translate('main_window',\n 'SentiCompare'))\n self.add_button.setText(_translate('main_window', 'Add'))\n self.delete_button.setText(_translate('main_window', 'Delete'))\n self.input_output_box.setTabText(self.input_output_box.indexOf(self\n .select_files_tab), _translate('main_window', 'Select Files'))\n self.input_output_box.setTabText(self.input_output_box.indexOf(self\n .manual_input_tab), _translate('main_window', 'Manual Input'))\n self.input_output_box.setTabText(self.input_output_box.indexOf(self\n .results_tab), _translate('main_window', 'Results'))\n self.run_button.setText(_translate('main_window', 'Run'))\n self.quit_button.setText(_translate('main_window', 'Quit'))\n self.pronoun_checkbox.setText(_translate('main_window',\n 'Pronoun Usage'))\n self.lexical_checkbox.setText(_translate('main_window', 'Lexical'))\n self.rule_based_checkbox.setText(_translate('main_window',\n 'Rule Based'))\n self.machine_learning_checkbox.setText(_translate('main_window',\n 'Machine Learning'))\n self.branding_label.setText(_translate('main_window', 'SentiCompare'))\n\n def showWindow(self):\n self.main_window.show()\n\n def selectFiles(self):\n file_dialog = FileDialog(self.main_window)\n file_dialog.setFilters(['Text files (*.txt)'])\n file_dialog.setDefaultFilterIndex = 0\n file_dialog.setDefaultDirectory(os.path.expanduser('~'))\n file_dialog.exec()\n if file_dialog.getPath() == '':\n return\n elif file_dialog.getFilename()[2] == '':\n for file in os.listdir(file_dialog.getPath()):\n if file.endswith('.txt') and not file.startswith('.'):\n file_path = os.path.join(file_dialog.getPath(), file)\n if file_path not in self.selected_files:\n self.selected_files[file] = file_path\n item = QStandardItem(file)\n item.setCheckable(True)\n self.file_view_model.appendRow(item)\n elif file_dialog.getPath() not in self.selected_files:\n 
self.selected_files[file_dialog.getFilename()[1]\n ] = file_dialog.getPath()\n item = QStandardItem(file_dialog.getFilename()[1])\n item.setCheckable(True)\n self.file_view_model.appendRow(item)\n\n def removeFiles(self):\n for i in range(self.file_view_model.rowCount() - 1, -1, -1):\n if self.file_view_model.item(i).checkState():\n filename = self.file_view_model.item(i).text()\n del self.selected_files[filename]\n self.file_view_model.removeRow(i)\n\n def run(self):\n if not (self.pronoun_checkbox.isChecked() or self.lexical_checkbox.\n isChecked() or self.rule_based_checkbox.isChecked() or self.\n machine_learning_checkbox.isChecked()):\n message_box = QMessageBox()\n message_box.setIcon(QMessageBox.Warning)\n message_box.setWindowTitle('Missing Parameters')\n message_box.setText(\n \"You haven't selected any methods of sentiment analysis. Please select at least one \"\n + 'method from the list of options.')\n message_box.exec_()\n return\n if self.input_output_box.currentIndex() == 2:\n message_box = QMessageBox()\n message_box.setIcon(QMessageBox.Warning)\n message_box.setWindowTitle('Select Input')\n message_box.setText(\n 'You must be on the \"Select Files\" page or the \"Manual Input\" page to run '\n +\n 'an analysis. 
Please select one of those pages and try again.')\n message_box.exec_()\n return\n else:\n progress_bar = QtWidgets.QProgressDialog(\n 'Running Sentiment Analysis...', 'Cancel', 0, 100, self.\n main_window)\n progress_bar.setValue(0)\n progress_bar.setCancelButton(None)\n progress_bar.setWindowModality(QtCore.Qt.WindowModal)\n progress_bar.resize(400, 50)\n progress_bar.show()\n if self.input_output_box.currentIndex() == 0:\n sentiment_analyzer = SentimentAnalyzer(self.selected_files,\n progress_bar, pronoun=self.pronoun_checkbox.isChecked(),\n lexical=self.lexical_checkbox.isChecked(), rule_based=\n self.rule_based_checkbox.isChecked(), machine_learning=\n self.machine_learning_checkbox.isChecked())\n else:\n sentiment_analyzer = SentimentAnalyzer(self.text_input.\n toPlainText(), progress_bar, pronoun=self.\n pronoun_checkbox.isChecked(), lexical=self.\n lexical_checkbox.isChecked(), rule_based=self.\n rule_based_checkbox.isChecked(), machine_learning=self.\n machine_learning_checkbox.isChecked())\n results = sentiment_analyzer.runAnalyses()\n progress_bar.close()\n if results:\n self.results_content_text.setText(results)\n self.input_output_box.setTabEnabled(2, True)\n self.input_output_box.setCurrentIndex(2)\n else:\n message_box = QMessageBox()\n message_box.setIcon(QMessageBox.Warning)\n message_box.setWindowTitle('Missing Input')\n message_box.setText(\n \"You haven't added any input to analyze. 
Please select one or more files or \"\n + 'input some data manually.')\n message_box.exec_()\n return\n", "step-5": "# ================================================== #\n# MAIN WINDOW #\n# ================================================== #\n# Author: Brady Hammond #\n# Created: 11/21/2017 #\n# Last Edited: N/A #\n# Last Edited By: N/A #\n# ================================================== #\n#                     FILE SETUP                     #\n# ================================================== #\n\n\n# Import statements\nimport os\nfrom PyQt5 import QtCore, QtGui, QtWidgets\nfrom PyQt5.QtGui import QStandardItem, QStandardItemModel\nfrom PyQt5.QtWidgets import QMessageBox\nfrom src import FileDialog, SentimentAnalyzer\n\n\n# ================================================== #\n#                 CLASS DEFINITION               #\n# ================================================== #\n\n\n# UIMainWindow class definition\nclass UIMainWindow(object):\n\n # Define __init__ function\n def __init__(self):\n # Create main window\n font = QtGui.QFont()\n font.setFamily(\"Myriad Pro\")\n font.setPointSize(14)\n self.main_window = QtWidgets.QWidget()\n self.main_window.setFont(font)\n self.main_window.setObjectName(\"main_window\")\n self.main_window.setWindowModality(QtCore.Qt.WindowModal)\n self.main_window.resize(450, 460)\n size_policy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)\n size_policy.setHorizontalStretch(0)\n size_policy.setVerticalStretch(0)\n size_policy.setHeightForWidth(self.main_window.sizePolicy().hasHeightForWidth())\n self.main_window.setSizePolicy(size_policy)\n self.main_window.setMinimumSize(QtCore.QSize(450, 460))\n self.main_window.setMaximumSize(QtCore.QSize(450, 460))\n self.main_window.setBaseSize(QtCore.QSize(450, 460))\n\n # Create branding icon\n self.branding_icon = QtWidgets.QLabel(self.main_window)\n self.branding_icon.setGeometry(QtCore.QRect(20, 5, 90, 90))\n 
self.branding_icon.setText(\"\")\n self.branding_icon.setPixmap(QtGui.QPixmap(\"../images/senticompare_logo.png\"))\n self.branding_icon.setAlignment(QtCore.Qt.AlignJustify | QtCore.Qt.AlignVCenter)\n self.branding_icon.setObjectName(\"branding_icon\")\n\n # Create branding label\n self.branding_label = QtWidgets.QLabel(self.main_window)\n self.branding_label.setGeometry(QtCore.QRect(110, 5, 330, 90))\n palette = QtGui.QPalette()\n brush = QtGui.QBrush(QtGui.QColor(81, 108, 146))\n brush.setStyle(QtCore.Qt.SolidPattern)\n palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)\n brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))\n brush.setStyle(QtCore.Qt.SolidPattern)\n palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)\n brush = QtGui.QBrush(QtGui.QColor(81, 108, 146))\n brush.setStyle(QtCore.Qt.SolidPattern)\n palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)\n brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))\n brush.setStyle(QtCore.Qt.SolidPattern)\n palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)\n brush = QtGui.QBrush(QtGui.QColor(127, 127, 127))\n brush.setStyle(QtCore.Qt.SolidPattern)\n palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)\n brush = QtGui.QBrush(QtGui.QColor(127, 127, 127))\n brush.setStyle(QtCore.Qt.SolidPattern)\n palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)\n self.branding_label.setPalette(palette)\n font = QtGui.QFont()\n font.setFamily(\"Optima\")\n font.setPointSize(50)\n self.branding_label.setFont(font)\n self.branding_label.setObjectName(\"branding_label\")\n\n # Create first horizontal layout\n self.horizontal_layout_widget_1 = QtWidgets.QWidget(self.main_window)\n self.horizontal_layout_widget_1.setGeometry(QtCore.QRect(10, 410, 430, 50))\n self.horizontal_layout_widget_1.setObjectName(\"horizontal_layout_widget_1\")\n self.horizontal_layout_1 = QtWidgets.QHBoxLayout(self.horizontal_layout_widget_1)\n 
self.horizontal_layout_1.setContentsMargins(0, 0, 0, 0)\n self.horizontal_layout_1.setObjectName(\"horizontal_layout_1\")\n\n # Create run button\n self.run_button = QtWidgets.QPushButton(self.horizontal_layout_widget_1)\n self.run_button.setObjectName(\"run_button\")\n self.run_button.clicked.connect(self.run)\n\n # Add run button to first horizontal layout\n self.horizontal_layout_1.addWidget(self.run_button)\n\n # Create quit button\n self.quit_button = QtWidgets.QPushButton(self.horizontal_layout_widget_1)\n self.quit_button.setObjectName(\"quit_button\")\n self.quit_button.clicked.connect(self.main_window.close)\n\n # Add quit button to first horizontal layout\n self.horizontal_layout_1.addWidget(self.quit_button)\n\n # Create file selection tab\n self.select_files_tab = QtWidgets.QWidget()\n self.select_files_tab.setObjectName(\"select_files_tab\")\n\n # Create second horizontal layout\n self.horizontal_layout_widget_2 = QtWidgets.QWidget(self.select_files_tab)\n self.horizontal_layout_widget_2.setGeometry(QtCore.QRect(10, 230, 230, 50))\n self.horizontal_layout_widget_2.setObjectName(\"horizontal_layout_widget_2\")\n self.horizontal_layout_2 = QtWidgets.QHBoxLayout(self.horizontal_layout_widget_2)\n self.horizontal_layout_2.setContentsMargins(0, 0, 0, 0)\n self.horizontal_layout_2.setObjectName(\"horizontal_layout_2\")\n\n # Create input/output tab window\n font.setFamily(\"Myriad Pro\")\n font.setPointSize(12)\n self.input_output_box = QtWidgets.QTabWidget(self.main_window)\n self.input_output_box.setGeometry(QtCore.QRect(10, 100, 260, 300))\n self.input_output_box.setFont(font)\n self.input_output_box.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))\n self.input_output_box.setTabPosition(QtWidgets.QTabWidget.North)\n self.input_output_box.setTabShape(QtWidgets.QTabWidget.Rounded)\n self.input_output_box.setTabsClosable(False)\n self.input_output_box.setObjectName(\"input_output_box\")\n\n # Create file view\n self.file_view = 
QtWidgets.QListView(self.select_files_tab)\n self.file_view.setGeometry(QtCore.QRect(10, 10, 235, 210))\n self.file_view.setObjectName(\"file_view\")\n\n # Create file view model\n self.file_view_model = QStandardItemModel(self.file_view)\n\n # Add file view model to file view\n self.file_view.setModel(self.file_view_model)\n\n # Show file view\n self.file_view.show()\n\n # Add file selection tab to input/output tab window\n self.input_output_box.addTab(self.select_files_tab, \"\")\n\n # Create add button\n self.add_button = QtWidgets.QPushButton(self.horizontal_layout_widget_2)\n self.add_button.setFont(font)\n self.add_button.setObjectName(\"add_button\")\n self.add_button.clicked.connect(self.selectFiles)\n\n # Add add button to second horizontal layout\n self.horizontal_layout_2.addWidget(self.add_button)\n\n # Create delete button\n self.delete_button = QtWidgets.QPushButton(self.horizontal_layout_widget_2)\n self.delete_button.setFont(font)\n self.delete_button.setObjectName(\"delete_button\")\n self.delete_button.clicked.connect(self.removeFiles)\n\n # Add delete button to second horizontal layout\n self.horizontal_layout_2.addWidget(self.delete_button)\n\n # Create manual input tab\n self.manual_input_tab = QtWidgets.QWidget()\n self.manual_input_tab.setObjectName(\"manual_input_tab\")\n\n # Create text input\n self.text_input = QtWidgets.QTextEdit(self.manual_input_tab)\n self.text_input.setGeometry(QtCore.QRect(10, 10, 235, 250))\n self.text_input.setObjectName(\"text_input\")\n\n # Add text input to manual input tab\n self.input_output_box.addTab(self.manual_input_tab, \"\")\n\n # Create results tab\n self.results_tab = QtWidgets.QWidget()\n self.results_tab.setObjectName(\"results_tab\")\n\n # Create results scroll box\n self.results_scroll_box = QtWidgets.QScrollArea(self.results_tab)\n self.results_scroll_box.setGeometry(QtCore.QRect(10, 10, 235, 250))\n self.results_scroll_box.setWidgetResizable(True)\n 
self.results_scroll_box.setObjectName(\"results_scroll_box\")\n\n # Create results content\n self.results_content = QtWidgets.QWidget()\n self.results_content.setGeometry(QtCore.QRect(0, 0, 230, 250))\n self.results_content.setObjectName(\"results_content\")\n self.results_scroll_box.setWidget(self.results_content)\n\n # Create results content text\n self.results_content_text = QtWidgets.QTextEdit(self.results_content)\n self.results_content_text.setGeometry(QtCore.QRect(-1, -1, 235, 250))\n self.results_content_text.setReadOnly(True)\n self.results_content_text.setObjectName(\"results_content_text\")\n\n # Add results tab to input/output tab window\n self.input_output_box.addTab(self.results_tab, \"\")\n\n # Disable results tab\n self.input_output_box.setTabEnabled(2, False)\n\n # Create first group box\n font.setPointSize(14)\n self.group_box_1 = QtWidgets.QGroupBox(self.main_window)\n self.group_box_1.setGeometry(QtCore.QRect(280, 110, 160, 140))\n self.group_box_1.setFont(font)\n self.group_box_1.setTitle(\"\")\n self.group_box_1.setAlignment(QtCore.Qt.AlignCenter)\n self.group_box_1.setFlat(False)\n self.group_box_1.setCheckable(False)\n self.group_box_1.setObjectName(\"group_box_1\")\n\n # Create first vertical layout\n self.vertical_layout_widget_1 = QtWidgets.QWidget(self.group_box_1)\n self.vertical_layout_widget_1.setGeometry(QtCore.QRect(9, 0, 141, 141))\n self.vertical_layout_widget_1.setObjectName(\"vertical_layout_widget_1\")\n self.vertical_layout_1 = QtWidgets.QVBoxLayout(self.vertical_layout_widget_1)\n self.vertical_layout_1.setContentsMargins(0, 0, 0, 0)\n self.vertical_layout_1.setObjectName(\"vertical_layout_1\")\n\n # Create pronoun checkbox\n self.pronoun_checkbox = QtWidgets.QCheckBox(self.vertical_layout_widget_1)\n self.pronoun_checkbox.setFont(font)\n self.pronoun_checkbox.setObjectName(\"pronoun_checkbox\")\n\n # Add pronoun checkbox to first vertical layout\n self.vertical_layout_1.addWidget(self.pronoun_checkbox)\n\n # Create lexical 
checkbox\n self.lexical_checkbox = QtWidgets.QCheckBox(self.vertical_layout_widget_1)\n self.lexical_checkbox.setFont(font)\n self.lexical_checkbox.setObjectName(\"lexical_checkbox\")\n\n # Add lexical checkbox to first vertical layout\n self.vertical_layout_1.addWidget(self.lexical_checkbox)\n\n # Create rule based checkbox\n self.rule_based_checkbox = QtWidgets.QCheckBox(self.vertical_layout_widget_1)\n self.rule_based_checkbox.setFont(font)\n self.rule_based_checkbox.setObjectName(\"rule_based_checkbox\")\n\n # Add rule_based checkbox to first vertical layout\n self.vertical_layout_1.addWidget(self.rule_based_checkbox)\n\n # Create machine learning checkbox\n self.machine_learning_checkbox = QtWidgets.QCheckBox(self.vertical_layout_widget_1)\n self.machine_learning_checkbox.setFont(font)\n self.machine_learning_checkbox.setObjectName(\"machine_learning_checkbox\")\n\n # Add machine learning checkbox to first vertical layout\n self.vertical_layout_1.addWidget(self.machine_learning_checkbox)\n\n # Create help scroll box\n self.help_scroll_box = QtWidgets.QScrollArea(self.main_window)\n self.help_scroll_box.setGeometry(QtCore.QRect(280, 260, 160, 140))\n self.help_scroll_box.setFrameShape(QtWidgets.QFrame.StyledPanel)\n self.help_scroll_box.setFrameShadow(QtWidgets.QFrame.Sunken)\n self.help_scroll_box.setWidgetResizable(True)\n self.help_scroll_box.setObjectName(\"help_scroll_box\")\n\n # Create help content\n self.help_content = QtWidgets.QWidget()\n self.help_content.setGeometry(QtCore.QRect(0, 0, 158, 138))\n self.help_content.setObjectName(\"help_content\")\n self.help_scroll_box.setWidget(self.help_content)\n\n # Create selected files variable\n self.selected_files = {}\n\n # Set current tab\n self.input_output_box.setCurrentIndex(0)\n\n # Retranslate UI\n self.retranslateUI()\n\n # Connect UI slots\n QtCore.QMetaObject.connectSlotsByName(self.main_window)\n\n # ============================================== #\n\n # Define retranslateUI function\n def 
retranslateUI(self):\n # Add text to ui elements\n _translate = QtCore.QCoreApplication.translate\n self.main_window.setWindowTitle(_translate(\"main_window\", \"SentiCompare\"))\n self.add_button.setText(_translate(\"main_window\", \"Add\"))\n self.delete_button.setText(_translate(\"main_window\", \"Delete\"))\n self.input_output_box.setTabText(self.input_output_box.indexOf(self.select_files_tab),\n _translate(\"main_window\", \"Select Files\"))\n self.input_output_box.setTabText(self.input_output_box.indexOf(self.manual_input_tab),\n _translate(\"main_window\", \"Manual Input\"))\n self.input_output_box.setTabText(self.input_output_box.indexOf(self.results_tab),\n _translate(\"main_window\", \"Results\"))\n self.run_button.setText(_translate(\"main_window\", \"Run\"))\n self.quit_button.setText(_translate(\"main_window\", \"Quit\"))\n self.pronoun_checkbox.setText(_translate(\"main_window\", \"Pronoun Usage\"))\n self.lexical_checkbox.setText(_translate(\"main_window\", \"Lexical\"))\n self.rule_based_checkbox.setText(_translate(\"main_window\", \"Rule Based\"))\n self.machine_learning_checkbox.setText(_translate(\"main_window\", \"Machine Learning\"))\n self.branding_label.setText(_translate(\"main_window\", \"SentiCompare\"))\n\n # ============================================== #\n\n # Define showWindow function\n def showWindow(self):\n self.main_window.show()\n\n # ============================================== #\n\n # Define selectFiles function\n def selectFiles(self):\n # Create file dialog\n file_dialog = FileDialog(self.main_window)\n file_dialog.setFilters([\"Text files (*.txt)\"])\n file_dialog.setDefaultFilterIndex = 0\n file_dialog.setDefaultDirectory(os.path.expanduser('~'))\n file_dialog.exec()\n\n # Return if nothing was selected\n if file_dialog.getPath() == '':\n return\n\n # Add files from selected directory to file list\n elif file_dialog.getFilename()[2] == '':\n for file in os.listdir(file_dialog.getPath()):\n if file.endswith('.txt') and 
not file.startswith('.'):\n file_path = os.path.join(file_dialog.getPath(), file)\n\n if file_path not in self.selected_files:\n self.selected_files[file] = file_path\n\n item = QStandardItem(file)\n item.setCheckable(True)\n self.file_view_model.appendRow(item)\n\n # Add selected file to list\n else:\n if file_dialog.getPath() not in self.selected_files:\n self.selected_files[file_dialog.getFilename()[1]] = file_dialog.getPath()\n\n item = QStandardItem(file_dialog.getFilename()[1])\n item.setCheckable(True)\n self.file_view_model.appendRow(item)\n\n # ============================================== #\n\n # Define removeFiles function\n def removeFiles(self):\n # Remove all checked files\n for i in range(self.file_view_model.rowCount() - 1, -1, -1):\n if self.file_view_model.item(i).checkState():\n filename = self.file_view_model.item(i).text()\n del self.selected_files[filename]\n self.file_view_model.removeRow(i)\n\n # ============================================== #\n\n # Define run function\n def run(self):\n # Check if an analysis method is selected\n if not (self.pronoun_checkbox.isChecked() or self.lexical_checkbox.isChecked() or\n self.rule_based_checkbox.isChecked() or self.machine_learning_checkbox.isChecked()):\n # Create and show an error message\n message_box = QMessageBox()\n message_box.setIcon(QMessageBox.Warning)\n message_box.setWindowTitle(\"Missing Parameters\")\n message_box.setText(\"You haven't selected any methods of sentiment analysis. Please select at least one \" +\n \"method from the list of options.\")\n message_box.exec_()\n return\n\n # Check if the current tab is valid\n if self.input_output_box.currentIndex() == 2:\n # Create and show error message\n message_box = QMessageBox()\n message_box.setIcon(QMessageBox.Warning)\n message_box.setWindowTitle(\"Select Input\")\n message_box.setText(\"You must be on the \\\"Select Files\\\" page or the \\\"Manual Input\\\" page to run \" +\n \"an analysis. 
Please select one of those pages and try again.\")\n message_box.exec_()\n return\n\n else:\n progress_bar = QtWidgets.QProgressDialog(\"Running Sentiment Analysis...\", \"Cancel\", 0, 100, self.main_window)\n progress_bar.setValue(0)\n progress_bar.setCancelButton(None)\n progress_bar.setWindowModality(QtCore.Qt.WindowModal)\n progress_bar.resize(400, 50)\n progress_bar.show()\n\n # Analyze selected files\n if self.input_output_box.currentIndex() == 0:\n sentiment_analyzer = SentimentAnalyzer(self.selected_files, progress_bar, pronoun=self.pronoun_checkbox.isChecked(),\n lexical=self.lexical_checkbox.isChecked(),\n rule_based=self.rule_based_checkbox.isChecked(),\n machine_learning=self.machine_learning_checkbox.isChecked())\n\n # Analyze manual input\n else:\n sentiment_analyzer = SentimentAnalyzer(self.text_input.toPlainText(), progress_bar, pronoun=self.pronoun_checkbox.isChecked(),\n lexical=self.lexical_checkbox.isChecked(),\n rule_based=self.rule_based_checkbox.isChecked(),\n machine_learning=self.machine_learning_checkbox.isChecked())\n\n results = sentiment_analyzer.runAnalyses()\n progress_bar.close()\n\n if results:\n self.results_content_text.setText(results)\n self.input_output_box.setTabEnabled(2, True)\n self.input_output_box.setCurrentIndex(2)\n\n else:\n message_box = QMessageBox()\n message_box.setIcon(QMessageBox.Warning)\n message_box.setWindowTitle(\"Missing Input\")\n message_box.setText(\"You haven't added any input to analyze. Please select one or more files or \" +\n \"input some data manually.\")\n message_box.exec_()\n return\n\n# ================================================== #\n# EOF #\n# ================================================== #\n", "step-ids": [ 4, 6, 7, 8, 9 ] }
[ 4, 6, 7, 8, 9 ]
'''
    Statistics models module. This module contains the database models for the
    Statistics class and the StatisticsCategory class.

    @author Hubert Ngu
    @author Jason Hou
'''

from django.db import models

class Statistics(models.Model):
	'''
	Statistics model class. This represents a single tuple in the
	statitics_generator_statistics table in the database.
	'''
	# Raw counts of surveys and listings, overall and split by role.
	number_surveys = models.IntegerField()
	number_listings = models.IntegerField()
	number_buyer_surveys = models.IntegerField()
	number_seller_surveys = models.IntegerField()
	number_buyer_listings = models.IntegerField()
	number_seller_listings = models.IntegerField()
	# Transaction amounts, overall and per role; units/currency are not
	# declared here — presumably defined by the generator that writes these
	# rows (TODO confirm against statistics_generator).
	average_transaction_amount = models.FloatField()
	buyer_transaction_amount = models.FloatField()
	seller_transaction_amount = models.FloatField()
	successful_transaction_amount = models.FloatField()
	# Timing and success-rate aggregates.
	average_transaction_time = models.IntegerField()
	buyer_transaction_success_rate = models.FloatField()
	seller_transaction_success_rate = models.FloatField()
	total_transaction_success_rate = models.FloatField()

class StatisticsCategory(models.Model):
	'''
	StatisticsCategory model class. This represents a single tuple in the
	statitics_generator_statisticscategory table in the database.
	'''
	# NOTE(review): plain IntegerField rather than a ForeignKey — presumably
	# references Statistics.id, so referential integrity is not enforced by
	# the database; verify against the code that populates this table.
	statistics_id = models.IntegerField()
	# Category label for this per-category breakdown row.
	category = models.CharField(max_length=30)
	survey_count = models.IntegerField()
	buyer_count = models.IntegerField()
	seller_count = models.IntegerField()
	amount = models.IntegerField()
normal
{ "blob_id": "728f9402b3ce4b297be82b3ba1a17c4180ac7c0d", "index": 8839, "step-1": "<mask token>\n\n\nclass Statistics(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass StatisticsCategory(models.Model):\n \"\"\"\n\tStatisticsCategory model class. This represents a single tuple in the\n\tstatitics_generator_statisticscategory table in the database.\n\t\"\"\"\n statistics_id = models.IntegerField()\n category = models.CharField(max_length=30)\n survey_count = models.IntegerField()\n buyer_count = models.IntegerField()\n seller_count = models.IntegerField()\n amount = models.IntegerField()\n", "step-2": "<mask token>\n\n\nclass Statistics(models.Model):\n <mask token>\n number_surveys = models.IntegerField()\n number_listings = models.IntegerField()\n number_buyer_surveys = models.IntegerField()\n number_seller_surveys = models.IntegerField()\n number_buyer_listings = models.IntegerField()\n number_seller_listings = models.IntegerField()\n average_transaction_amount = models.FloatField()\n buyer_transaction_amount = models.FloatField()\n seller_transaction_amount = models.FloatField()\n successful_transaction_amount = models.FloatField()\n average_transaction_time = models.IntegerField()\n buyer_transaction_success_rate = models.FloatField()\n seller_transaction_success_rate = models.FloatField()\n total_transaction_success_rate = models.FloatField()\n\n\nclass StatisticsCategory(models.Model):\n \"\"\"\n\tStatisticsCategory model class. 
This represents a single tuple in the\n\tstatitics_generator_statisticscategory table in the database.\n\t\"\"\"\n statistics_id = models.IntegerField()\n category = models.CharField(max_length=30)\n survey_count = models.IntegerField()\n buyer_count = models.IntegerField()\n seller_count = models.IntegerField()\n amount = models.IntegerField()\n", "step-3": "<mask token>\n\n\nclass Statistics(models.Model):\n \"\"\"\n\tStatistics model class. This represents a single tuple in the\n\tstatitics_generator_statistics table in the database.\n\t\"\"\"\n number_surveys = models.IntegerField()\n number_listings = models.IntegerField()\n number_buyer_surveys = models.IntegerField()\n number_seller_surveys = models.IntegerField()\n number_buyer_listings = models.IntegerField()\n number_seller_listings = models.IntegerField()\n average_transaction_amount = models.FloatField()\n buyer_transaction_amount = models.FloatField()\n seller_transaction_amount = models.FloatField()\n successful_transaction_amount = models.FloatField()\n average_transaction_time = models.IntegerField()\n buyer_transaction_success_rate = models.FloatField()\n seller_transaction_success_rate = models.FloatField()\n total_transaction_success_rate = models.FloatField()\n\n\nclass StatisticsCategory(models.Model):\n \"\"\"\n\tStatisticsCategory model class. This represents a single tuple in the\n\tstatitics_generator_statisticscategory table in the database.\n\t\"\"\"\n statistics_id = models.IntegerField()\n category = models.CharField(max_length=30)\n survey_count = models.IntegerField()\n buyer_count = models.IntegerField()\n seller_count = models.IntegerField()\n amount = models.IntegerField()\n", "step-4": "<mask token>\nfrom django.db import models\n\n\nclass Statistics(models.Model):\n \"\"\"\n\tStatistics model class. 
This represents a single tuple in the\n\tstatitics_generator_statistics table in the database.\n\t\"\"\"\n number_surveys = models.IntegerField()\n number_listings = models.IntegerField()\n number_buyer_surveys = models.IntegerField()\n number_seller_surveys = models.IntegerField()\n number_buyer_listings = models.IntegerField()\n number_seller_listings = models.IntegerField()\n average_transaction_amount = models.FloatField()\n buyer_transaction_amount = models.FloatField()\n seller_transaction_amount = models.FloatField()\n successful_transaction_amount = models.FloatField()\n average_transaction_time = models.IntegerField()\n buyer_transaction_success_rate = models.FloatField()\n seller_transaction_success_rate = models.FloatField()\n total_transaction_success_rate = models.FloatField()\n\n\nclass StatisticsCategory(models.Model):\n \"\"\"\n\tStatisticsCategory model class. This represents a single tuple in the\n\tstatitics_generator_statisticscategory table in the database.\n\t\"\"\"\n statistics_id = models.IntegerField()\n category = models.CharField(max_length=30)\n survey_count = models.IntegerField()\n buyer_count = models.IntegerField()\n seller_count = models.IntegerField()\n amount = models.IntegerField()\n", "step-5": "'''\r\n Statistics models module. This module contains the database models for the\r\n Statistics class and the StatisticsCategory class.\r\n\r\n @author Hubert Ngu\r\n @author Jason Hou\r\n'''\r\n\r\nfrom django.db import models\r\n\r\nclass Statistics(models.Model):\r\n\t'''\r\n\tStatistics model class. 
This represents a single tuple in the\r\n\tstatitics_generator_statistics table in the database.\r\n\t'''\r\n\tnumber_surveys = models.IntegerField()\r\n\tnumber_listings = models.IntegerField()\r\n\tnumber_buyer_surveys = models.IntegerField()\r\n\tnumber_seller_surveys = models.IntegerField()\r\n\tnumber_buyer_listings = models.IntegerField()\r\n\tnumber_seller_listings = models.IntegerField()\r\n\taverage_transaction_amount = models.FloatField()\r\n\tbuyer_transaction_amount = models.FloatField()\r\n\tseller_transaction_amount = models.FloatField()\r\n\tsuccessful_transaction_amount = models.FloatField()\r\n\taverage_transaction_time = models.IntegerField()\r\n\tbuyer_transaction_success_rate = models.FloatField()\r\n\tseller_transaction_success_rate = models.FloatField()\r\n\ttotal_transaction_success_rate = models.FloatField()\r\n\r\nclass StatisticsCategory(models.Model):\r\n\t'''\r\n\tStatisticsCategory model class. This represents a single tuple in the\r\n\tstatitics_generator_statisticscategory table in the database.\r\n\t'''\r\n\tstatistics_id = models.IntegerField()\r\n\tcategory = models.CharField(max_length=30)\r\n\tsurvey_count = models.IntegerField()\r\n\tbuyer_count = models.IntegerField()\r\n\tseller_count = models.IntegerField()\r\n\tamount = models.IntegerField()", "step-ids": [ 4, 5, 6, 7, 8 ] }
[ 4, 5, 6, 7, 8 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> def comp_point_ref(self, is_set=False): """Compute the point ref of the Surface Parameters ---------- self : SurfLine A SurfLine object is_set: bool True to update the point_ref property Returns ------- point_ref : complex the reference point of the surface """ point_list = list() for line in self.get_lines(): point_list.append(line.get_middle()) point_ref = sum(array(point_list)) / len(point_list) if is_set: self.point_ref = point_ref return point_ref <|reserved_special_token_1|> from numpy import array, sum def comp_point_ref(self, is_set=False): """Compute the point ref of the Surface Parameters ---------- self : SurfLine A SurfLine object is_set: bool True to update the point_ref property Returns ------- point_ref : complex the reference point of the surface """ point_list = list() for line in self.get_lines(): point_list.append(line.get_middle()) point_ref = sum(array(point_list)) / len(point_list) if is_set: self.point_ref = point_ref return point_ref
flexible
{ "blob_id": "b7721e95cfb509a7c0c6ccdffa3a8ca2c6bd6033", "index": 6713, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\ndef comp_point_ref(self, is_set=False):\n \"\"\"Compute the point ref of the Surface\n\n Parameters\n ----------\n self : SurfLine\n A SurfLine object\n is_set: bool\n True to update the point_ref property\n\n Returns\n -------\n point_ref : complex\n the reference point of the surface\n \"\"\"\n point_list = list()\n for line in self.get_lines():\n point_list.append(line.get_middle())\n point_ref = sum(array(point_list)) / len(point_list)\n if is_set:\n self.point_ref = point_ref\n return point_ref\n", "step-3": "from numpy import array, sum\n\n\ndef comp_point_ref(self, is_set=False):\n \"\"\"Compute the point ref of the Surface\n\n Parameters\n ----------\n self : SurfLine\n A SurfLine object\n is_set: bool\n True to update the point_ref property\n\n Returns\n -------\n point_ref : complex\n the reference point of the surface\n \"\"\"\n point_list = list()\n for line in self.get_lines():\n point_list.append(line.get_middle())\n point_ref = sum(array(point_list)) / len(point_list)\n if is_set:\n self.point_ref = point_ref\n return point_ref\n", "step-4": null, "step-5": null, "step-ids": [ 0, 1, 2 ] }
[ 0, 1, 2 ]
"""Cluster allergen-oligo fold-change data, draw the sample/oligo heatmap,
and test the most significant oligo/sample block against the 'mt_1342'
metadata level (appears to be a serology/metabolite measure — TODO confirm)."""
from scipy.stats import mannwhitneyu
import matplotlib.patches as patches
import os
import numpy
import pandas
from matplotlib.gridspec import GridSpec
from scipy.cluster.hierarchy import fcluster, linkage, dendrogram
from scipy.spatial.distance import squareform
import seaborn as sns
from scipy.stats import spearmanr
from statsmodels.stats.multitest import multipletests
import matplotlib.pyplot as plt

from config import base_path, out_path

MIN_OLIS = 200          # minimum passed oligos for a sample to be kept
THROW_BAD_OLIS = True   # drop any oligo that has a -1 (failed) fold value
MIN_APPEAR = 0.02       # oligo must exceed fold 1 in > 2% of samples
CLUST_TH = 0.7          # cophenetic-distance threshold for flat clusters
MIN_CLUST = 10          # minimum run length for get_groups


def get_clusters(link, dn, inds, th=0.7):
    """Cut linkage `link` at distance `th` into flat clusters.

    Returns a Series of cluster labels indexed by `inds`, reordered into
    dendrogram leaf order (`dn['leaves']`) so that equal labels form
    contiguous runs.
    """
    clst = fcluster(link, criterion='distance', t=th)
    return pandas.Series(index=inds, data=clst).iloc[dn['leaves']]


def draw_significant_groups(groups, dn_ax, color='white'):
    # Draw boxes around clusters
    # Each group is a pair ((x0, x1), (y0, y1)) in heatmap cell coordinates.
    for group in groups:
        rect = patches.Rectangle((group[0][0], group[1][0]), group[0][1] - group[0][0], group[1][1] - group[1][0],
                                 linewidth=1, edgecolor=color, facecolor='none')
        dn_ax.add_patch(rect)


def draw_legume_group(group, ax):
    """Dim everything on `ax` outside the sample-row range `group` = (y0, y1)
    by overlaying translucent white rectangles above and below it."""
    y_values = ax.get_ylim()
    x_values = ax.get_xlim()
    rect = patches.Rectangle((0, 0), x_values[1], group[0], linewidth=1, edgecolor='white',
                             facecolor='white', alpha=0.6)
    ax.add_patch(rect)
    rect = patches.Rectangle((0, group[1]), x_values[1], y_values[0] - group[1], linewidth=1, edgecolor='white',
                             facecolor='white', alpha=0.6)
    ax.add_patch(rect)


def get_groups(clst, clust_above=MIN_CLUST):
    """Collapse the leaf-ordered label sequence `clst` into [start, end)
    position ranges of label runs at least `clust_above` long.

    Runs shorter than `clust_above` are absorbed: the open group's start is
    moved forward instead of closing it.
    NOTE(review): the final (still open) run is always discarded by the
    trailing `groups[:-1]`, even when it is long — confirm this is intended.
    """
    groups = []
    v = -1
    for i in range(len(clst)):
        if clst[i] == v:
            continue
        if v == -1:
            # First label seen: open the first group.
            groups.append([i])
            v = clst[i]
            continue
        if (i - groups[-1][0]) >= clust_above:
            # Run was long enough: close it at i and open the next group.
            groups[-1].append(i)
            groups.append([i])
        else:
            # Run too short: slide the open group's start to i.
            groups[-1][0] = i
        v = clst[i]
    groups = groups[:-1]
    return groups


if __name__ == "__main__":
    os.makedirs(out_path, exist_ok=True)

    # Allergen oligos with a single copy in the library.
    df_info = pandas.read_csv(os.path.join(base_path, "library_contents.csv"), index_col=0, low_memory=False)
    df_info = df_info[df_info.is_allergens & (df_info['num_copy'] == 1)]
    inds = df_info.index
    l_base = len(inds)  # NOTE(review): l_base is never used below

    # First-timepoint samples with enough passed oligos.
    meta_df = pandas.read_csv(os.path.join(base_path, "cohort.csv"), index_col=0, low_memory=False)
    meta_df = meta_df[(meta_df.timepoint == 1) & (meta_df.num_passed >= MIN_OLIS)]

    # Fold data pivoted to samples x oligos, restricted to the allergen oligos.
    fold_df = pandas.read_csv(os.path.join(base_path, "fold_data.csv"), index_col=[0, 1],
                              low_memory=False).loc[meta_df.index].unstack()
    fold_df.columns = fold_df.columns.get_level_values(1)
    fold_df = fold_df[fold_df.columns.intersection(inds)]

    if THROW_BAD_OLIS:
        # Remove oligos with any failed (-1) measurement, from both tables.
        drop = fold_df.columns[(fold_df == -1).sum() > 0]
        fold_df = fold_df[fold_df.columns.difference(drop)].fillna(1)
        inds = df_info.index.difference(drop)
        df_info = df_info.loc[inds]

    # Keep oligos that appear (fold > 1) in enough samples; work in log fold.
    fold_df = fold_df[fold_df.columns[(fold_df > 1).sum() > (MIN_APPEAR * len(fold_df))]]
    fold_df = numpy.log(fold_df.fillna(1))
    df_info = df_info.loc[fold_df.columns]

    th = CLUST_TH

    # Oligos level correlations
    corr = fold_df.corr('spearman')
    link = linkage(squareform(1 - corr), method='average')
    dn = dendrogram(link, no_plot=True)
    clst = get_clusters(link, dn, corr.columns, th)
    groups = get_groups(clst)

    # Samples level correlations
    corr1 = fold_df.T.corr('spearman')
    link1 = linkage(squareform(1 - corr1), method='average')
    dn1 = dendrogram(link1, no_plot=True)
    clst1 = get_clusters(link1, dn1, corr1.columns, th)
    groups1 = get_groups(clst1)

    # Define figure
    fig = plt.figure(figsize=[9.2, 12])
    gs = GridSpec(1, 3, width_ratios=[0.2, 3, 1])

    # Plot heatmap
    bar_ax = fig.add_subplot(gs[0])
    dendogram_ax = fig.add_subplot(gs[1])
    sns.heatmap(fold_df.iloc[dn1['leaves'], dn['leaves']], cmap=sns.color_palette('flare', as_cmap=True),
                ax=dendogram_ax, yticklabels=False, xticklabels=False, cbar_ax=bar_ax)

    dendogram_ax.set_xlabel("oligos")
    dendogram_ax.set_ylabel("samples")

    # Plot sample level bars
    mt = 'normalized mt_1342'
    bar_axis1 = fig.add_subplot(gs[2], sharey=dendogram_ax)
    # Scale year of birth to roughly [0, 1] (1944 baseline over a 60-year
    # span) — presumably cohort-specific; confirm against the data.
    meta_df['yob'] = (meta_df['yob'] - 1944) / 60
    use_columns = ['gender', 'yob']
    sample_extra_info = pandas.merge(meta_df[use_columns], meta_df[mt], left_index=True,
                                     right_index=True, how='left')
    # Min-max normalize the mt column so it shares the heatmap color scale.
    sample_extra_info[mt] = ((sample_extra_info[mt] - sample_extra_info[mt].min()) /
                             (sample_extra_info[mt].max() - sample_extra_info[mt].min())).astype(float)
    sample_extra_info.rename(columns={mt: 'norm mt_1342'}, inplace=True)
    mt = 'norm mt_1342'
    sample_extra_info = sample_extra_info.iloc[dn1['leaves']]
    sns.heatmap(data=sample_extra_info, xticklabels=sample_extra_info.columns, yticklabels=False,
                ax=bar_axis1, cmap=sns.color_palette("viridis", as_cmap=True))

    # Compute significant shared groups
    # From here on fold_df is in dendrogram (leaf) order, so group ranges
    # index it positionally.
    fold_df = fold_df.iloc[dn1['leaves'], dn['leaves']].copy()
    significant_groups = []
    for oligo_subgroup in groups:
        # Rank sample blocks by their mean log-fold within this oligo block.
        sample_group_means = sorted(enumerate(
            [fold_df.iloc[range(*sample_group), range(*oligo_subgroup)].mean().mean() for sample_group in groups1]),
            key=lambda x: -x[1])
        # Call the pairing significant if the top block's mean is more than
        # twice the runner-up's.
        if sample_group_means[0][1] > 2 * sample_group_means[1][1]:
            significant_groups.append([oligo_subgroup, groups1[sample_group_means[0][0]]])
    draw_significant_groups(significant_groups, dendogram_ax)

    # Mann-Whitney: mt level inside each significant sample block vs all
    # remaining samples; keep the block with the smallest p-value.
    mt_scores = pandas.Series([mannwhitneyu(sample_extra_info.iloc[range(*sample_group)][mt].dropna(),
                                            sample_extra_info.iloc[list(range(0, sample_group[0])) +
                                                                   list(range(sample_group[1], len(sample_extra_info)))]
                                            [mt].dropna())[1]
                               for oligos_group, sample_group in significant_groups])
    mt_group = significant_groups[mt_scores.idxmin()]
    mt_pval = mt_scores.min()
    draw_significant_groups([mt_group], dendogram_ax, color='blue')
    draw_legume_group(mt_group[1], bar_axis1)
    plt.suptitle('For group marked in blue the %s level\nof samples in group vs those not in group\n' % mt +
                 'got MW p-value of %g' % mt_pval)

    plt.savefig(os.path.join(out_path, "legumes.png"))

    # Per-oligo Spearman (rho, p) of the blue group's oligos vs the mt level,
    # with Bonferroni / BY / BH multiple-testing corrections.
    res = {}
    inds = sample_extra_info[mt].dropna().index
    for i in range(*mt_group[0]):
        col = fold_df.columns[i]
        res[col] = spearmanr(sample_extra_info.loc[inds][mt], fold_df.loc[inds, col].values)
    res = pandas.DataFrame(res, index=['stat', 'pval']).T.sort_values('pval')
    res["Bonf"] = res['pval'] * len(res)
    FDR = multipletests(res.pval.values.tolist(), method='fdr_by')
    res["FDR_BY"] = FDR[0]
    res['FDR_BY_qval'] = FDR[1]
    FDR = multipletests(res.pval.values.tolist(), method='fdr_bh')
    res["FDR_BH"] = FDR[0]
    res['FDR_BH_qval'] = FDR[1]
    res['allergens_common_name'] = df_info.loc[res.index].allergens_common_name

    print("Of %d oligos in the blue group %d pass FDR (BY) vs %s" % (len(res), len(res[res.FDR_BY]), mt))
    res.to_csv(os.path.join(out_path, "mt_1342.csv"))
normal
{ "blob_id": "bfd31d0b80511721ee5117daced04eaf63679fd8", "index": 2230, "step-1": "<mask token>\n\n\ndef get_clusters(link, dn, inds, th=0.7):\n clst = fcluster(link, criterion='distance', t=th)\n return pandas.Series(index=inds, data=clst).iloc[dn['leaves']]\n\n\ndef draw_significant_groups(groups, dn_ax, color='white'):\n for group in groups:\n rect = patches.Rectangle((group[0][0], group[1][0]), group[0][1] -\n group[0][0], group[1][1] - group[1][0], linewidth=1, edgecolor=\n color, facecolor='none')\n dn_ax.add_patch(rect)\n\n\ndef draw_legume_group(group, ax):\n y_values = ax.get_ylim()\n x_values = ax.get_xlim()\n rect = patches.Rectangle((0, 0), x_values[1], group[0], linewidth=1,\n edgecolor='white', facecolor='white', alpha=0.6)\n ax.add_patch(rect)\n rect = patches.Rectangle((0, group[1]), x_values[1], y_values[0] -\n group[1], linewidth=1, edgecolor='white', facecolor='white', alpha=0.6)\n ax.add_patch(rect)\n\n\ndef get_groups(clst, clust_above=MIN_CLUST):\n groups = []\n v = -1\n for i in range(len(clst)):\n if clst[i] == v:\n continue\n if v == -1:\n groups.append([i])\n v = clst[i]\n continue\n if i - groups[-1][0] >= clust_above:\n groups[-1].append(i)\n groups.append([i])\n else:\n groups[-1][0] = i\n v = clst[i]\n groups = groups[:-1]\n return groups\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\ndef get_clusters(link, dn, inds, th=0.7):\n clst = fcluster(link, criterion='distance', t=th)\n return pandas.Series(index=inds, data=clst).iloc[dn['leaves']]\n\n\ndef draw_significant_groups(groups, dn_ax, color='white'):\n for group in groups:\n rect = patches.Rectangle((group[0][0], group[1][0]), group[0][1] -\n group[0][0], group[1][1] - group[1][0], linewidth=1, edgecolor=\n color, facecolor='none')\n dn_ax.add_patch(rect)\n\n\ndef draw_legume_group(group, ax):\n y_values = ax.get_ylim()\n x_values = ax.get_xlim()\n rect = patches.Rectangle((0, 0), x_values[1], group[0], linewidth=1,\n edgecolor='white', facecolor='white', alpha=0.6)\n 
ax.add_patch(rect)\n rect = patches.Rectangle((0, group[1]), x_values[1], y_values[0] -\n group[1], linewidth=1, edgecolor='white', facecolor='white', alpha=0.6)\n ax.add_patch(rect)\n\n\ndef get_groups(clst, clust_above=MIN_CLUST):\n groups = []\n v = -1\n for i in range(len(clst)):\n if clst[i] == v:\n continue\n if v == -1:\n groups.append([i])\n v = clst[i]\n continue\n if i - groups[-1][0] >= clust_above:\n groups[-1].append(i)\n groups.append([i])\n else:\n groups[-1][0] = i\n v = clst[i]\n groups = groups[:-1]\n return groups\n\n\nif __name__ == '__main__':\n os.makedirs(out_path, exist_ok=True)\n df_info = pandas.read_csv(os.path.join(base_path,\n 'library_contents.csv'), index_col=0, low_memory=False)\n df_info = df_info[df_info.is_allergens & (df_info['num_copy'] == 1)]\n inds = df_info.index\n l_base = len(inds)\n meta_df = pandas.read_csv(os.path.join(base_path, 'cohort.csv'),\n index_col=0, low_memory=False)\n meta_df = meta_df[(meta_df.timepoint == 1) & (meta_df.num_passed >=\n MIN_OLIS)]\n fold_df = pandas.read_csv(os.path.join(base_path, 'fold_data.csv'),\n index_col=[0, 1], low_memory=False).loc[meta_df.index].unstack()\n fold_df.columns = fold_df.columns.get_level_values(1)\n fold_df = fold_df[fold_df.columns.intersection(inds)]\n if THROW_BAD_OLIS:\n drop = fold_df.columns[(fold_df == -1).sum() > 0]\n fold_df = fold_df[fold_df.columns.difference(drop)].fillna(1)\n inds = df_info.index.difference(drop)\n df_info = df_info.loc[inds]\n fold_df = fold_df[fold_df.columns[(fold_df > 1).sum() > MIN_APPEAR *\n len(fold_df)]]\n fold_df = numpy.log(fold_df.fillna(1))\n df_info = df_info.loc[fold_df.columns]\n th = CLUST_TH\n corr = fold_df.corr('spearman')\n link = linkage(squareform(1 - corr), method='average')\n dn = dendrogram(link, no_plot=True)\n clst = get_clusters(link, dn, corr.columns, th)\n groups = get_groups(clst)\n corr1 = fold_df.T.corr('spearman')\n link1 = linkage(squareform(1 - corr1), method='average')\n dn1 = dendrogram(link1, 
no_plot=True)\n clst1 = get_clusters(link1, dn1, corr1.columns, th)\n groups1 = get_groups(clst1)\n fig = plt.figure(figsize=[9.2, 12])\n gs = GridSpec(1, 3, width_ratios=[0.2, 3, 1])\n bar_ax = fig.add_subplot(gs[0])\n dendogram_ax = fig.add_subplot(gs[1])\n sns.heatmap(fold_df.iloc[dn1['leaves'], dn['leaves']], cmap=sns.\n color_palette('flare', as_cmap=True), ax=dendogram_ax, yticklabels=\n False, xticklabels=False, cbar_ax=bar_ax)\n dendogram_ax.set_xlabel('oligos')\n dendogram_ax.set_ylabel('samples')\n mt = 'normalized mt_1342'\n bar_axis1 = fig.add_subplot(gs[2], sharey=dendogram_ax)\n meta_df['yob'] = (meta_df['yob'] - 1944) / 60\n use_columns = ['gender', 'yob']\n sample_extra_info = pandas.merge(meta_df[use_columns], meta_df[mt],\n left_index=True, right_index=True, how='left')\n sample_extra_info[mt] = ((sample_extra_info[mt] - sample_extra_info[mt]\n .min()) / (sample_extra_info[mt].max() - sample_extra_info[mt].min())\n ).astype(float)\n sample_extra_info.rename(columns={mt: 'norm mt_1342'}, inplace=True)\n mt = 'norm mt_1342'\n sample_extra_info = sample_extra_info.iloc[dn1['leaves']]\n sns.heatmap(data=sample_extra_info, xticklabels=sample_extra_info.\n columns, yticklabels=False, ax=bar_axis1, cmap=sns.color_palette(\n 'viridis', as_cmap=True))\n fold_df = fold_df.iloc[dn1['leaves'], dn['leaves']].copy()\n significant_groups = []\n for oligo_subgroup in groups:\n sample_group_means = sorted(enumerate([fold_df.iloc[range(*\n sample_group), range(*oligo_subgroup)].mean().mean() for\n sample_group in groups1]), key=lambda x: -x[1])\n if sample_group_means[0][1] > 2 * sample_group_means[1][1]:\n significant_groups.append([oligo_subgroup, groups1[\n sample_group_means[0][0]]])\n draw_significant_groups(significant_groups, dendogram_ax)\n mt_scores = pandas.Series([mannwhitneyu(sample_extra_info.iloc[range(*\n sample_group)][mt].dropna(), sample_extra_info.iloc[list(range(0,\n sample_group[0])) + list(range(sample_group[1], len(\n 
sample_extra_info)))][mt].dropna())[1] for oligos_group,\n sample_group in significant_groups])\n mt_group = significant_groups[mt_scores.idxmin()]\n mt_pval = mt_scores.min()\n draw_significant_groups([mt_group], dendogram_ax, color='blue')\n draw_legume_group(mt_group[1], bar_axis1)\n plt.suptitle(\n \"\"\"For group marked in blue the %s level\nof samples in group vs those not in group\n\"\"\"\n % mt + 'got MW p-value of %g' % mt_pval)\n plt.savefig(os.path.join(out_path, 'legumes.png'))\n res = {}\n inds = sample_extra_info[mt].dropna().index\n for i in range(*mt_group[0]):\n col = fold_df.columns[i]\n res[col] = spearmanr(sample_extra_info.loc[inds][mt], fold_df.loc[\n inds, col].values)\n res = pandas.DataFrame(res, index=['stat', 'pval']).T.sort_values('pval')\n res['Bonf'] = res['pval'] * len(res)\n FDR = multipletests(res.pval.values.tolist(), method='fdr_by')\n res['FDR_BY'] = FDR[0]\n res['FDR_BY_qval'] = FDR[1]\n FDR = multipletests(res.pval.values.tolist(), method='fdr_bh')\n res['FDR_BH'] = FDR[0]\n res['FDR_BH_qval'] = FDR[1]\n res['allergens_common_name'] = df_info.loc[res.index].allergens_common_name\n print('Of %d oligos in the blue group %d pass FDR (BY) vs %s' % (len(\n res), len(res[res.FDR_BY]), mt))\n res.to_csv(os.path.join(out_path, 'mt_1342.csv'))\n", "step-3": "<mask token>\nMIN_OLIS = 200\nTHROW_BAD_OLIS = True\nMIN_APPEAR = 0.02\nCLUST_TH = 0.7\nMIN_CLUST = 10\n\n\ndef get_clusters(link, dn, inds, th=0.7):\n clst = fcluster(link, criterion='distance', t=th)\n return pandas.Series(index=inds, data=clst).iloc[dn['leaves']]\n\n\ndef draw_significant_groups(groups, dn_ax, color='white'):\n for group in groups:\n rect = patches.Rectangle((group[0][0], group[1][0]), group[0][1] -\n group[0][0], group[1][1] - group[1][0], linewidth=1, edgecolor=\n color, facecolor='none')\n dn_ax.add_patch(rect)\n\n\ndef draw_legume_group(group, ax):\n y_values = ax.get_ylim()\n x_values = ax.get_xlim()\n rect = patches.Rectangle((0, 0), x_values[1], group[0], 
linewidth=1,\n edgecolor='white', facecolor='white', alpha=0.6)\n ax.add_patch(rect)\n rect = patches.Rectangle((0, group[1]), x_values[1], y_values[0] -\n group[1], linewidth=1, edgecolor='white', facecolor='white', alpha=0.6)\n ax.add_patch(rect)\n\n\ndef get_groups(clst, clust_above=MIN_CLUST):\n groups = []\n v = -1\n for i in range(len(clst)):\n if clst[i] == v:\n continue\n if v == -1:\n groups.append([i])\n v = clst[i]\n continue\n if i - groups[-1][0] >= clust_above:\n groups[-1].append(i)\n groups.append([i])\n else:\n groups[-1][0] = i\n v = clst[i]\n groups = groups[:-1]\n return groups\n\n\nif __name__ == '__main__':\n os.makedirs(out_path, exist_ok=True)\n df_info = pandas.read_csv(os.path.join(base_path,\n 'library_contents.csv'), index_col=0, low_memory=False)\n df_info = df_info[df_info.is_allergens & (df_info['num_copy'] == 1)]\n inds = df_info.index\n l_base = len(inds)\n meta_df = pandas.read_csv(os.path.join(base_path, 'cohort.csv'),\n index_col=0, low_memory=False)\n meta_df = meta_df[(meta_df.timepoint == 1) & (meta_df.num_passed >=\n MIN_OLIS)]\n fold_df = pandas.read_csv(os.path.join(base_path, 'fold_data.csv'),\n index_col=[0, 1], low_memory=False).loc[meta_df.index].unstack()\n fold_df.columns = fold_df.columns.get_level_values(1)\n fold_df = fold_df[fold_df.columns.intersection(inds)]\n if THROW_BAD_OLIS:\n drop = fold_df.columns[(fold_df == -1).sum() > 0]\n fold_df = fold_df[fold_df.columns.difference(drop)].fillna(1)\n inds = df_info.index.difference(drop)\n df_info = df_info.loc[inds]\n fold_df = fold_df[fold_df.columns[(fold_df > 1).sum() > MIN_APPEAR *\n len(fold_df)]]\n fold_df = numpy.log(fold_df.fillna(1))\n df_info = df_info.loc[fold_df.columns]\n th = CLUST_TH\n corr = fold_df.corr('spearman')\n link = linkage(squareform(1 - corr), method='average')\n dn = dendrogram(link, no_plot=True)\n clst = get_clusters(link, dn, corr.columns, th)\n groups = get_groups(clst)\n corr1 = fold_df.T.corr('spearman')\n link1 = 
linkage(squareform(1 - corr1), method='average')\n dn1 = dendrogram(link1, no_plot=True)\n clst1 = get_clusters(link1, dn1, corr1.columns, th)\n groups1 = get_groups(clst1)\n fig = plt.figure(figsize=[9.2, 12])\n gs = GridSpec(1, 3, width_ratios=[0.2, 3, 1])\n bar_ax = fig.add_subplot(gs[0])\n dendogram_ax = fig.add_subplot(gs[1])\n sns.heatmap(fold_df.iloc[dn1['leaves'], dn['leaves']], cmap=sns.\n color_palette('flare', as_cmap=True), ax=dendogram_ax, yticklabels=\n False, xticklabels=False, cbar_ax=bar_ax)\n dendogram_ax.set_xlabel('oligos')\n dendogram_ax.set_ylabel('samples')\n mt = 'normalized mt_1342'\n bar_axis1 = fig.add_subplot(gs[2], sharey=dendogram_ax)\n meta_df['yob'] = (meta_df['yob'] - 1944) / 60\n use_columns = ['gender', 'yob']\n sample_extra_info = pandas.merge(meta_df[use_columns], meta_df[mt],\n left_index=True, right_index=True, how='left')\n sample_extra_info[mt] = ((sample_extra_info[mt] - sample_extra_info[mt]\n .min()) / (sample_extra_info[mt].max() - sample_extra_info[mt].min())\n ).astype(float)\n sample_extra_info.rename(columns={mt: 'norm mt_1342'}, inplace=True)\n mt = 'norm mt_1342'\n sample_extra_info = sample_extra_info.iloc[dn1['leaves']]\n sns.heatmap(data=sample_extra_info, xticklabels=sample_extra_info.\n columns, yticklabels=False, ax=bar_axis1, cmap=sns.color_palette(\n 'viridis', as_cmap=True))\n fold_df = fold_df.iloc[dn1['leaves'], dn['leaves']].copy()\n significant_groups = []\n for oligo_subgroup in groups:\n sample_group_means = sorted(enumerate([fold_df.iloc[range(*\n sample_group), range(*oligo_subgroup)].mean().mean() for\n sample_group in groups1]), key=lambda x: -x[1])\n if sample_group_means[0][1] > 2 * sample_group_means[1][1]:\n significant_groups.append([oligo_subgroup, groups1[\n sample_group_means[0][0]]])\n draw_significant_groups(significant_groups, dendogram_ax)\n mt_scores = pandas.Series([mannwhitneyu(sample_extra_info.iloc[range(*\n sample_group)][mt].dropna(), sample_extra_info.iloc[list(range(0,\n 
sample_group[0])) + list(range(sample_group[1], len(\n sample_extra_info)))][mt].dropna())[1] for oligos_group,\n sample_group in significant_groups])\n mt_group = significant_groups[mt_scores.idxmin()]\n mt_pval = mt_scores.min()\n draw_significant_groups([mt_group], dendogram_ax, color='blue')\n draw_legume_group(mt_group[1], bar_axis1)\n plt.suptitle(\n \"\"\"For group marked in blue the %s level\nof samples in group vs those not in group\n\"\"\"\n % mt + 'got MW p-value of %g' % mt_pval)\n plt.savefig(os.path.join(out_path, 'legumes.png'))\n res = {}\n inds = sample_extra_info[mt].dropna().index\n for i in range(*mt_group[0]):\n col = fold_df.columns[i]\n res[col] = spearmanr(sample_extra_info.loc[inds][mt], fold_df.loc[\n inds, col].values)\n res = pandas.DataFrame(res, index=['stat', 'pval']).T.sort_values('pval')\n res['Bonf'] = res['pval'] * len(res)\n FDR = multipletests(res.pval.values.tolist(), method='fdr_by')\n res['FDR_BY'] = FDR[0]\n res['FDR_BY_qval'] = FDR[1]\n FDR = multipletests(res.pval.values.tolist(), method='fdr_bh')\n res['FDR_BH'] = FDR[0]\n res['FDR_BH_qval'] = FDR[1]\n res['allergens_common_name'] = df_info.loc[res.index].allergens_common_name\n print('Of %d oligos in the blue group %d pass FDR (BY) vs %s' % (len(\n res), len(res[res.FDR_BY]), mt))\n res.to_csv(os.path.join(out_path, 'mt_1342.csv'))\n", "step-4": "from scipy.stats import mannwhitneyu\nimport matplotlib.patches as patches\nimport os\nimport numpy\nimport pandas\nfrom matplotlib.gridspec import GridSpec\nfrom scipy.cluster.hierarchy import fcluster, linkage, dendrogram\nfrom scipy.spatial.distance import squareform\nimport seaborn as sns\nfrom scipy.stats import spearmanr\nfrom statsmodels.stats.multitest import multipletests\nimport matplotlib.pyplot as plt\nfrom config import base_path, out_path\nMIN_OLIS = 200\nTHROW_BAD_OLIS = True\nMIN_APPEAR = 0.02\nCLUST_TH = 0.7\nMIN_CLUST = 10\n\n\ndef get_clusters(link, dn, inds, th=0.7):\n clst = fcluster(link, 
criterion='distance', t=th)\n return pandas.Series(index=inds, data=clst).iloc[dn['leaves']]\n\n\ndef draw_significant_groups(groups, dn_ax, color='white'):\n for group in groups:\n rect = patches.Rectangle((group[0][0], group[1][0]), group[0][1] -\n group[0][0], group[1][1] - group[1][0], linewidth=1, edgecolor=\n color, facecolor='none')\n dn_ax.add_patch(rect)\n\n\ndef draw_legume_group(group, ax):\n y_values = ax.get_ylim()\n x_values = ax.get_xlim()\n rect = patches.Rectangle((0, 0), x_values[1], group[0], linewidth=1,\n edgecolor='white', facecolor='white', alpha=0.6)\n ax.add_patch(rect)\n rect = patches.Rectangle((0, group[1]), x_values[1], y_values[0] -\n group[1], linewidth=1, edgecolor='white', facecolor='white', alpha=0.6)\n ax.add_patch(rect)\n\n\ndef get_groups(clst, clust_above=MIN_CLUST):\n groups = []\n v = -1\n for i in range(len(clst)):\n if clst[i] == v:\n continue\n if v == -1:\n groups.append([i])\n v = clst[i]\n continue\n if i - groups[-1][0] >= clust_above:\n groups[-1].append(i)\n groups.append([i])\n else:\n groups[-1][0] = i\n v = clst[i]\n groups = groups[:-1]\n return groups\n\n\nif __name__ == '__main__':\n os.makedirs(out_path, exist_ok=True)\n df_info = pandas.read_csv(os.path.join(base_path,\n 'library_contents.csv'), index_col=0, low_memory=False)\n df_info = df_info[df_info.is_allergens & (df_info['num_copy'] == 1)]\n inds = df_info.index\n l_base = len(inds)\n meta_df = pandas.read_csv(os.path.join(base_path, 'cohort.csv'),\n index_col=0, low_memory=False)\n meta_df = meta_df[(meta_df.timepoint == 1) & (meta_df.num_passed >=\n MIN_OLIS)]\n fold_df = pandas.read_csv(os.path.join(base_path, 'fold_data.csv'),\n index_col=[0, 1], low_memory=False).loc[meta_df.index].unstack()\n fold_df.columns = fold_df.columns.get_level_values(1)\n fold_df = fold_df[fold_df.columns.intersection(inds)]\n if THROW_BAD_OLIS:\n drop = fold_df.columns[(fold_df == -1).sum() > 0]\n fold_df = fold_df[fold_df.columns.difference(drop)].fillna(1)\n inds = 
df_info.index.difference(drop)\n df_info = df_info.loc[inds]\n fold_df = fold_df[fold_df.columns[(fold_df > 1).sum() > MIN_APPEAR *\n len(fold_df)]]\n fold_df = numpy.log(fold_df.fillna(1))\n df_info = df_info.loc[fold_df.columns]\n th = CLUST_TH\n corr = fold_df.corr('spearman')\n link = linkage(squareform(1 - corr), method='average')\n dn = dendrogram(link, no_plot=True)\n clst = get_clusters(link, dn, corr.columns, th)\n groups = get_groups(clst)\n corr1 = fold_df.T.corr('spearman')\n link1 = linkage(squareform(1 - corr1), method='average')\n dn1 = dendrogram(link1, no_plot=True)\n clst1 = get_clusters(link1, dn1, corr1.columns, th)\n groups1 = get_groups(clst1)\n fig = plt.figure(figsize=[9.2, 12])\n gs = GridSpec(1, 3, width_ratios=[0.2, 3, 1])\n bar_ax = fig.add_subplot(gs[0])\n dendogram_ax = fig.add_subplot(gs[1])\n sns.heatmap(fold_df.iloc[dn1['leaves'], dn['leaves']], cmap=sns.\n color_palette('flare', as_cmap=True), ax=dendogram_ax, yticklabels=\n False, xticklabels=False, cbar_ax=bar_ax)\n dendogram_ax.set_xlabel('oligos')\n dendogram_ax.set_ylabel('samples')\n mt = 'normalized mt_1342'\n bar_axis1 = fig.add_subplot(gs[2], sharey=dendogram_ax)\n meta_df['yob'] = (meta_df['yob'] - 1944) / 60\n use_columns = ['gender', 'yob']\n sample_extra_info = pandas.merge(meta_df[use_columns], meta_df[mt],\n left_index=True, right_index=True, how='left')\n sample_extra_info[mt] = ((sample_extra_info[mt] - sample_extra_info[mt]\n .min()) / (sample_extra_info[mt].max() - sample_extra_info[mt].min())\n ).astype(float)\n sample_extra_info.rename(columns={mt: 'norm mt_1342'}, inplace=True)\n mt = 'norm mt_1342'\n sample_extra_info = sample_extra_info.iloc[dn1['leaves']]\n sns.heatmap(data=sample_extra_info, xticklabels=sample_extra_info.\n columns, yticklabels=False, ax=bar_axis1, cmap=sns.color_palette(\n 'viridis', as_cmap=True))\n fold_df = fold_df.iloc[dn1['leaves'], dn['leaves']].copy()\n significant_groups = []\n for oligo_subgroup in groups:\n sample_group_means = 
sorted(enumerate([fold_df.iloc[range(*\n sample_group), range(*oligo_subgroup)].mean().mean() for\n sample_group in groups1]), key=lambda x: -x[1])\n if sample_group_means[0][1] > 2 * sample_group_means[1][1]:\n significant_groups.append([oligo_subgroup, groups1[\n sample_group_means[0][0]]])\n draw_significant_groups(significant_groups, dendogram_ax)\n mt_scores = pandas.Series([mannwhitneyu(sample_extra_info.iloc[range(*\n sample_group)][mt].dropna(), sample_extra_info.iloc[list(range(0,\n sample_group[0])) + list(range(sample_group[1], len(\n sample_extra_info)))][mt].dropna())[1] for oligos_group,\n sample_group in significant_groups])\n mt_group = significant_groups[mt_scores.idxmin()]\n mt_pval = mt_scores.min()\n draw_significant_groups([mt_group], dendogram_ax, color='blue')\n draw_legume_group(mt_group[1], bar_axis1)\n plt.suptitle(\n \"\"\"For group marked in blue the %s level\nof samples in group vs those not in group\n\"\"\"\n % mt + 'got MW p-value of %g' % mt_pval)\n plt.savefig(os.path.join(out_path, 'legumes.png'))\n res = {}\n inds = sample_extra_info[mt].dropna().index\n for i in range(*mt_group[0]):\n col = fold_df.columns[i]\n res[col] = spearmanr(sample_extra_info.loc[inds][mt], fold_df.loc[\n inds, col].values)\n res = pandas.DataFrame(res, index=['stat', 'pval']).T.sort_values('pval')\n res['Bonf'] = res['pval'] * len(res)\n FDR = multipletests(res.pval.values.tolist(), method='fdr_by')\n res['FDR_BY'] = FDR[0]\n res['FDR_BY_qval'] = FDR[1]\n FDR = multipletests(res.pval.values.tolist(), method='fdr_bh')\n res['FDR_BH'] = FDR[0]\n res['FDR_BH_qval'] = FDR[1]\n res['allergens_common_name'] = df_info.loc[res.index].allergens_common_name\n print('Of %d oligos in the blue group %d pass FDR (BY) vs %s' % (len(\n res), len(res[res.FDR_BY]), mt))\n res.to_csv(os.path.join(out_path, 'mt_1342.csv'))\n", "step-5": "from scipy.stats import mannwhitneyu\nimport matplotlib.patches as patches\nimport os\nimport numpy\nimport pandas\nfrom 
matplotlib.gridspec import GridSpec\nfrom scipy.cluster.hierarchy import fcluster, linkage, dendrogram\nfrom scipy.spatial.distance import squareform\nimport seaborn as sns\nfrom scipy.stats import spearmanr\nfrom statsmodels.stats.multitest import multipletests\nimport matplotlib.pyplot as plt\n\nfrom config import base_path, out_path\n\nMIN_OLIS = 200\nTHROW_BAD_OLIS = True\nMIN_APPEAR = 0.02\nCLUST_TH = 0.7\nMIN_CLUST = 10\n\n\ndef get_clusters(link, dn, inds, th=0.7):\n clst = fcluster(link, criterion='distance', t=th)\n return pandas.Series(index=inds, data=clst).iloc[dn['leaves']]\n\n\ndef draw_significant_groups(groups, dn_ax, color='white'):\n # Draw boxes around clusters\n for group in groups:\n rect = patches.Rectangle((group[0][0], group[1][0]), group[0][1] - group[0][0], group[1][1] - group[1][0],\n linewidth=1, edgecolor=color, facecolor='none')\n dn_ax.add_patch(rect)\n\n\ndef draw_legume_group(group, ax):\n y_values = ax.get_ylim()\n x_values = ax.get_xlim()\n rect = patches.Rectangle((0, 0), x_values[1], group[0], linewidth=1, edgecolor='white',\n facecolor='white', alpha=0.6)\n ax.add_patch(rect)\n rect = patches.Rectangle((0, group[1]), x_values[1], y_values[0] - group[1], linewidth=1, edgecolor='white',\n facecolor='white', alpha=0.6)\n ax.add_patch(rect)\n\n\ndef get_groups(clst, clust_above=MIN_CLUST):\n groups = []\n v = -1\n for i in range(len(clst)):\n if clst[i] == v:\n continue\n if v == -1:\n groups.append([i])\n v = clst[i]\n continue\n if (i - groups[-1][0]) >= clust_above:\n groups[-1].append(i)\n groups.append([i])\n else:\n groups[-1][0] = i\n v = clst[i]\n groups = groups[:-1]\n return groups\n\n\nif __name__ == \"__main__\":\n os.makedirs(out_path, exist_ok=True)\n\n df_info = pandas.read_csv(os.path.join(base_path, \"library_contents.csv\"), index_col=0, low_memory=False)\n df_info = df_info[df_info.is_allergens & (df_info['num_copy'] == 1)]\n inds = df_info.index\n l_base = len(inds)\n\n meta_df = 
pandas.read_csv(os.path.join(base_path, \"cohort.csv\"), index_col=0, low_memory=False)\n meta_df = meta_df[(meta_df.timepoint == 1) & (meta_df.num_passed >= MIN_OLIS)]\n\n fold_df = pandas.read_csv(os.path.join(base_path, \"fold_data.csv\"), index_col=[0, 1],\n low_memory=False).loc[meta_df.index].unstack()\n fold_df.columns = fold_df.columns.get_level_values(1)\n fold_df = fold_df[fold_df.columns.intersection(inds)]\n\n if THROW_BAD_OLIS:\n drop = fold_df.columns[(fold_df == -1).sum() > 0]\n fold_df = fold_df[fold_df.columns.difference(drop)].fillna(1)\n inds = df_info.index.difference(drop)\n df_info = df_info.loc[inds]\n\n fold_df = fold_df[fold_df.columns[(fold_df > 1).sum() > (MIN_APPEAR * len(fold_df))]]\n fold_df = numpy.log(fold_df.fillna(1))\n df_info = df_info.loc[fold_df.columns]\n\n th = CLUST_TH\n\n # Oligos level correlations\n corr = fold_df.corr('spearman')\n link = linkage(squareform(1 - corr), method='average')\n dn = dendrogram(link, no_plot=True)\n clst = get_clusters(link, dn, corr.columns, th)\n groups = get_groups(clst)\n\n # Samples level correlations\n corr1 = fold_df.T.corr('spearman')\n link1 = linkage(squareform(1 - corr1), method='average')\n dn1 = dendrogram(link1, no_plot=True)\n clst1 = get_clusters(link1, dn1, corr1.columns, th)\n groups1 = get_groups(clst1)\n\n # Define figure\n fig = plt.figure(figsize=[9.2, 12])\n gs = GridSpec(1, 3, width_ratios=[0.2, 3, 1])\n\n # Plot heatmap\n bar_ax = fig.add_subplot(gs[0])\n dendogram_ax = fig.add_subplot(gs[1])\n sns.heatmap(fold_df.iloc[dn1['leaves'], dn['leaves']], cmap=sns.color_palette('flare', as_cmap=True),\n ax=dendogram_ax, yticklabels=False, xticklabels=False, cbar_ax=bar_ax)\n\n dendogram_ax.set_xlabel(\"oligos\")\n dendogram_ax.set_ylabel(\"samples\")\n\n # Plot sample level bars\n mt = 'normalized mt_1342'\n bar_axis1 = fig.add_subplot(gs[2], sharey=dendogram_ax)\n meta_df['yob'] = (meta_df['yob'] - 1944) / 60\n use_columns = ['gender', 'yob']\n sample_extra_info = 
pandas.merge(meta_df[use_columns], meta_df[mt], left_index=True,\n right_index=True, how='left')\n sample_extra_info[mt] = ((sample_extra_info[mt] - sample_extra_info[mt].min()) /\n (sample_extra_info[mt].max() - sample_extra_info[mt].min())).astype(float)\n sample_extra_info.rename(columns={mt: 'norm mt_1342'}, inplace=True)\n mt = 'norm mt_1342'\n sample_extra_info = sample_extra_info.iloc[dn1['leaves']]\n sns.heatmap(data=sample_extra_info, xticklabels=sample_extra_info.columns, yticklabels=False,\n ax=bar_axis1, cmap=sns.color_palette(\"viridis\", as_cmap=True))\n\n # Compute significant shared groups\n fold_df = fold_df.iloc[dn1['leaves'], dn['leaves']].copy()\n significant_groups = []\n for oligo_subgroup in groups:\n sample_group_means = sorted(enumerate(\n [fold_df.iloc[range(*sample_group), range(*oligo_subgroup)].mean().mean() for sample_group in groups1]),\n key=lambda x: -x[1])\n if sample_group_means[0][1] > 2 * sample_group_means[1][1]:\n significant_groups.append([oligo_subgroup, groups1[sample_group_means[0][0]]])\n draw_significant_groups(significant_groups, dendogram_ax)\n\n mt_scores = pandas.Series([mannwhitneyu(sample_extra_info.iloc[range(*sample_group)][mt].dropna(),\n sample_extra_info.iloc[list(range(0, sample_group[0])) +\n list(range(sample_group[1], len(sample_extra_info)))]\n [mt].dropna())[1]\n for oligos_group, sample_group in significant_groups])\n mt_group = significant_groups[mt_scores.idxmin()]\n mt_pval = mt_scores.min()\n draw_significant_groups([mt_group], dendogram_ax, color='blue')\n draw_legume_group(mt_group[1], bar_axis1)\n plt.suptitle('For group marked in blue the %s level\\nof samples in group vs those not in group\\n' % mt +\n 'got MW p-value of %g' % mt_pval)\n\n plt.savefig(os.path.join(out_path, \"legumes.png\"))\n\n res = {}\n inds = sample_extra_info[mt].dropna().index\n for i in range(*mt_group[0]):\n col = fold_df.columns[i]\n res[col] = spearmanr(sample_extra_info.loc[inds][mt], fold_df.loc[inds, col].values)\n 
res = pandas.DataFrame(res, index=['stat', 'pval']).T.sort_values('pval')\n res[\"Bonf\"] = res['pval'] * len(res)\n FDR = multipletests(res.pval.values.tolist(), method='fdr_by')\n res[\"FDR_BY\"] = FDR[0]\n res['FDR_BY_qval'] = FDR[1]\n FDR = multipletests(res.pval.values.tolist(), method='fdr_bh')\n res[\"FDR_BH\"] = FDR[0]\n res['FDR_BH_qval'] = FDR[1]\n res['allergens_common_name'] = df_info.loc[res.index].allergens_common_name\n\n print(\"Of %d oligos in the blue group %d pass FDR (BY) vs %s\" % (len(res), len(res[res.FDR_BY]), mt))\n res.to_csv(os.path.join(out_path, \"mt_1342.csv\"))\n\n", "step-ids": [ 4, 5, 6, 7, 8 ] }
[ 4, 5, 6, 7, 8 ]
from datetime import datetime import whois def age_domain(url): try: w = whois.whois(url) if(w): for l in w.expiration_date: d1 = datetime.date(l) print(d1) for l1 in w.creation_date: d2 = datetime.date(l1) print(d2) diff = (d1 - d2).days print(diff) if ((diff / 30) < 6): return 1 else: return 0 except: return -1
normal
{ "blob_id": "07d574060ded0d98734b4f184dcba7377b3a5480", "index": 685, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\ndef age_domain(url):\n try:\n w = whois.whois(url)\n if w:\n for l in w.expiration_date:\n d1 = datetime.date(l)\n print(d1)\n for l1 in w.creation_date:\n d2 = datetime.date(l1)\n print(d2)\n diff = (d1 - d2).days\n print(diff)\n if diff / 30 < 6:\n return 1\n else:\n return 0\n except:\n return -1\n", "step-3": "from datetime import datetime\nimport whois\n\n\ndef age_domain(url):\n try:\n w = whois.whois(url)\n if w:\n for l in w.expiration_date:\n d1 = datetime.date(l)\n print(d1)\n for l1 in w.creation_date:\n d2 = datetime.date(l1)\n print(d2)\n diff = (d1 - d2).days\n print(diff)\n if diff / 30 < 6:\n return 1\n else:\n return 0\n except:\n return -1\n", "step-4": "from datetime import datetime\r\n\r\nimport whois\r\n\r\n\r\ndef age_domain(url):\r\n try:\r\n w = whois.whois(url)\r\n if(w):\r\n for l in w.expiration_date:\r\n d1 = datetime.date(l)\r\n print(d1)\r\n for l1 in w.creation_date:\r\n d2 = datetime.date(l1)\r\n print(d2)\r\n diff = (d1 - d2).days\r\n print(diff)\r\n if ((diff / 30) < 6):\r\n return 1\r\n else:\r\n return 0\r\n except:\r\n return -1\r\n\r\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
from django import forms from . import models from .validators import validate_metadata class ServiceProviderForm(forms.ModelForm): xml = forms.CharField(label='SAML Metadata XML', widget=forms.Textarea, validators=[validate_metadata]) class Meta: model = models.ServiceProvider fields = ('xml',)
normal
{ "blob_id": "e018d28cbacb568596eb9a5134581db960111e14", "index": 9835, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\nclass ServiceProviderForm(forms.ModelForm):\n <mask token>\n\n\n class Meta:\n model = models.ServiceProvider\n fields = 'xml',\n", "step-3": "<mask token>\n\n\nclass ServiceProviderForm(forms.ModelForm):\n xml = forms.CharField(label='SAML Metadata XML', widget=forms.Textarea,\n validators=[validate_metadata])\n\n\n class Meta:\n model = models.ServiceProvider\n fields = 'xml',\n", "step-4": "from django import forms\nfrom . import models\nfrom .validators import validate_metadata\n\n\nclass ServiceProviderForm(forms.ModelForm):\n xml = forms.CharField(label='SAML Metadata XML', widget=forms.Textarea,\n validators=[validate_metadata])\n\n\n class Meta:\n model = models.ServiceProvider\n fields = 'xml',\n", "step-5": "from django import forms\n\nfrom . import models\nfrom .validators import validate_metadata\n\n\nclass ServiceProviderForm(forms.ModelForm):\n xml = forms.CharField(label='SAML Metadata XML',\n widget=forms.Textarea,\n validators=[validate_metadata])\n\n class Meta:\n model = models.ServiceProvider\n fields = ('xml',)", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
<|reserved_special_token_0|> class ForgotForm(FlaskForm): email = EmailField('Email Id*', validators=[DataRequired(), Email()]) design = SelectField(u'Designation*', choices=[('admin', 'Admin'), ( 'stud', 'Student')], validators=[DataRequired()]) submit = SubmitField('Change your Password') class changepassword(FlaskForm): password = PasswordField('Enter Password', validators=[DataRequired()]) submit = SubmitField('Change Password') class ComplaintForm(FlaskForm): fname = StringField('Full Name *', validators=[DataRequired()]) email = EmailField('Email Id*', validators=[DataRequired(), Email()]) date = DateField('Date', validators=[DataRequired()]) degree = SelectField(u'Degree*', choices=[('bachelors', 'Bachelors'), ( 'masters', 'Masters')], validators=[DataRequired()]) semester = SelectField(u'Semester*', choices=[('first', 'First'), ( 'second', 'Second'), ('third', 'Third'), ('fourth', 'Fourth'), ( 'fifth', 'Fifth'), ('sixth', 'Sixth'), ('seventh', 'Seventh'), ( 'eighth', 'Eighth')], validators=[DataRequired()]) complaintcategory = SelectField(u'Complain Category*', choices=[( 'infrastructure', 'Infrastructure'), ('accounts', 'Accounts'), ( 'academics', 'Academics'), ('management', 'Management'), ('faculty', 'Faculty'), ('library', 'Library')], validators=[DataRequired()]) message = TextAreaField('Enter Complain Details', validators=[ DataRequired(), Length(max=100)]) submit = SubmitField('Submit') class complaint_status(FlaskForm): status = SelectField(u'Complaint Status', choices=[('Pending', 'Pending'), ('Under Review', 'Under Review'), ('Resolved', 'Resolved')] ) submit = SubmitField('Update') <|reserved_special_token_1|> <|reserved_special_token_0|> class LoginForm(FlaskForm): email = EmailField('Email Id*', validators=[DataRequired(), Email()]) password = PasswordField('Password*', validators=[DataRequired()]) design = SelectField(u'Designation*', choices=[('admin', 'Admin'), ( 'stud', 'Student')], validators=[DataRequired()]) submit = SubmitField('Login 
>>') class ForgotForm(FlaskForm): email = EmailField('Email Id*', validators=[DataRequired(), Email()]) design = SelectField(u'Designation*', choices=[('admin', 'Admin'), ( 'stud', 'Student')], validators=[DataRequired()]) submit = SubmitField('Change your Password') class changepassword(FlaskForm): password = PasswordField('Enter Password', validators=[DataRequired()]) submit = SubmitField('Change Password') class ComplaintForm(FlaskForm): fname = StringField('Full Name *', validators=[DataRequired()]) email = EmailField('Email Id*', validators=[DataRequired(), Email()]) date = DateField('Date', validators=[DataRequired()]) degree = SelectField(u'Degree*', choices=[('bachelors', 'Bachelors'), ( 'masters', 'Masters')], validators=[DataRequired()]) semester = SelectField(u'Semester*', choices=[('first', 'First'), ( 'second', 'Second'), ('third', 'Third'), ('fourth', 'Fourth'), ( 'fifth', 'Fifth'), ('sixth', 'Sixth'), ('seventh', 'Seventh'), ( 'eighth', 'Eighth')], validators=[DataRequired()]) complaintcategory = SelectField(u'Complain Category*', choices=[( 'infrastructure', 'Infrastructure'), ('accounts', 'Accounts'), ( 'academics', 'Academics'), ('management', 'Management'), ('faculty', 'Faculty'), ('library', 'Library')], validators=[DataRequired()]) message = TextAreaField('Enter Complain Details', validators=[ DataRequired(), Length(max=100)]) submit = SubmitField('Submit') class complaint_status(FlaskForm): status = SelectField(u'Complaint Status', choices=[('Pending', 'Pending'), ('Under Review', 'Under Review'), ('Resolved', 'Resolved')] ) submit = SubmitField('Update') <|reserved_special_token_1|> <|reserved_special_token_0|> class SignUpForm(FlaskForm): <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> class LoginForm(FlaskForm): email = EmailField('Email Id*', validators=[DataRequired(), Email()]) 
password = PasswordField('Password*', validators=[DataRequired()]) design = SelectField(u'Designation*', choices=[('admin', 'Admin'), ( 'stud', 'Student')], validators=[DataRequired()]) submit = SubmitField('Login >>') class ForgotForm(FlaskForm): email = EmailField('Email Id*', validators=[DataRequired(), Email()]) design = SelectField(u'Designation*', choices=[('admin', 'Admin'), ( 'stud', 'Student')], validators=[DataRequired()]) submit = SubmitField('Change your Password') class changepassword(FlaskForm): password = PasswordField('Enter Password', validators=[DataRequired()]) submit = SubmitField('Change Password') class ComplaintForm(FlaskForm): fname = StringField('Full Name *', validators=[DataRequired()]) email = EmailField('Email Id*', validators=[DataRequired(), Email()]) date = DateField('Date', validators=[DataRequired()]) degree = SelectField(u'Degree*', choices=[('bachelors', 'Bachelors'), ( 'masters', 'Masters')], validators=[DataRequired()]) semester = SelectField(u'Semester*', choices=[('first', 'First'), ( 'second', 'Second'), ('third', 'Third'), ('fourth', 'Fourth'), ( 'fifth', 'Fifth'), ('sixth', 'Sixth'), ('seventh', 'Seventh'), ( 'eighth', 'Eighth')], validators=[DataRequired()]) complaintcategory = SelectField(u'Complain Category*', choices=[( 'infrastructure', 'Infrastructure'), ('accounts', 'Accounts'), ( 'academics', 'Academics'), ('management', 'Management'), ('faculty', 'Faculty'), ('library', 'Library')], validators=[DataRequired()]) message = TextAreaField('Enter Complain Details', validators=[ DataRequired(), Length(max=100)]) submit = SubmitField('Submit') class complaint_status(FlaskForm): status = SelectField(u'Complaint Status', choices=[('Pending', 'Pending'), ('Under Review', 'Under Review'), ('Resolved', 'Resolved')] ) submit = SubmitField('Update') <|reserved_special_token_1|> <|reserved_special_token_0|> class SignUpForm(FlaskForm): id = StringField('ID*', validators=[DataRequired()]) fname = StringField('Full Name*', 
validators=[DataRequired()]) email = EmailField('Email Id*', validators=[DataRequired(), Email()]) password = PasswordField('Password*', validators=[DataRequired()]) contactno = TelField('Mobile No*.', validators=[DataRequired(), Length( min=10, max=10)]) design = SelectField(u'Designation*', choices=[('admin', 'Admin'), ( 'stud', 'Student')], validators=[DataRequired()]) submit = SubmitField('Sign Up >>') class LoginForm(FlaskForm): email = EmailField('Email Id*', validators=[DataRequired(), Email()]) password = PasswordField('Password*', validators=[DataRequired()]) design = SelectField(u'Designation*', choices=[('admin', 'Admin'), ( 'stud', 'Student')], validators=[DataRequired()]) submit = SubmitField('Login >>') class ForgotForm(FlaskForm): email = EmailField('Email Id*', validators=[DataRequired(), Email()]) design = SelectField(u'Designation*', choices=[('admin', 'Admin'), ( 'stud', 'Student')], validators=[DataRequired()]) submit = SubmitField('Change your Password') class changepassword(FlaskForm): password = PasswordField('Enter Password', validators=[DataRequired()]) submit = SubmitField('Change Password') class ComplaintForm(FlaskForm): fname = StringField('Full Name *', validators=[DataRequired()]) email = EmailField('Email Id*', validators=[DataRequired(), Email()]) date = DateField('Date', validators=[DataRequired()]) degree = SelectField(u'Degree*', choices=[('bachelors', 'Bachelors'), ( 'masters', 'Masters')], validators=[DataRequired()]) semester = SelectField(u'Semester*', choices=[('first', 'First'), ( 'second', 'Second'), ('third', 'Third'), ('fourth', 'Fourth'), ( 'fifth', 'Fifth'), ('sixth', 'Sixth'), ('seventh', 'Seventh'), ( 'eighth', 'Eighth')], validators=[DataRequired()]) complaintcategory = SelectField(u'Complain Category*', choices=[( 'infrastructure', 'Infrastructure'), ('accounts', 'Accounts'), ( 'academics', 'Academics'), ('management', 'Management'), ('faculty', 'Faculty'), ('library', 'Library')], validators=[DataRequired()]) 
message = TextAreaField('Enter Complain Details', validators=[ DataRequired(), Length(max=100)]) submit = SubmitField('Submit') class complaint_status(FlaskForm): status = SelectField(u'Complaint Status', choices=[('Pending', 'Pending'), ('Under Review', 'Under Review'), ('Resolved', 'Resolved')] ) submit = SubmitField('Update') <|reserved_special_token_1|> from flask_wtf import FlaskForm from wtforms import StringField, SelectField,SubmitField, PasswordField, RadioField, MultipleFileField, SubmitField, TextAreaField from wtforms.fields.html5 import EmailField, TelField, DateField from wtforms.validators import DataRequired, Email, Length, InputRequired class SignUpForm(FlaskForm): id = StringField('ID*', validators=[DataRequired()]) fname = StringField('Full Name*', validators=[DataRequired()]) email = EmailField('Email Id*',validators=[DataRequired(), Email()]) password = PasswordField('Password*', validators=[DataRequired()]) contactno = TelField('Mobile No*.', validators=[DataRequired(), Length(min=10, max=10)]) design = SelectField(u'Designation*', choices=[('admin', 'Admin'), ('stud', 'Student')], validators=[DataRequired()]) submit = SubmitField('Sign Up >>') class LoginForm(FlaskForm): email = EmailField('Email Id*',validators=[DataRequired(), Email()]) password = PasswordField('Password*', validators=[DataRequired()]) design = SelectField(u'Designation*', choices=[('admin', 'Admin'), ('stud', 'Student')], validators=[DataRequired()]) submit = SubmitField('Login >>') class ForgotForm(FlaskForm): email = EmailField('Email Id*',validators=[DataRequired(), Email()]) design = SelectField(u'Designation*', choices=[('admin', 'Admin'), ('stud', 'Student')], validators=[DataRequired()]) submit = SubmitField('Change your Password') class changepassword(FlaskForm): password = PasswordField('Enter Password', validators=[DataRequired()]) submit = SubmitField('Change Password') class ComplaintForm(FlaskForm): fname = StringField('Full Name *', 
validators=[DataRequired()]) email = EmailField('Email Id*',validators=[DataRequired(), Email()]) date = DateField('Date', validators=[DataRequired()]) degree = SelectField(u'Degree*', choices=[('bachelors', 'Bachelors'), ('masters', 'Masters')], validators=[DataRequired()]) semester = SelectField(u'Semester*', choices=[('first', 'First'), ('second', 'Second'), ('third', 'Third'), ('fourth', 'Fourth'), ('fifth', 'Fifth'), ('sixth', 'Sixth'), ('seventh', 'Seventh'), ('eighth', 'Eighth')], validators=[DataRequired()]) complaintcategory = SelectField(u'Complain Category*', choices=[('infrastructure', 'Infrastructure'), ('accounts', 'Accounts'), ('academics', 'Academics'), ('management', 'Management'), ('faculty', 'Faculty'), ('library', 'Library')], validators=[DataRequired()]) message = TextAreaField('Enter Complain Details', validators=[DataRequired(), Length(max=100)]) #file = MultipleFileField(u'Upload File') submit = SubmitField('Submit') class complaint_status(FlaskForm): status = SelectField(u'Complaint Status', choices=[('Pending', 'Pending'), ('Under Review', 'Under Review'), ('Resolved', 'Resolved')]) submit = SubmitField('Update')
flexible
{ "blob_id": "32ed07a89a6f929a6c4b78fd79e687b85e01015b", "index": 535, "step-1": "<mask token>\n\n\nclass ForgotForm(FlaskForm):\n email = EmailField('Email Id*', validators=[DataRequired(), Email()])\n design = SelectField(u'Designation*', choices=[('admin', 'Admin'), (\n 'stud', 'Student')], validators=[DataRequired()])\n submit = SubmitField('Change your Password')\n\n\nclass changepassword(FlaskForm):\n password = PasswordField('Enter Password', validators=[DataRequired()])\n submit = SubmitField('Change Password')\n\n\nclass ComplaintForm(FlaskForm):\n fname = StringField('Full Name *', validators=[DataRequired()])\n email = EmailField('Email Id*', validators=[DataRequired(), Email()])\n date = DateField('Date', validators=[DataRequired()])\n degree = SelectField(u'Degree*', choices=[('bachelors', 'Bachelors'), (\n 'masters', 'Masters')], validators=[DataRequired()])\n semester = SelectField(u'Semester*', choices=[('first', 'First'), (\n 'second', 'Second'), ('third', 'Third'), ('fourth', 'Fourth'), (\n 'fifth', 'Fifth'), ('sixth', 'Sixth'), ('seventh', 'Seventh'), (\n 'eighth', 'Eighth')], validators=[DataRequired()])\n complaintcategory = SelectField(u'Complain Category*', choices=[(\n 'infrastructure', 'Infrastructure'), ('accounts', 'Accounts'), (\n 'academics', 'Academics'), ('management', 'Management'), ('faculty',\n 'Faculty'), ('library', 'Library')], validators=[DataRequired()])\n message = TextAreaField('Enter Complain Details', validators=[\n DataRequired(), Length(max=100)])\n submit = SubmitField('Submit')\n\n\nclass complaint_status(FlaskForm):\n status = SelectField(u'Complaint Status', choices=[('Pending',\n 'Pending'), ('Under Review', 'Under Review'), ('Resolved', 'Resolved')]\n )\n submit = SubmitField('Update')\n", "step-2": "<mask token>\n\n\nclass LoginForm(FlaskForm):\n email = EmailField('Email Id*', validators=[DataRequired(), Email()])\n password = PasswordField('Password*', validators=[DataRequired()])\n design = 
SelectField(u'Designation*', choices=[('admin', 'Admin'), (\n 'stud', 'Student')], validators=[DataRequired()])\n submit = SubmitField('Login >>')\n\n\nclass ForgotForm(FlaskForm):\n email = EmailField('Email Id*', validators=[DataRequired(), Email()])\n design = SelectField(u'Designation*', choices=[('admin', 'Admin'), (\n 'stud', 'Student')], validators=[DataRequired()])\n submit = SubmitField('Change your Password')\n\n\nclass changepassword(FlaskForm):\n password = PasswordField('Enter Password', validators=[DataRequired()])\n submit = SubmitField('Change Password')\n\n\nclass ComplaintForm(FlaskForm):\n fname = StringField('Full Name *', validators=[DataRequired()])\n email = EmailField('Email Id*', validators=[DataRequired(), Email()])\n date = DateField('Date', validators=[DataRequired()])\n degree = SelectField(u'Degree*', choices=[('bachelors', 'Bachelors'), (\n 'masters', 'Masters')], validators=[DataRequired()])\n semester = SelectField(u'Semester*', choices=[('first', 'First'), (\n 'second', 'Second'), ('third', 'Third'), ('fourth', 'Fourth'), (\n 'fifth', 'Fifth'), ('sixth', 'Sixth'), ('seventh', 'Seventh'), (\n 'eighth', 'Eighth')], validators=[DataRequired()])\n complaintcategory = SelectField(u'Complain Category*', choices=[(\n 'infrastructure', 'Infrastructure'), ('accounts', 'Accounts'), (\n 'academics', 'Academics'), ('management', 'Management'), ('faculty',\n 'Faculty'), ('library', 'Library')], validators=[DataRequired()])\n message = TextAreaField('Enter Complain Details', validators=[\n DataRequired(), Length(max=100)])\n submit = SubmitField('Submit')\n\n\nclass complaint_status(FlaskForm):\n status = SelectField(u'Complaint Status', choices=[('Pending',\n 'Pending'), ('Under Review', 'Under Review'), ('Resolved', 'Resolved')]\n )\n submit = SubmitField('Update')\n", "step-3": "<mask token>\n\n\nclass SignUpForm(FlaskForm):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass 
LoginForm(FlaskForm):\n email = EmailField('Email Id*', validators=[DataRequired(), Email()])\n password = PasswordField('Password*', validators=[DataRequired()])\n design = SelectField(u'Designation*', choices=[('admin', 'Admin'), (\n 'stud', 'Student')], validators=[DataRequired()])\n submit = SubmitField('Login >>')\n\n\nclass ForgotForm(FlaskForm):\n email = EmailField('Email Id*', validators=[DataRequired(), Email()])\n design = SelectField(u'Designation*', choices=[('admin', 'Admin'), (\n 'stud', 'Student')], validators=[DataRequired()])\n submit = SubmitField('Change your Password')\n\n\nclass changepassword(FlaskForm):\n password = PasswordField('Enter Password', validators=[DataRequired()])\n submit = SubmitField('Change Password')\n\n\nclass ComplaintForm(FlaskForm):\n fname = StringField('Full Name *', validators=[DataRequired()])\n email = EmailField('Email Id*', validators=[DataRequired(), Email()])\n date = DateField('Date', validators=[DataRequired()])\n degree = SelectField(u'Degree*', choices=[('bachelors', 'Bachelors'), (\n 'masters', 'Masters')], validators=[DataRequired()])\n semester = SelectField(u'Semester*', choices=[('first', 'First'), (\n 'second', 'Second'), ('third', 'Third'), ('fourth', 'Fourth'), (\n 'fifth', 'Fifth'), ('sixth', 'Sixth'), ('seventh', 'Seventh'), (\n 'eighth', 'Eighth')], validators=[DataRequired()])\n complaintcategory = SelectField(u'Complain Category*', choices=[(\n 'infrastructure', 'Infrastructure'), ('accounts', 'Accounts'), (\n 'academics', 'Academics'), ('management', 'Management'), ('faculty',\n 'Faculty'), ('library', 'Library')], validators=[DataRequired()])\n message = TextAreaField('Enter Complain Details', validators=[\n DataRequired(), Length(max=100)])\n submit = SubmitField('Submit')\n\n\nclass complaint_status(FlaskForm):\n status = SelectField(u'Complaint Status', choices=[('Pending',\n 'Pending'), ('Under Review', 'Under Review'), ('Resolved', 'Resolved')]\n )\n submit = SubmitField('Update')\n", 
"step-4": "<mask token>\n\n\nclass SignUpForm(FlaskForm):\n id = StringField('ID*', validators=[DataRequired()])\n fname = StringField('Full Name*', validators=[DataRequired()])\n email = EmailField('Email Id*', validators=[DataRequired(), Email()])\n password = PasswordField('Password*', validators=[DataRequired()])\n contactno = TelField('Mobile No*.', validators=[DataRequired(), Length(\n min=10, max=10)])\n design = SelectField(u'Designation*', choices=[('admin', 'Admin'), (\n 'stud', 'Student')], validators=[DataRequired()])\n submit = SubmitField('Sign Up >>')\n\n\nclass LoginForm(FlaskForm):\n email = EmailField('Email Id*', validators=[DataRequired(), Email()])\n password = PasswordField('Password*', validators=[DataRequired()])\n design = SelectField(u'Designation*', choices=[('admin', 'Admin'), (\n 'stud', 'Student')], validators=[DataRequired()])\n submit = SubmitField('Login >>')\n\n\nclass ForgotForm(FlaskForm):\n email = EmailField('Email Id*', validators=[DataRequired(), Email()])\n design = SelectField(u'Designation*', choices=[('admin', 'Admin'), (\n 'stud', 'Student')], validators=[DataRequired()])\n submit = SubmitField('Change your Password')\n\n\nclass changepassword(FlaskForm):\n password = PasswordField('Enter Password', validators=[DataRequired()])\n submit = SubmitField('Change Password')\n\n\nclass ComplaintForm(FlaskForm):\n fname = StringField('Full Name *', validators=[DataRequired()])\n email = EmailField('Email Id*', validators=[DataRequired(), Email()])\n date = DateField('Date', validators=[DataRequired()])\n degree = SelectField(u'Degree*', choices=[('bachelors', 'Bachelors'), (\n 'masters', 'Masters')], validators=[DataRequired()])\n semester = SelectField(u'Semester*', choices=[('first', 'First'), (\n 'second', 'Second'), ('third', 'Third'), ('fourth', 'Fourth'), (\n 'fifth', 'Fifth'), ('sixth', 'Sixth'), ('seventh', 'Seventh'), (\n 'eighth', 'Eighth')], validators=[DataRequired()])\n complaintcategory = SelectField(u'Complain 
Category*', choices=[(\n 'infrastructure', 'Infrastructure'), ('accounts', 'Accounts'), (\n 'academics', 'Academics'), ('management', 'Management'), ('faculty',\n 'Faculty'), ('library', 'Library')], validators=[DataRequired()])\n message = TextAreaField('Enter Complain Details', validators=[\n DataRequired(), Length(max=100)])\n submit = SubmitField('Submit')\n\n\nclass complaint_status(FlaskForm):\n status = SelectField(u'Complaint Status', choices=[('Pending',\n 'Pending'), ('Under Review', 'Under Review'), ('Resolved', 'Resolved')]\n )\n submit = SubmitField('Update')\n", "step-5": "from flask_wtf import FlaskForm\nfrom wtforms import StringField, SelectField,SubmitField, PasswordField, RadioField, MultipleFileField, SubmitField, TextAreaField\nfrom wtforms.fields.html5 import EmailField, TelField, DateField\nfrom wtforms.validators import DataRequired, Email, Length, InputRequired\n\nclass SignUpForm(FlaskForm):\n id = StringField('ID*', validators=[DataRequired()])\n fname = StringField('Full Name*', validators=[DataRequired()])\n email = EmailField('Email Id*',validators=[DataRequired(), Email()])\n password = PasswordField('Password*', validators=[DataRequired()])\n contactno = TelField('Mobile No*.', validators=[DataRequired(), Length(min=10, max=10)])\n design = SelectField(u'Designation*', choices=[('admin', 'Admin'), ('stud', 'Student')], validators=[DataRequired()])\n submit = SubmitField('Sign Up >>')\n\n\nclass LoginForm(FlaskForm):\n email = EmailField('Email Id*',validators=[DataRequired(), Email()])\n password = PasswordField('Password*', validators=[DataRequired()])\n design = SelectField(u'Designation*', choices=[('admin', 'Admin'), ('stud', 'Student')], validators=[DataRequired()])\n submit = SubmitField('Login >>')\n\n\nclass ForgotForm(FlaskForm):\n email = EmailField('Email Id*',validators=[DataRequired(), Email()])\n design = SelectField(u'Designation*', choices=[('admin', 'Admin'), ('stud', 'Student')], validators=[DataRequired()])\n 
submit = SubmitField('Change your Password')\n\n\nclass changepassword(FlaskForm):\n password = PasswordField('Enter Password', validators=[DataRequired()])\n submit = SubmitField('Change Password')\n\n\nclass ComplaintForm(FlaskForm):\n fname = StringField('Full Name *', validators=[DataRequired()])\n email = EmailField('Email Id*',validators=[DataRequired(), Email()])\n date = DateField('Date', validators=[DataRequired()])\n degree = SelectField(u'Degree*', choices=[('bachelors', 'Bachelors'), ('masters', 'Masters')], validators=[DataRequired()])\n semester = SelectField(u'Semester*', choices=[('first', 'First'), ('second', 'Second'), ('third', 'Third'), ('fourth', 'Fourth'), ('fifth', 'Fifth'), ('sixth', 'Sixth'), ('seventh', 'Seventh'), ('eighth', 'Eighth')], validators=[DataRequired()])\n complaintcategory = SelectField(u'Complain Category*', choices=[('infrastructure', 'Infrastructure'), ('accounts', 'Accounts'), ('academics', 'Academics'), ('management', 'Management'), ('faculty', 'Faculty'), ('library', 'Library')], validators=[DataRequired()])\n message = TextAreaField('Enter Complain Details', validators=[DataRequired(), Length(max=100)])\n #file = MultipleFileField(u'Upload File')\n submit = SubmitField('Submit')\n\n\n\n\nclass complaint_status(FlaskForm):\n status = SelectField(u'Complaint Status', choices=[('Pending', 'Pending'), ('Under Review', 'Under Review'), ('Resolved', 'Resolved')])\n submit = SubmitField('Update')\n", "step-ids": [ 8, 10, 11, 12, 14 ] }
[ 8, 10, 11, 12, 14 ]
from sklearn.model_selection import train_test_split from azureml.core import Run from sklearn.ensemble import RandomForestClassifier import pandas as pd import argparse import os import joblib import numpy as np # Get the experiment run context run = Run.get_context() # Get arguments parser = argparse.ArgumentParser() parser.add_argument('--in_n_estimator', type=int, default=8) parser.add_argument('--in_criterion', type=str, default="gini") parser.add_argument('--in_max_depth', type=int, default=2) args = parser.parse_args() in_n_estimators = args.in_n_estimator in_criterion = args.in_criterion in_max_depth = args.in_max_depth # read prepared data df = pd.read_csv("prepared_data.csv") columns = df.iloc[1:2, :-1].columns x = df[columns] y = df.iloc[:, -1:] # split data into train and test x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.25, random_state=2) # “gini”, “entropy” model = RandomForestClassifier(n_estimators=in_n_estimators, criterion=in_criterion, max_depth=in_max_depth) model.fit(x_train, y_train) accuracy = model.score(x_test, y_test) run.log("Accuracy", float(accuracy)) os.makedirs('outputs', exist_ok=True) joblib.dump(model, 'outputs/model_forest.joblib')
normal
{ "blob_id": "66c2d73c100f7fc802e66f2762c92664e4b93fcd", "index": 5736, "step-1": "<mask token>\n", "step-2": "<mask token>\nparser.add_argument('--in_n_estimator', type=int, default=8)\nparser.add_argument('--in_criterion', type=str, default='gini')\nparser.add_argument('--in_max_depth', type=int, default=2)\n<mask token>\nmodel.fit(x_train, y_train)\n<mask token>\nrun.log('Accuracy', float(accuracy))\nos.makedirs('outputs', exist_ok=True)\njoblib.dump(model, 'outputs/model_forest.joblib')\n", "step-3": "<mask token>\nrun = Run.get_context()\nparser = argparse.ArgumentParser()\nparser.add_argument('--in_n_estimator', type=int, default=8)\nparser.add_argument('--in_criterion', type=str, default='gini')\nparser.add_argument('--in_max_depth', type=int, default=2)\nargs = parser.parse_args()\nin_n_estimators = args.in_n_estimator\nin_criterion = args.in_criterion\nin_max_depth = args.in_max_depth\ndf = pd.read_csv('prepared_data.csv')\ncolumns = df.iloc[1:2, :-1].columns\nx = df[columns]\ny = df.iloc[:, -1:]\nx_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.25,\n random_state=2)\nmodel = RandomForestClassifier(n_estimators=in_n_estimators, criterion=\n in_criterion, max_depth=in_max_depth)\nmodel.fit(x_train, y_train)\naccuracy = model.score(x_test, y_test)\nrun.log('Accuracy', float(accuracy))\nos.makedirs('outputs', exist_ok=True)\njoblib.dump(model, 'outputs/model_forest.joblib')\n", "step-4": "from sklearn.model_selection import train_test_split\nfrom azureml.core import Run\nfrom sklearn.ensemble import RandomForestClassifier\nimport pandas as pd\nimport argparse\nimport os\nimport joblib\nimport numpy as np\nrun = Run.get_context()\nparser = argparse.ArgumentParser()\nparser.add_argument('--in_n_estimator', type=int, default=8)\nparser.add_argument('--in_criterion', type=str, default='gini')\nparser.add_argument('--in_max_depth', type=int, default=2)\nargs = parser.parse_args()\nin_n_estimators = args.in_n_estimator\nin_criterion = 
args.in_criterion\nin_max_depth = args.in_max_depth\ndf = pd.read_csv('prepared_data.csv')\ncolumns = df.iloc[1:2, :-1].columns\nx = df[columns]\ny = df.iloc[:, -1:]\nx_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.25,\n random_state=2)\nmodel = RandomForestClassifier(n_estimators=in_n_estimators, criterion=\n in_criterion, max_depth=in_max_depth)\nmodel.fit(x_train, y_train)\naccuracy = model.score(x_test, y_test)\nrun.log('Accuracy', float(accuracy))\nos.makedirs('outputs', exist_ok=True)\njoblib.dump(model, 'outputs/model_forest.joblib')\n", "step-5": "from sklearn.model_selection import train_test_split\nfrom azureml.core import Run\nfrom sklearn.ensemble import RandomForestClassifier\nimport pandas as pd\nimport argparse\nimport os\nimport joblib\nimport numpy as np\n\n\n# Get the experiment run context\nrun = Run.get_context()\n\n# Get arguments\nparser = argparse.ArgumentParser()\nparser.add_argument('--in_n_estimator', type=int, default=8)\nparser.add_argument('--in_criterion', type=str, default=\"gini\")\nparser.add_argument('--in_max_depth', type=int, default=2)\n\nargs = parser.parse_args()\nin_n_estimators = args.in_n_estimator\nin_criterion = args.in_criterion\nin_max_depth = args.in_max_depth\n\n\n# read prepared data\ndf = pd.read_csv(\"prepared_data.csv\")\ncolumns = df.iloc[1:2, :-1].columns\nx = df[columns]\ny = df.iloc[:, -1:]\n\n# split data into train and test\nx_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.25, random_state=2)\n\n# “gini”, “entropy”\nmodel = RandomForestClassifier(n_estimators=in_n_estimators, criterion=in_criterion, max_depth=in_max_depth)\n\nmodel.fit(x_train, y_train)\n\naccuracy = model.score(x_test, y_test)\nrun.log(\"Accuracy\", float(accuracy))\n\nos.makedirs('outputs', exist_ok=True)\njoblib.dump(model, 'outputs/model_forest.joblib')\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
""" Created on Dec 1, 2014 @author: Ira Fich """ import random from igfig.containers import WeightedList class Replacer(): """ A class that replaces itself with a subclass of itself when you instantiate it """ subclass_weight = 0 def __new__(cls, *args, **kwargs): subs = WeightedList(cls.__subclasses__(), [sub.subclass_weight for sub in cls.__subclasses__()]) if subs and cls.go_deeper(subs): newcls = subs.random_choice() return newcls.__new__(newcls, *args, **kwargs) #TODO: check for valid_endpoint() return super().__new__(cls) @classmethod def go_deeper(cls, *args, **kwargs): """ should we go deeper or not when we're given the option? You probably want to override this. For example: return random.randint(0, len(args[0])) and usually you'll check cls.valid_endpoint() too """ return True @classmethod def valid_endpoint(cls): """ is this class a valid point to end our search on? Probably want to override this too, as we may in the future want to be able to end on non-leaf nodes May want to combine this with go_deeper in some way, eventually """ return cls.__subclasses__() == [] @classmethod def get_all_subclasses(cls, filterfn=lambda x:x): subs = [] subs_stack = [cls] while subs_stack: current = subs_stack.pop(0) subs_stack += current.__subclasses__() if filterfn(current): subs.append(current) return subs @classmethod def count_subclass_weights(top_class): """ call this after you create all the classes in question, but before you create any instances of them usually this means put all your related Replacer subclasses in one file, and call this at the end of the file """ for cls in reversed(top_class.get_all_subclasses()): cls.subclass_weight = 0 #reset everything in case we've called this function before if cls.valid_endpoint(): cls.subclass_weight += 1 for subclass in cls.__subclasses__(): cls.subclass_weight += subclass.subclass_weight return {cls: cls.subclass_weight for cls in top_class.get_all_subclasses()} class UniqueReplacer(Replacer): """ variant of Replacer 
that doesn't permit the same subclass to be selected more than once in a given context """ pass class TentativeAssignment(object): #TODO: Rename? """ tentative assignment of keys to values in a constraint-satisfaction problem. Currently it's basically a holder for a DFS "string", with the ability to lock in values (with various degrees of lockedness?) Later, might get a more treelike structure of dependencies to reduce backtracking. Question: aren't there already constraint-satisfaction modules that might do what I want more effectively? They might be too limited, though... they all tend to work to find optimal solutions within finite domains, whereas my stuff tends to be looking for one of many possible good solutions in a near-infinite domain. """ pass if __name__ == "__main__": pass
normal
{ "blob_id": "3a878c91218dfbf23477ae5b7561e9eecfcd1350", "index": 5053, "step-1": "<mask token>\n\n\nclass Replacer:\n <mask token>\n <mask token>\n\n def __new__(cls, *args, **kwargs):\n subs = WeightedList(cls.__subclasses__(), [sub.subclass_weight for\n sub in cls.__subclasses__()])\n if subs and cls.go_deeper(subs):\n newcls = subs.random_choice()\n return newcls.__new__(newcls, *args, **kwargs)\n return super().__new__(cls)\n <mask token>\n <mask token>\n <mask token>\n\n @classmethod\n def count_subclass_weights(top_class):\n \"\"\"\n\t\tcall this after you create all the classes in question, but before you create any instances of them\n\t\tusually this means put all your related Replacer subclasses in one file, and call this at the end of the file \n\t\t\"\"\"\n for cls in reversed(top_class.get_all_subclasses()):\n cls.subclass_weight = 0\n if cls.valid_endpoint():\n cls.subclass_weight += 1\n for subclass in cls.__subclasses__():\n cls.subclass_weight += subclass.subclass_weight\n return {cls: cls.subclass_weight for cls in top_class.\n get_all_subclasses()}\n\n\nclass UniqueReplacer(Replacer):\n \"\"\"\n\tvariant of Replacer that doesn't permit the same subclass to be selected more than once in a given context\n\t\"\"\"\n pass\n\n\nclass TentativeAssignment(object):\n \"\"\"\n\ttentative assignment of keys to values in a constraint-satisfaction problem.\n\t\n\tCurrently it's basically a holder for a DFS \"string\", with the ability to lock in values (with various degrees of lockedness?)\n\tLater, might get a more treelike structure of dependencies to reduce backtracking.\n\t\n\tQuestion: aren't there already constraint-satisfaction modules that might do what I want more effectively?\n\tThey might be too limited, though... they all tend to work to find optimal solutions within finite domains, whereas my stuff tends \n\tto be looking for one of many possible good solutions in a near-infinite domain. 
\n\t\"\"\"\n pass\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\nclass Replacer:\n <mask token>\n <mask token>\n\n def __new__(cls, *args, **kwargs):\n subs = WeightedList(cls.__subclasses__(), [sub.subclass_weight for\n sub in cls.__subclasses__()])\n if subs and cls.go_deeper(subs):\n newcls = subs.random_choice()\n return newcls.__new__(newcls, *args, **kwargs)\n return super().__new__(cls)\n\n @classmethod\n def go_deeper(cls, *args, **kwargs):\n \"\"\"\n\t\tshould we go deeper or not when we're given the option?\n\t\t\n\t\tYou probably want to override this. For example:\n\t\treturn random.randint(0, len(args[0]))\n\t\t\n\t\tand usually you'll check cls.valid_endpoint() too\n\t\t\"\"\"\n return True\n\n @classmethod\n def valid_endpoint(cls):\n \"\"\"\n\t\tis this class a valid point to end our search on?\n\t\t\n\t\tProbably want to override this too, as we may in the future\n\t\twant to be able to end on non-leaf nodes\n\t\t\n\t\tMay want to combine this with go_deeper in some way, eventually\n\t\t\"\"\"\n return cls.__subclasses__() == []\n\n @classmethod\n def get_all_subclasses(cls, filterfn=lambda x: x):\n subs = []\n subs_stack = [cls]\n while subs_stack:\n current = subs_stack.pop(0)\n subs_stack += current.__subclasses__()\n if filterfn(current):\n subs.append(current)\n return subs\n\n @classmethod\n def count_subclass_weights(top_class):\n \"\"\"\n\t\tcall this after you create all the classes in question, but before you create any instances of them\n\t\tusually this means put all your related Replacer subclasses in one file, and call this at the end of the file \n\t\t\"\"\"\n for cls in reversed(top_class.get_all_subclasses()):\n cls.subclass_weight = 0\n if cls.valid_endpoint():\n cls.subclass_weight += 1\n for subclass in cls.__subclasses__():\n cls.subclass_weight += subclass.subclass_weight\n return {cls: cls.subclass_weight for cls in top_class.\n get_all_subclasses()}\n\n\nclass UniqueReplacer(Replacer):\n \"\"\"\n\tvariant of Replacer 
that doesn't permit the same subclass to be selected more than once in a given context\n\t\"\"\"\n pass\n\n\nclass TentativeAssignment(object):\n \"\"\"\n\ttentative assignment of keys to values in a constraint-satisfaction problem.\n\t\n\tCurrently it's basically a holder for a DFS \"string\", with the ability to lock in values (with various degrees of lockedness?)\n\tLater, might get a more treelike structure of dependencies to reduce backtracking.\n\t\n\tQuestion: aren't there already constraint-satisfaction modules that might do what I want more effectively?\n\tThey might be too limited, though... they all tend to work to find optimal solutions within finite domains, whereas my stuff tends \n\tto be looking for one of many possible good solutions in a near-infinite domain. \n\t\"\"\"\n pass\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\nclass Replacer:\n <mask token>\n subclass_weight = 0\n\n def __new__(cls, *args, **kwargs):\n subs = WeightedList(cls.__subclasses__(), [sub.subclass_weight for\n sub in cls.__subclasses__()])\n if subs and cls.go_deeper(subs):\n newcls = subs.random_choice()\n return newcls.__new__(newcls, *args, **kwargs)\n return super().__new__(cls)\n\n @classmethod\n def go_deeper(cls, *args, **kwargs):\n \"\"\"\n\t\tshould we go deeper or not when we're given the option?\n\t\t\n\t\tYou probably want to override this. 
For example:\n\t\treturn random.randint(0, len(args[0]))\n\t\t\n\t\tand usually you'll check cls.valid_endpoint() too\n\t\t\"\"\"\n return True\n\n @classmethod\n def valid_endpoint(cls):\n \"\"\"\n\t\tis this class a valid point to end our search on?\n\t\t\n\t\tProbably want to override this too, as we may in the future\n\t\twant to be able to end on non-leaf nodes\n\t\t\n\t\tMay want to combine this with go_deeper in some way, eventually\n\t\t\"\"\"\n return cls.__subclasses__() == []\n\n @classmethod\n def get_all_subclasses(cls, filterfn=lambda x: x):\n subs = []\n subs_stack = [cls]\n while subs_stack:\n current = subs_stack.pop(0)\n subs_stack += current.__subclasses__()\n if filterfn(current):\n subs.append(current)\n return subs\n\n @classmethod\n def count_subclass_weights(top_class):\n \"\"\"\n\t\tcall this after you create all the classes in question, but before you create any instances of them\n\t\tusually this means put all your related Replacer subclasses in one file, and call this at the end of the file \n\t\t\"\"\"\n for cls in reversed(top_class.get_all_subclasses()):\n cls.subclass_weight = 0\n if cls.valid_endpoint():\n cls.subclass_weight += 1\n for subclass in cls.__subclasses__():\n cls.subclass_weight += subclass.subclass_weight\n return {cls: cls.subclass_weight for cls in top_class.\n get_all_subclasses()}\n\n\nclass UniqueReplacer(Replacer):\n \"\"\"\n\tvariant of Replacer that doesn't permit the same subclass to be selected more than once in a given context\n\t\"\"\"\n pass\n\n\nclass TentativeAssignment(object):\n \"\"\"\n\ttentative assignment of keys to values in a constraint-satisfaction problem.\n\t\n\tCurrently it's basically a holder for a DFS \"string\", with the ability to lock in values (with various degrees of lockedness?)\n\tLater, might get a more treelike structure of dependencies to reduce backtracking.\n\t\n\tQuestion: aren't there already constraint-satisfaction modules that might do what I want more effectively?\n\tThey 
might be too limited, though... they all tend to work to find optimal solutions within finite domains, whereas my stuff tends \n\tto be looking for one of many possible good solutions in a near-infinite domain. \n\t\"\"\"\n pass\n\n\n<mask token>\n", "step-4": "<mask token>\n\n\nclass Replacer:\n \"\"\"\n\tA class that replaces itself with a subclass of itself when you instantiate it\n\t\"\"\"\n subclass_weight = 0\n\n def __new__(cls, *args, **kwargs):\n subs = WeightedList(cls.__subclasses__(), [sub.subclass_weight for\n sub in cls.__subclasses__()])\n if subs and cls.go_deeper(subs):\n newcls = subs.random_choice()\n return newcls.__new__(newcls, *args, **kwargs)\n return super().__new__(cls)\n\n @classmethod\n def go_deeper(cls, *args, **kwargs):\n \"\"\"\n\t\tshould we go deeper or not when we're given the option?\n\t\t\n\t\tYou probably want to override this. For example:\n\t\treturn random.randint(0, len(args[0]))\n\t\t\n\t\tand usually you'll check cls.valid_endpoint() too\n\t\t\"\"\"\n return True\n\n @classmethod\n def valid_endpoint(cls):\n \"\"\"\n\t\tis this class a valid point to end our search on?\n\t\t\n\t\tProbably want to override this too, as we may in the future\n\t\twant to be able to end on non-leaf nodes\n\t\t\n\t\tMay want to combine this with go_deeper in some way, eventually\n\t\t\"\"\"\n return cls.__subclasses__() == []\n\n @classmethod\n def get_all_subclasses(cls, filterfn=lambda x: x):\n subs = []\n subs_stack = [cls]\n while subs_stack:\n current = subs_stack.pop(0)\n subs_stack += current.__subclasses__()\n if filterfn(current):\n subs.append(current)\n return subs\n\n @classmethod\n def count_subclass_weights(top_class):\n \"\"\"\n\t\tcall this after you create all the classes in question, but before you create any instances of them\n\t\tusually this means put all your related Replacer subclasses in one file, and call this at the end of the file \n\t\t\"\"\"\n for cls in reversed(top_class.get_all_subclasses()):\n 
cls.subclass_weight = 0\n if cls.valid_endpoint():\n cls.subclass_weight += 1\n for subclass in cls.__subclasses__():\n cls.subclass_weight += subclass.subclass_weight\n return {cls: cls.subclass_weight for cls in top_class.\n get_all_subclasses()}\n\n\nclass UniqueReplacer(Replacer):\n \"\"\"\n\tvariant of Replacer that doesn't permit the same subclass to be selected more than once in a given context\n\t\"\"\"\n pass\n\n\nclass TentativeAssignment(object):\n \"\"\"\n\ttentative assignment of keys to values in a constraint-satisfaction problem.\n\t\n\tCurrently it's basically a holder for a DFS \"string\", with the ability to lock in values (with various degrees of lockedness?)\n\tLater, might get a more treelike structure of dependencies to reduce backtracking.\n\t\n\tQuestion: aren't there already constraint-satisfaction modules that might do what I want more effectively?\n\tThey might be too limited, though... they all tend to work to find optimal solutions within finite domains, whereas my stuff tends \n\tto be looking for one of many possible good solutions in a near-infinite domain. \n\t\"\"\"\n pass\n\n\nif __name__ == '__main__':\n pass\n", "step-5": "\"\"\"\nCreated on Dec 1, 2014\n\n@author: Ira Fich\n\"\"\"\n\n\nimport random\nfrom igfig.containers import WeightedList\n\n\nclass Replacer():\n\t\"\"\"\n\tA class that replaces itself with a subclass of itself when you instantiate it\n\t\"\"\"\n\tsubclass_weight = 0\n\t\n\tdef __new__(cls, *args, **kwargs):\n\t\tsubs = WeightedList(cls.__subclasses__(), [sub.subclass_weight for sub in cls.__subclasses__()])\n\t\t\n\t\tif subs and cls.go_deeper(subs): \n\t\t\tnewcls = subs.random_choice()\n\t\t\treturn newcls.__new__(newcls, *args, **kwargs)\n\t\t\n\t\t#TODO: check for valid_endpoint()\n\t\treturn super().__new__(cls)\n\t\n\t\n\t@classmethod\n\tdef go_deeper(cls, *args, **kwargs):\n\t\t\"\"\"\n\t\tshould we go deeper or not when we're given the option?\n\t\t\n\t\tYou probably want to override this. 
For example:\n\t\treturn random.randint(0, len(args[0]))\n\t\t\n\t\tand usually you'll check cls.valid_endpoint() too\n\t\t\"\"\"\n\t\treturn True\n\t\n\t\n\t@classmethod\n\tdef valid_endpoint(cls):\n\t\t\"\"\"\n\t\tis this class a valid point to end our search on?\n\t\t\n\t\tProbably want to override this too, as we may in the future\n\t\twant to be able to end on non-leaf nodes\n\t\t\n\t\tMay want to combine this with go_deeper in some way, eventually\n\t\t\"\"\"\n\t\treturn cls.__subclasses__() == []\n\t\n\t\n\t@classmethod\n\tdef get_all_subclasses(cls, filterfn=lambda x:x):\n\t\tsubs = []\n\t\tsubs_stack = [cls]\n\t\t\n\t\twhile subs_stack:\n\t\t\tcurrent = subs_stack.pop(0)\n\t\t\tsubs_stack += current.__subclasses__()\n\t\t\t\n\t\t\tif filterfn(current):\n\t\t\t\tsubs.append(current)\n\t\n\t\treturn subs\n\t\n\t\n\t@classmethod\n\tdef count_subclass_weights(top_class):\n\t\t\"\"\"\n\t\tcall this after you create all the classes in question, but before you create any instances of them\n\t\tusually this means put all your related Replacer subclasses in one file, and call this at the end of the file \n\t\t\"\"\"\n\t\t\n\t\tfor cls in reversed(top_class.get_all_subclasses()):\n\t\t\tcls.subclass_weight = 0 #reset everything in case we've called this function before\n\t\t\t\n\t\t\tif cls.valid_endpoint():\n\t\t\t\tcls.subclass_weight += 1\n\t\t\tfor subclass in cls.__subclasses__():\n\t\t\t\tcls.subclass_weight += subclass.subclass_weight\n\t\t\t\t\n\t\treturn {cls: cls.subclass_weight for cls in top_class.get_all_subclasses()}\n\t\t\n\n\n\nclass UniqueReplacer(Replacer):\n\t\"\"\"\n\tvariant of Replacer that doesn't permit the same subclass to be selected more than once in a given context\n\t\"\"\"\n\tpass\n\n\nclass TentativeAssignment(object): #TODO: Rename?\n\t\"\"\"\n\ttentative assignment of keys to values in a constraint-satisfaction problem.\n\t\n\tCurrently it's basically a holder for a DFS \"string\", with the ability to lock in values (with various 
degrees of lockedness?)\n\tLater, might get a more treelike structure of dependencies to reduce backtracking.\n\t\n\tQuestion: aren't there already constraint-satisfaction modules that might do what I want more effectively?\n\tThey might be too limited, though... they all tend to work to find optimal solutions within finite domains, whereas my stuff tends \n\tto be looking for one of many possible good solutions in a near-infinite domain. \n\t\"\"\"\n\tpass\n\nif __name__ == \"__main__\":\n\tpass", "step-ids": [ 7, 10, 11, 13, 15 ] }
[ 7, 10, 11, 13, 15 ]
newList = [] noDuplicate = [] while True: elem = input("Enter a letter : (type quit to quit) ") if elem.lower() != "quit": newList.append(elem) else: break for item in newList: if item not in noDuplicate: noDuplicate.append(item) print(noDuplicate)
normal
{ "blob_id": "7273592ab8fea10d9a3cde58690063690c74b746", "index": 4635, "step-1": "<mask token>\n", "step-2": "<mask token>\nwhile True:\n elem = input('Enter a letter : (type quit to quit) ')\n if elem.lower() != 'quit':\n newList.append(elem)\n else:\n break\nfor item in newList:\n if item not in noDuplicate:\n noDuplicate.append(item)\nprint(noDuplicate)\n", "step-3": "newList = []\nnoDuplicate = []\nwhile True:\n elem = input('Enter a letter : (type quit to quit) ')\n if elem.lower() != 'quit':\n newList.append(elem)\n else:\n break\nfor item in newList:\n if item not in noDuplicate:\n noDuplicate.append(item)\nprint(noDuplicate)\n", "step-4": "newList = []\nnoDuplicate = []\n\nwhile True:\n elem = input(\"Enter a letter : (type quit to quit) \")\n if elem.lower() != \"quit\":\n newList.append(elem)\n else:\n break\n\nfor item in newList:\n if item not in noDuplicate:\n noDuplicate.append(item)\n\nprint(noDuplicate)", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
import sys from arguments_parser import parse_args from open_ldap import OpenLdap from csv_parser import parse_csv, random_password from smtp_mail import SmtpServer def create_user(open_ldap, smtp, entries): """ If the 'ldap_insert' returns True, then the email will be send with the account info. """ try: if open_ldap.ldap_insert(entries): smtp.send_email(entries) return True else: return False except Exception as e: print('ERROR - ', e) return def run(args): """ Creates the OpenLDAP and SMTP objects and iterates over the .csv file. Calls the create_user function and check the result (if 'true' the count will be increased). Returns the total count of users created. """ open_ldap = OpenLdap(args.user, args.password, args.address) smtp = SmtpServer(args.smtp_host, args.port, args.email, args.email_password) entries = {} count = 0 for row in parse_csv(args.file): try: entries['name'] = row['name'] entries['lastname'] = row['lastname'] entries['email'] = row['email'] except KeyError as e: return "ERROR - Missing '{}' csv header".format(e) entries['password'] = random_password() if create_user(open_ldap, smtp, entries): count += 1 return "INFO - Finished. Total of {} user(s) created".format(count) def main(): args = parse_args() print(run(args)) return 0 if __name__ == "__main__": sys.exit(main())
normal
{ "blob_id": "4f0a0089ad128edca3052da58a4c71f935592e25", "index": 4499, "step-1": "<mask token>\n\n\ndef create_user(open_ldap, smtp, entries):\n \"\"\"\n If the 'ldap_insert' returns True, then\n the email will be send with the account info.\n \"\"\"\n try:\n if open_ldap.ldap_insert(entries):\n smtp.send_email(entries)\n return True\n else:\n return False\n except Exception as e:\n print('ERROR - ', e)\n return\n\n\n<mask token>\n\n\ndef main():\n args = parse_args()\n print(run(args))\n return 0\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\ndef create_user(open_ldap, smtp, entries):\n \"\"\"\n If the 'ldap_insert' returns True, then\n the email will be send with the account info.\n \"\"\"\n try:\n if open_ldap.ldap_insert(entries):\n smtp.send_email(entries)\n return True\n else:\n return False\n except Exception as e:\n print('ERROR - ', e)\n return\n\n\ndef run(args):\n \"\"\"\n Creates the OpenLDAP and SMTP\n objects and iterates over the .csv file.\n Calls the create_user function and check the\n result (if 'true' the count will be increased).\n Returns the total count of users created.\n \"\"\"\n open_ldap = OpenLdap(args.user, args.password, args.address)\n smtp = SmtpServer(args.smtp_host, args.port, args.email, args.\n email_password)\n entries = {}\n count = 0\n for row in parse_csv(args.file):\n try:\n entries['name'] = row['name']\n entries['lastname'] = row['lastname']\n entries['email'] = row['email']\n except KeyError as e:\n return \"ERROR - Missing '{}' csv header\".format(e)\n entries['password'] = random_password()\n if create_user(open_ldap, smtp, entries):\n count += 1\n return 'INFO - Finished. 
Total of {} user(s) created'.format(count)\n\n\ndef main():\n args = parse_args()\n print(run(args))\n return 0\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\ndef create_user(open_ldap, smtp, entries):\n \"\"\"\n If the 'ldap_insert' returns True, then\n the email will be send with the account info.\n \"\"\"\n try:\n if open_ldap.ldap_insert(entries):\n smtp.send_email(entries)\n return True\n else:\n return False\n except Exception as e:\n print('ERROR - ', e)\n return\n\n\ndef run(args):\n \"\"\"\n Creates the OpenLDAP and SMTP\n objects and iterates over the .csv file.\n Calls the create_user function and check the\n result (if 'true' the count will be increased).\n Returns the total count of users created.\n \"\"\"\n open_ldap = OpenLdap(args.user, args.password, args.address)\n smtp = SmtpServer(args.smtp_host, args.port, args.email, args.\n email_password)\n entries = {}\n count = 0\n for row in parse_csv(args.file):\n try:\n entries['name'] = row['name']\n entries['lastname'] = row['lastname']\n entries['email'] = row['email']\n except KeyError as e:\n return \"ERROR - Missing '{}' csv header\".format(e)\n entries['password'] = random_password()\n if create_user(open_ldap, smtp, entries):\n count += 1\n return 'INFO - Finished. 
Total of {} user(s) created'.format(count)\n\n\ndef main():\n args = parse_args()\n print(run(args))\n return 0\n\n\nif __name__ == '__main__':\n sys.exit(main())\n", "step-4": "import sys\nfrom arguments_parser import parse_args\nfrom open_ldap import OpenLdap\nfrom csv_parser import parse_csv, random_password\nfrom smtp_mail import SmtpServer\n\n\ndef create_user(open_ldap, smtp, entries):\n \"\"\"\n If the 'ldap_insert' returns True, then\n the email will be send with the account info.\n \"\"\"\n try:\n if open_ldap.ldap_insert(entries):\n smtp.send_email(entries)\n return True\n else:\n return False\n except Exception as e:\n print('ERROR - ', e)\n return\n\n\ndef run(args):\n \"\"\"\n Creates the OpenLDAP and SMTP\n objects and iterates over the .csv file.\n Calls the create_user function and check the\n result (if 'true' the count will be increased).\n Returns the total count of users created.\n \"\"\"\n open_ldap = OpenLdap(args.user, args.password, args.address)\n smtp = SmtpServer(args.smtp_host, args.port, args.email, args.\n email_password)\n entries = {}\n count = 0\n for row in parse_csv(args.file):\n try:\n entries['name'] = row['name']\n entries['lastname'] = row['lastname']\n entries['email'] = row['email']\n except KeyError as e:\n return \"ERROR - Missing '{}' csv header\".format(e)\n entries['password'] = random_password()\n if create_user(open_ldap, smtp, entries):\n count += 1\n return 'INFO - Finished. 
Total of {} user(s) created'.format(count)\n\n\ndef main():\n args = parse_args()\n print(run(args))\n return 0\n\n\nif __name__ == '__main__':\n sys.exit(main())\n", "step-5": "import sys\nfrom arguments_parser import parse_args\nfrom open_ldap import OpenLdap\nfrom csv_parser import parse_csv, random_password\nfrom smtp_mail import SmtpServer\n\n\ndef create_user(open_ldap, smtp, entries):\n \"\"\"\n If the 'ldap_insert' returns True, then\n the email will be send with the account info.\n \"\"\"\n try:\n if open_ldap.ldap_insert(entries):\n smtp.send_email(entries)\n return True\n else:\n return False\n except Exception as e:\n print('ERROR - ', e)\n return\n\n\ndef run(args):\n \"\"\"\n Creates the OpenLDAP and SMTP\n objects and iterates over the .csv file.\n Calls the create_user function and check the\n result (if 'true' the count will be increased).\n Returns the total count of users created.\n \"\"\"\n open_ldap = OpenLdap(args.user,\n args.password,\n args.address)\n smtp = SmtpServer(args.smtp_host,\n args.port,\n args.email,\n args.email_password)\n entries = {}\n count = 0\n for row in parse_csv(args.file):\n try:\n entries['name'] = row['name']\n entries['lastname'] = row['lastname']\n entries['email'] = row['email']\n except KeyError as e:\n return \"ERROR - Missing '{}' csv header\".format(e)\n entries['password'] = random_password()\n if create_user(open_ldap, smtp, entries):\n count += 1\n return \"INFO - Finished. Total of {} user(s) created\".format(count)\n\n\ndef main():\n args = parse_args()\n print(run(args))\n return 0\n\n\nif __name__ == \"__main__\":\n sys.exit(main())\n", "step-ids": [ 2, 3, 4, 5, 6 ] }
[ 2, 3, 4, 5, 6 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> for i in range(n): t, x, y = map(int, input().split()) diff = abs(x - p[0]) + abs(y - p[1]) time = t - b if diff > time or time % 2 != diff % 2: flg = False break else: b = t p[0] = x p[1] = y if flg: print('Yes') else: print('No') <|reserved_special_token_1|> n = int(input()) b = 0 p = [0, 0] flg = True for i in range(n): t, x, y = map(int, input().split()) diff = abs(x - p[0]) + abs(y - p[1]) time = t - b if diff > time or time % 2 != diff % 2: flg = False break else: b = t p[0] = x p[1] = y if flg: print('Yes') else: print('No') <|reserved_special_token_1|> n = int(input()) b = 0 p = [0,0] flg = True for i in range(n): t,x,y = map(int,input().split()) diff = abs(x - p[0]) + abs(y - p[1]) time = t - b if(diff > time or time%2 != diff %2): flg = False break else: b = t p[0] = x p[1] = y if flg: print("Yes") else: print("No")
flexible
{ "blob_id": "8bc465a1b546907d8a9e5eee2cae672befb1ea13", "index": 7808, "step-1": "<mask token>\n", "step-2": "<mask token>\nfor i in range(n):\n t, x, y = map(int, input().split())\n diff = abs(x - p[0]) + abs(y - p[1])\n time = t - b\n if diff > time or time % 2 != diff % 2:\n flg = False\n break\n else:\n b = t\n p[0] = x\n p[1] = y\nif flg:\n print('Yes')\nelse:\n print('No')\n", "step-3": "n = int(input())\nb = 0\np = [0, 0]\nflg = True\nfor i in range(n):\n t, x, y = map(int, input().split())\n diff = abs(x - p[0]) + abs(y - p[1])\n time = t - b\n if diff > time or time % 2 != diff % 2:\n flg = False\n break\n else:\n b = t\n p[0] = x\n p[1] = y\nif flg:\n print('Yes')\nelse:\n print('No')\n", "step-4": "n = int(input())\n\nb = 0\np = [0,0]\n\nflg = True\n\n\n\nfor i in range(n):\n t,x,y = map(int,input().split())\n\n diff = abs(x - p[0]) + abs(y - p[1])\n time = t - b\n if(diff > time or time%2 != diff %2):\n flg = False\n break\n else:\n b = t\n p[0] = x\n p[1] = y\n\nif flg:\n print(\"Yes\")\nelse:\n print(\"No\")\n\n\n\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
import csv import hashdate as hd with open('Grainger_Library.csv', newline='') as f: reader = csv.reader(f) data = list(reader) del data[0] gld = [] glo = [] data.sort(key=lambda x:x[1]) for i in range(0,len(data)): gld.append((data[i][1],data[i][2])) print('ahd:') #print(ahd) glh = hd.hashdate(365,20200101) for i in range(0,len(gld)): glh.insert(gld[i][0], gld[i][1]) print('ahh:') glh.display() for i in range(0,len(glh.t)): if glh.t[i] != None: glo.append((glh.outd(i),glh.outn(i))) #print(ahh.outd(i)) print('aho:') print(glo)
normal
{ "blob_id": "79ff164c36cc5f0a2382a571ec183952a03e66cc", "index": 9570, "step-1": "<mask token>\n", "step-2": "<mask token>\nwith open('Grainger_Library.csv', newline='') as f:\n reader = csv.reader(f)\n data = list(reader)\ndel data[0]\n<mask token>\ndata.sort(key=lambda x: x[1])\nfor i in range(0, len(data)):\n gld.append((data[i][1], data[i][2]))\nprint('ahd:')\n<mask token>\nfor i in range(0, len(gld)):\n glh.insert(gld[i][0], gld[i][1])\nprint('ahh:')\nglh.display()\nfor i in range(0, len(glh.t)):\n if glh.t[i] != None:\n glo.append((glh.outd(i), glh.outn(i)))\nprint('aho:')\nprint(glo)\n", "step-3": "<mask token>\nwith open('Grainger_Library.csv', newline='') as f:\n reader = csv.reader(f)\n data = list(reader)\ndel data[0]\ngld = []\nglo = []\ndata.sort(key=lambda x: x[1])\nfor i in range(0, len(data)):\n gld.append((data[i][1], data[i][2]))\nprint('ahd:')\nglh = hd.hashdate(365, 20200101)\nfor i in range(0, len(gld)):\n glh.insert(gld[i][0], gld[i][1])\nprint('ahh:')\nglh.display()\nfor i in range(0, len(glh.t)):\n if glh.t[i] != None:\n glo.append((glh.outd(i), glh.outn(i)))\nprint('aho:')\nprint(glo)\n", "step-4": "import csv\nimport hashdate as hd\nwith open('Grainger_Library.csv', newline='') as f:\n reader = csv.reader(f)\n data = list(reader)\ndel data[0]\ngld = []\nglo = []\ndata.sort(key=lambda x: x[1])\nfor i in range(0, len(data)):\n gld.append((data[i][1], data[i][2]))\nprint('ahd:')\nglh = hd.hashdate(365, 20200101)\nfor i in range(0, len(gld)):\n glh.insert(gld[i][0], gld[i][1])\nprint('ahh:')\nglh.display()\nfor i in range(0, len(glh.t)):\n if glh.t[i] != None:\n glo.append((glh.outd(i), glh.outn(i)))\nprint('aho:')\nprint(glo)\n", "step-5": "import csv\nimport hashdate as hd\n\n\n\nwith open('Grainger_Library.csv', newline='') as f:\n reader = csv.reader(f)\n data = list(reader)\ndel data[0]\ngld = []\nglo = []\ndata.sort(key=lambda x:x[1])\n\nfor i in range(0,len(data)):\n gld.append((data[i][1],data[i][2]))\nprint('ahd:')\n#print(ahd)\nglh 
= hd.hashdate(365,20200101)\nfor i in range(0,len(gld)):\n glh.insert(gld[i][0], gld[i][1])\nprint('ahh:')\nglh.display()\nfor i in range(0,len(glh.t)):\n if glh.t[i] != None:\n glo.append((glh.outd(i),glh.outn(i)))\n #print(ahh.outd(i))\nprint('aho:')\nprint(glo)", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
from copy import deepcopy from datetime import date, timedelta from hashlib import sha256 import starkbank from starkbank import BoletoPayment from .boleto import generateExampleBoletosJson example_payment = BoletoPayment( line="34191.09008 61713.957308 71444.640008 2 83430000984732", scheduled="2020-02-29", description="loading a random account", tax_id="20.018.183/0001-80", ) def generateExampleBoletoPaymentsJson(n=1, next_day=False): boletos = generateExampleBoletosJson(n=n) boletos = starkbank.boleto.create(boletos) payments = [] for boleto in boletos: payment = deepcopy(example_payment) payment.line = boleto.line payment.scheduled = min((date.today() + timedelta(days=1)) if next_day else date.today(), (boleto.due - timedelta(hours=3)).date()) payment.description = sha256(str(boleto.id).encode('utf-8')).hexdigest() payments.append(payment) return payments
normal
{ "blob_id": "383d3b35fbfb7921111b28c3160173ce1c200387", "index": 637, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\ndef generateExampleBoletoPaymentsJson(n=1, next_day=False):\n boletos = generateExampleBoletosJson(n=n)\n boletos = starkbank.boleto.create(boletos)\n payments = []\n for boleto in boletos:\n payment = deepcopy(example_payment)\n payment.line = boleto.line\n payment.scheduled = min(date.today() + timedelta(days=1) if\n next_day else date.today(), (boleto.due - timedelta(hours=3)).\n date())\n payment.description = sha256(str(boleto.id).encode('utf-8')).hexdigest(\n )\n payments.append(payment)\n return payments\n", "step-3": "<mask token>\nexample_payment = BoletoPayment(line=\n '34191.09008 61713.957308 71444.640008 2 83430000984732', scheduled=\n '2020-02-29', description='loading a random account', tax_id=\n '20.018.183/0001-80')\n\n\ndef generateExampleBoletoPaymentsJson(n=1, next_day=False):\n boletos = generateExampleBoletosJson(n=n)\n boletos = starkbank.boleto.create(boletos)\n payments = []\n for boleto in boletos:\n payment = deepcopy(example_payment)\n payment.line = boleto.line\n payment.scheduled = min(date.today() + timedelta(days=1) if\n next_day else date.today(), (boleto.due - timedelta(hours=3)).\n date())\n payment.description = sha256(str(boleto.id).encode('utf-8')).hexdigest(\n )\n payments.append(payment)\n return payments\n", "step-4": "from copy import deepcopy\nfrom datetime import date, timedelta\nfrom hashlib import sha256\nimport starkbank\nfrom starkbank import BoletoPayment\nfrom .boleto import generateExampleBoletosJson\nexample_payment = BoletoPayment(line=\n '34191.09008 61713.957308 71444.640008 2 83430000984732', scheduled=\n '2020-02-29', description='loading a random account', tax_id=\n '20.018.183/0001-80')\n\n\ndef generateExampleBoletoPaymentsJson(n=1, next_day=False):\n boletos = generateExampleBoletosJson(n=n)\n boletos = starkbank.boleto.create(boletos)\n payments = []\n for boleto in boletos:\n 
payment = deepcopy(example_payment)\n payment.line = boleto.line\n payment.scheduled = min(date.today() + timedelta(days=1) if\n next_day else date.today(), (boleto.due - timedelta(hours=3)).\n date())\n payment.description = sha256(str(boleto.id).encode('utf-8')).hexdigest(\n )\n payments.append(payment)\n return payments\n", "step-5": "from copy import deepcopy\nfrom datetime import date, timedelta\nfrom hashlib import sha256\nimport starkbank\nfrom starkbank import BoletoPayment\nfrom .boleto import generateExampleBoletosJson\n\n\nexample_payment = BoletoPayment(\n line=\"34191.09008 61713.957308 71444.640008 2 83430000984732\",\n scheduled=\"2020-02-29\",\n description=\"loading a random account\",\n tax_id=\"20.018.183/0001-80\",\n)\n\n\ndef generateExampleBoletoPaymentsJson(n=1, next_day=False):\n boletos = generateExampleBoletosJson(n=n)\n\n boletos = starkbank.boleto.create(boletos)\n\n payments = []\n for boleto in boletos:\n payment = deepcopy(example_payment)\n payment.line = boleto.line\n payment.scheduled = min((date.today() + timedelta(days=1)) if next_day else date.today(), (boleto.due - timedelta(hours=3)).date())\n payment.description = sha256(str(boleto.id).encode('utf-8')).hexdigest()\n payments.append(payment)\n return payments\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
# -*- coding: utf-8 -*- """Success request logging. This logging is used by "CheckZope" to determine the amount of work performed by Zope (in order not to bother it with monitor probes when it is heavily active) and to detect an unreasonable error rate. This logging writes two files "<base>_good.<date>" and "<base>_bad.<date>". For each request, a character is writen to either the good or the bad logfile, depending on whether the request was successful or unsuccessful. This means, that only the file size matters for these logfiles. Usually, response codes >= 500 are considered as unsuccessful requests. You can register an "ISuccessFull" adapter, when you need a different classification. To activate this logging, both "successlogging.zcml" must be activated and a "product-config" section with name "successlogging" must be defined containing the key "filebase". It specifies the basename of the logfiles (represented as "<base>" above). """ from .interfaces import IStatus from .interfaces import ISuccessFull from .Rotator import Rotator from zope.processlifetime import IProcessStarting from zope.component import adapter from zope.component import provideHandler from ZPublisher.interfaces import IPubFailure from ZPublisher.interfaces import IPubSuccess _log_good = _log_bad = None @adapter(IProcessStarting) def start_successlogging(unused): """start successlogging if configured.""" from App.config import getConfiguration config = getConfiguration().product_config.get('successlogging') if config is None: return # not configured global _log_good, _log_bad _log_good = Rotator(config['filebase'] + '_good', lock=True) _log_bad = Rotator(config['filebase'] + '_bad', lock=True) # register publication observers provideHandler(handle_request_success) provideHandler(handle_request_failure) @adapter(IPubSuccess) def handle_request_success(event): """handle "IPubSuccess".""" _log_good.write('*') @adapter(IPubFailure) def handle_request_failure(event): """handle "IPubFailure".""" 
request = event.request if event.retry: handle_request_success(event) else: # Note: Zope forgets (at least sometimes) # to inform the response about the exception. # Work around this bug. # When Zope3 views are used for error handling, they no longer # communicate via exceptions with the ZPublisher. Instead, they seem # to use 'setBody' which interferes with the 'exception' call below. # We work around this problem by saving the response state and then # restore it again. Of course, this no longer works around the Zope # bug (forgetting to call 'exception') mentioned above. response = request.response saved = response.__dict__.copy() response.setStatus(event.exc_info[0]) ok = ISuccessFull(response, None) if ok is None: status = IStatus(response, None) if status is None: status = response.getStatus() else: status = int(status) ok = status < 500 if bool(ok): handle_request_success(event) else: _log_bad.write('*') response.__dict__.update(saved) # restore response again
normal
{ "blob_id": "2edbf18c90da1ff40fd9abaf25a35dbdaf733bc1", "index": 2786, "step-1": "<mask token>\n\n\n@adapter(IProcessStarting)\ndef start_successlogging(unused):\n \"\"\"start successlogging if configured.\"\"\"\n from App.config import getConfiguration\n config = getConfiguration().product_config.get('successlogging')\n if config is None:\n return\n global _log_good, _log_bad\n _log_good = Rotator(config['filebase'] + '_good', lock=True)\n _log_bad = Rotator(config['filebase'] + '_bad', lock=True)\n provideHandler(handle_request_success)\n provideHandler(handle_request_failure)\n\n\n<mask token>\n\n\n@adapter(IPubFailure)\ndef handle_request_failure(event):\n \"\"\"handle \"IPubFailure\".\"\"\"\n request = event.request\n if event.retry:\n handle_request_success(event)\n else:\n response = request.response\n saved = response.__dict__.copy()\n response.setStatus(event.exc_info[0])\n ok = ISuccessFull(response, None)\n if ok is None:\n status = IStatus(response, None)\n if status is None:\n status = response.getStatus()\n else:\n status = int(status)\n ok = status < 500\n if bool(ok):\n handle_request_success(event)\n else:\n _log_bad.write('*')\n response.__dict__.update(saved)\n", "step-2": "<mask token>\n\n\n@adapter(IProcessStarting)\ndef start_successlogging(unused):\n \"\"\"start successlogging if configured.\"\"\"\n from App.config import getConfiguration\n config = getConfiguration().product_config.get('successlogging')\n if config is None:\n return\n global _log_good, _log_bad\n _log_good = Rotator(config['filebase'] + '_good', lock=True)\n _log_bad = Rotator(config['filebase'] + '_bad', lock=True)\n provideHandler(handle_request_success)\n provideHandler(handle_request_failure)\n\n\n@adapter(IPubSuccess)\ndef handle_request_success(event):\n \"\"\"handle \"IPubSuccess\".\"\"\"\n _log_good.write('*')\n\n\n@adapter(IPubFailure)\ndef handle_request_failure(event):\n \"\"\"handle \"IPubFailure\".\"\"\"\n request = event.request\n if event.retry:\n 
handle_request_success(event)\n else:\n response = request.response\n saved = response.__dict__.copy()\n response.setStatus(event.exc_info[0])\n ok = ISuccessFull(response, None)\n if ok is None:\n status = IStatus(response, None)\n if status is None:\n status = response.getStatus()\n else:\n status = int(status)\n ok = status < 500\n if bool(ok):\n handle_request_success(event)\n else:\n _log_bad.write('*')\n response.__dict__.update(saved)\n", "step-3": "<mask token>\n_log_good = _log_bad = None\n\n\n@adapter(IProcessStarting)\ndef start_successlogging(unused):\n \"\"\"start successlogging if configured.\"\"\"\n from App.config import getConfiguration\n config = getConfiguration().product_config.get('successlogging')\n if config is None:\n return\n global _log_good, _log_bad\n _log_good = Rotator(config['filebase'] + '_good', lock=True)\n _log_bad = Rotator(config['filebase'] + '_bad', lock=True)\n provideHandler(handle_request_success)\n provideHandler(handle_request_failure)\n\n\n@adapter(IPubSuccess)\ndef handle_request_success(event):\n \"\"\"handle \"IPubSuccess\".\"\"\"\n _log_good.write('*')\n\n\n@adapter(IPubFailure)\ndef handle_request_failure(event):\n \"\"\"handle \"IPubFailure\".\"\"\"\n request = event.request\n if event.retry:\n handle_request_success(event)\n else:\n response = request.response\n saved = response.__dict__.copy()\n response.setStatus(event.exc_info[0])\n ok = ISuccessFull(response, None)\n if ok is None:\n status = IStatus(response, None)\n if status is None:\n status = response.getStatus()\n else:\n status = int(status)\n ok = status < 500\n if bool(ok):\n handle_request_success(event)\n else:\n _log_bad.write('*')\n response.__dict__.update(saved)\n", "step-4": "<mask token>\nfrom .interfaces import IStatus\nfrom .interfaces import ISuccessFull\nfrom .Rotator import Rotator\nfrom zope.processlifetime import IProcessStarting\nfrom zope.component import adapter\nfrom zope.component import provideHandler\nfrom ZPublisher.interfaces 
import IPubFailure\nfrom ZPublisher.interfaces import IPubSuccess\n_log_good = _log_bad = None\n\n\n@adapter(IProcessStarting)\ndef start_successlogging(unused):\n \"\"\"start successlogging if configured.\"\"\"\n from App.config import getConfiguration\n config = getConfiguration().product_config.get('successlogging')\n if config is None:\n return\n global _log_good, _log_bad\n _log_good = Rotator(config['filebase'] + '_good', lock=True)\n _log_bad = Rotator(config['filebase'] + '_bad', lock=True)\n provideHandler(handle_request_success)\n provideHandler(handle_request_failure)\n\n\n@adapter(IPubSuccess)\ndef handle_request_success(event):\n \"\"\"handle \"IPubSuccess\".\"\"\"\n _log_good.write('*')\n\n\n@adapter(IPubFailure)\ndef handle_request_failure(event):\n \"\"\"handle \"IPubFailure\".\"\"\"\n request = event.request\n if event.retry:\n handle_request_success(event)\n else:\n response = request.response\n saved = response.__dict__.copy()\n response.setStatus(event.exc_info[0])\n ok = ISuccessFull(response, None)\n if ok is None:\n status = IStatus(response, None)\n if status is None:\n status = response.getStatus()\n else:\n status = int(status)\n ok = status < 500\n if bool(ok):\n handle_request_success(event)\n else:\n _log_bad.write('*')\n response.__dict__.update(saved)\n", "step-5": "# -*- coding: utf-8 -*-\n\"\"\"Success request logging.\n\nThis logging is used by \"CheckZope\" to determine the amount\nof work performed by Zope (in order not to bother it with monitor\nprobes when it is heavily active) and to detect an unreasonable\nerror rate.\n\nThis logging writes two files \"<base>_good.<date>\" and \"<base>_bad.<date>\".\nFor each request, a character is writen to either the good or\nthe bad logfile, depending on whether the request was successful or\nunsuccessful. 
This means, that only the file size matters for\nthese logfiles.\n\nUsually, response codes >= 500 are considered as unsuccessful requests.\nYou can register an \"ISuccessFull\" adapter, when you need\na different classification.\n\nTo activate this logging, both \"successlogging.zcml\" must be activated\nand a \"product-config\" section with name \"successlogging\" must be defined\ncontaining the key \"filebase\".\nIt specifies the basename of the logfiles (represented as \"<base>\" above).\n\"\"\"\n\nfrom .interfaces import IStatus\nfrom .interfaces import ISuccessFull\nfrom .Rotator import Rotator\nfrom zope.processlifetime import IProcessStarting\nfrom zope.component import adapter\nfrom zope.component import provideHandler\nfrom ZPublisher.interfaces import IPubFailure\nfrom ZPublisher.interfaces import IPubSuccess\n\n_log_good = _log_bad = None\n\n\n@adapter(IProcessStarting)\ndef start_successlogging(unused):\n \"\"\"start successlogging if configured.\"\"\"\n from App.config import getConfiguration\n config = getConfiguration().product_config.get('successlogging')\n if config is None:\n return # not configured\n global _log_good, _log_bad\n _log_good = Rotator(config['filebase'] + '_good', lock=True)\n _log_bad = Rotator(config['filebase'] + '_bad', lock=True)\n # register publication observers\n provideHandler(handle_request_success)\n provideHandler(handle_request_failure)\n\n\n@adapter(IPubSuccess)\ndef handle_request_success(event):\n \"\"\"handle \"IPubSuccess\".\"\"\"\n _log_good.write('*')\n\n\n@adapter(IPubFailure)\ndef handle_request_failure(event):\n \"\"\"handle \"IPubFailure\".\"\"\"\n request = event.request\n if event.retry:\n handle_request_success(event)\n else:\n # Note: Zope forgets (at least sometimes)\n # to inform the response about the exception.\n # Work around this bug.\n # When Zope3 views are used for error handling, they no longer\n # communicate via exceptions with the ZPublisher. 
Instead, they seem\n # to use 'setBody' which interferes with the 'exception' call below.\n # We work around this problem by saving the response state and then\n # restore it again. Of course, this no longer works around the Zope\n # bug (forgetting to call 'exception') mentioned above.\n response = request.response\n saved = response.__dict__.copy()\n response.setStatus(event.exc_info[0])\n ok = ISuccessFull(response, None)\n if ok is None:\n status = IStatus(response, None)\n if status is None:\n status = response.getStatus()\n else:\n status = int(status)\n ok = status < 500\n if bool(ok):\n handle_request_success(event)\n else:\n _log_bad.write('*')\n response.__dict__.update(saved) # restore response again\n", "step-ids": [ 2, 3, 4, 5, 6 ] }
[ 2, 3, 4, 5, 6 ]
<|reserved_special_token_0|> class RestApiTestSuite(unittest.TestCase): <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> @pytest.fixture(autouse=True) def setup_gateway(self, metadata): self.gateway = proactive.ProActiveGateway(metadata['proactive_url'], debug=True) self.username = metadata['username'] self.password = metadata['password'] <|reserved_special_token_0|> def test_rm_model_nodesources(self): self.gateway.connect(self.username, self.password) restapi = self.gateway.getProactiveRestApi() nodesources = restapi.get_rm_model_nodesources() self.assertIsNotNone(nodesources) self.assertTrue(isinstance(nodesources, list)) self.gateway.disconnect() def test_rm_model_tokens(self): self.gateway.connect(self.username, self.password) restapi = self.gateway.getProactiveRestApi() tokens = restapi.get_rm_model_tokens() self.assertIsNotNone(tokens) self.assertTrue(isinstance(tokens, list)) self.gateway.disconnect() <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class RestApiTestSuite(unittest.TestCase): """Advanced test cases.""" gateway = None username = '' password = '' @pytest.fixture(autouse=True) def setup_gateway(self, metadata): self.gateway = proactive.ProActiveGateway(metadata['proactive_url'], debug=True) self.username = metadata['username'] self.password = metadata['password'] def test_rm_model_hosts(self): self.gateway.connect(self.username, self.password) restapi = self.gateway.getProactiveRestApi() hosts = restapi.get_rm_model_hosts() self.assertIsNotNone(hosts) self.assertTrue(isinstance(hosts, list)) self.gateway.disconnect() def test_rm_model_nodesources(self): self.gateway.connect(self.username, self.password) restapi = self.gateway.getProactiveRestApi() nodesources = restapi.get_rm_model_nodesources() self.assertIsNotNone(nodesources) self.assertTrue(isinstance(nodesources, list)) self.gateway.disconnect() def test_rm_model_tokens(self): 
self.gateway.connect(self.username, self.password) restapi = self.gateway.getProactiveRestApi() tokens = restapi.get_rm_model_tokens() self.assertIsNotNone(tokens) self.assertTrue(isinstance(tokens, list)) self.gateway.disconnect() <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class RestApiTestSuite(unittest.TestCase): """Advanced test cases.""" gateway = None username = '' password = '' @pytest.fixture(autouse=True) def setup_gateway(self, metadata): self.gateway = proactive.ProActiveGateway(metadata['proactive_url'], debug=True) self.username = metadata['username'] self.password = metadata['password'] def test_rm_model_hosts(self): self.gateway.connect(self.username, self.password) restapi = self.gateway.getProactiveRestApi() hosts = restapi.get_rm_model_hosts() self.assertIsNotNone(hosts) self.assertTrue(isinstance(hosts, list)) self.gateway.disconnect() def test_rm_model_nodesources(self): self.gateway.connect(self.username, self.password) restapi = self.gateway.getProactiveRestApi() nodesources = restapi.get_rm_model_nodesources() self.assertIsNotNone(nodesources) self.assertTrue(isinstance(nodesources, list)) self.gateway.disconnect() def test_rm_model_tokens(self): self.gateway.connect(self.username, self.password) restapi = self.gateway.getProactiveRestApi() tokens = restapi.get_rm_model_tokens() self.assertIsNotNone(tokens) self.assertTrue(isinstance(tokens, list)) self.gateway.disconnect() if __name__ == '__main__': unittest.main() <|reserved_special_token_1|> import proactive import unittest import numbers import os import pytest class RestApiTestSuite(unittest.TestCase): """Advanced test cases.""" gateway = None username = '' password = '' @pytest.fixture(autouse=True) def setup_gateway(self, metadata): self.gateway = proactive.ProActiveGateway(metadata['proactive_url'], debug=True) self.username = metadata['username'] self.password = metadata['password'] def test_rm_model_hosts(self): 
self.gateway.connect(self.username, self.password) restapi = self.gateway.getProactiveRestApi() hosts = restapi.get_rm_model_hosts() self.assertIsNotNone(hosts) self.assertTrue(isinstance(hosts, list)) self.gateway.disconnect() def test_rm_model_nodesources(self): self.gateway.connect(self.username, self.password) restapi = self.gateway.getProactiveRestApi() nodesources = restapi.get_rm_model_nodesources() self.assertIsNotNone(nodesources) self.assertTrue(isinstance(nodesources, list)) self.gateway.disconnect() def test_rm_model_tokens(self): self.gateway.connect(self.username, self.password) restapi = self.gateway.getProactiveRestApi() tokens = restapi.get_rm_model_tokens() self.assertIsNotNone(tokens) self.assertTrue(isinstance(tokens, list)) self.gateway.disconnect() if __name__ == '__main__': unittest.main() <|reserved_special_token_1|> import proactive import unittest import numbers import os import pytest class RestApiTestSuite(unittest.TestCase): """Advanced test cases.""" gateway = None username = "" password = "" @pytest.fixture(autouse=True) def setup_gateway(self, metadata): self.gateway = proactive.ProActiveGateway(metadata['proactive_url'], debug=True) self.username = metadata['username'] self.password = metadata['password'] def test_rm_model_hosts(self): self.gateway.connect(self.username, self.password) restapi = self.gateway.getProactiveRestApi() hosts = restapi.get_rm_model_hosts() self.assertIsNotNone(hosts) self.assertTrue(isinstance(hosts, list)) self.gateway.disconnect() def test_rm_model_nodesources(self): self.gateway.connect(self.username, self.password) restapi = self.gateway.getProactiveRestApi() nodesources = restapi.get_rm_model_nodesources() self.assertIsNotNone(nodesources) self.assertTrue(isinstance(nodesources, list)) self.gateway.disconnect() def test_rm_model_tokens(self): self.gateway.connect(self.username, self.password) restapi = self.gateway.getProactiveRestApi() tokens = restapi.get_rm_model_tokens() 
self.assertIsNotNone(tokens) self.assertTrue(isinstance(tokens, list)) self.gateway.disconnect() if __name__ == '__main__': unittest.main()
flexible
{ "blob_id": "da2c615b8fab8de6bd63864508da254a46e65bb8", "index": 4543, "step-1": "<mask token>\n\n\nclass RestApiTestSuite(unittest.TestCase):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n @pytest.fixture(autouse=True)\n def setup_gateway(self, metadata):\n self.gateway = proactive.ProActiveGateway(metadata['proactive_url'],\n debug=True)\n self.username = metadata['username']\n self.password = metadata['password']\n <mask token>\n\n def test_rm_model_nodesources(self):\n self.gateway.connect(self.username, self.password)\n restapi = self.gateway.getProactiveRestApi()\n nodesources = restapi.get_rm_model_nodesources()\n self.assertIsNotNone(nodesources)\n self.assertTrue(isinstance(nodesources, list))\n self.gateway.disconnect()\n\n def test_rm_model_tokens(self):\n self.gateway.connect(self.username, self.password)\n restapi = self.gateway.getProactiveRestApi()\n tokens = restapi.get_rm_model_tokens()\n self.assertIsNotNone(tokens)\n self.assertTrue(isinstance(tokens, list))\n self.gateway.disconnect()\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\nclass RestApiTestSuite(unittest.TestCase):\n \"\"\"Advanced test cases.\"\"\"\n gateway = None\n username = ''\n password = ''\n\n @pytest.fixture(autouse=True)\n def setup_gateway(self, metadata):\n self.gateway = proactive.ProActiveGateway(metadata['proactive_url'],\n debug=True)\n self.username = metadata['username']\n self.password = metadata['password']\n\n def test_rm_model_hosts(self):\n self.gateway.connect(self.username, self.password)\n restapi = self.gateway.getProactiveRestApi()\n hosts = restapi.get_rm_model_hosts()\n self.assertIsNotNone(hosts)\n self.assertTrue(isinstance(hosts, list))\n self.gateway.disconnect()\n\n def test_rm_model_nodesources(self):\n self.gateway.connect(self.username, self.password)\n restapi = self.gateway.getProactiveRestApi()\n nodesources = restapi.get_rm_model_nodesources()\n self.assertIsNotNone(nodesources)\n self.assertTrue(isinstance(nodesources, 
list))\n self.gateway.disconnect()\n\n def test_rm_model_tokens(self):\n self.gateway.connect(self.username, self.password)\n restapi = self.gateway.getProactiveRestApi()\n tokens = restapi.get_rm_model_tokens()\n self.assertIsNotNone(tokens)\n self.assertTrue(isinstance(tokens, list))\n self.gateway.disconnect()\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\nclass RestApiTestSuite(unittest.TestCase):\n \"\"\"Advanced test cases.\"\"\"\n gateway = None\n username = ''\n password = ''\n\n @pytest.fixture(autouse=True)\n def setup_gateway(self, metadata):\n self.gateway = proactive.ProActiveGateway(metadata['proactive_url'],\n debug=True)\n self.username = metadata['username']\n self.password = metadata['password']\n\n def test_rm_model_hosts(self):\n self.gateway.connect(self.username, self.password)\n restapi = self.gateway.getProactiveRestApi()\n hosts = restapi.get_rm_model_hosts()\n self.assertIsNotNone(hosts)\n self.assertTrue(isinstance(hosts, list))\n self.gateway.disconnect()\n\n def test_rm_model_nodesources(self):\n self.gateway.connect(self.username, self.password)\n restapi = self.gateway.getProactiveRestApi()\n nodesources = restapi.get_rm_model_nodesources()\n self.assertIsNotNone(nodesources)\n self.assertTrue(isinstance(nodesources, list))\n self.gateway.disconnect()\n\n def test_rm_model_tokens(self):\n self.gateway.connect(self.username, self.password)\n restapi = self.gateway.getProactiveRestApi()\n tokens = restapi.get_rm_model_tokens()\n self.assertIsNotNone(tokens)\n self.assertTrue(isinstance(tokens, list))\n self.gateway.disconnect()\n\n\nif __name__ == '__main__':\n unittest.main()\n", "step-4": "import proactive\nimport unittest\nimport numbers\nimport os\nimport pytest\n\n\nclass RestApiTestSuite(unittest.TestCase):\n \"\"\"Advanced test cases.\"\"\"\n gateway = None\n username = ''\n password = ''\n\n @pytest.fixture(autouse=True)\n def setup_gateway(self, metadata):\n self.gateway = 
proactive.ProActiveGateway(metadata['proactive_url'],\n debug=True)\n self.username = metadata['username']\n self.password = metadata['password']\n\n def test_rm_model_hosts(self):\n self.gateway.connect(self.username, self.password)\n restapi = self.gateway.getProactiveRestApi()\n hosts = restapi.get_rm_model_hosts()\n self.assertIsNotNone(hosts)\n self.assertTrue(isinstance(hosts, list))\n self.gateway.disconnect()\n\n def test_rm_model_nodesources(self):\n self.gateway.connect(self.username, self.password)\n restapi = self.gateway.getProactiveRestApi()\n nodesources = restapi.get_rm_model_nodesources()\n self.assertIsNotNone(nodesources)\n self.assertTrue(isinstance(nodesources, list))\n self.gateway.disconnect()\n\n def test_rm_model_tokens(self):\n self.gateway.connect(self.username, self.password)\n restapi = self.gateway.getProactiveRestApi()\n tokens = restapi.get_rm_model_tokens()\n self.assertIsNotNone(tokens)\n self.assertTrue(isinstance(tokens, list))\n self.gateway.disconnect()\n\n\nif __name__ == '__main__':\n unittest.main()\n", "step-5": "import proactive\nimport unittest\nimport numbers\nimport os\nimport pytest\n\n\nclass RestApiTestSuite(unittest.TestCase):\n \"\"\"Advanced test cases.\"\"\"\n\n gateway = None\n username = \"\"\n password = \"\"\n\n @pytest.fixture(autouse=True)\n def setup_gateway(self, metadata):\n self.gateway = proactive.ProActiveGateway(metadata['proactive_url'], debug=True)\n self.username = metadata['username']\n self.password = metadata['password']\n\n def test_rm_model_hosts(self):\n self.gateway.connect(self.username, self.password)\n restapi = self.gateway.getProactiveRestApi()\n hosts = restapi.get_rm_model_hosts()\n self.assertIsNotNone(hosts)\n self.assertTrue(isinstance(hosts, list))\n self.gateway.disconnect()\n\n def test_rm_model_nodesources(self):\n self.gateway.connect(self.username, self.password)\n restapi = self.gateway.getProactiveRestApi()\n nodesources = restapi.get_rm_model_nodesources()\n 
self.assertIsNotNone(nodesources)\n self.assertTrue(isinstance(nodesources, list))\n self.gateway.disconnect()\n\n def test_rm_model_tokens(self):\n self.gateway.connect(self.username, self.password)\n restapi = self.gateway.getProactiveRestApi()\n tokens = restapi.get_rm_model_tokens()\n self.assertIsNotNone(tokens)\n self.assertTrue(isinstance(tokens, list))\n self.gateway.disconnect()\n\n\nif __name__ == '__main__':\n unittest.main()\n", "step-ids": [ 4, 7, 8, 9, 10 ] }
[ 4, 7, 8, 9, 10 ]
# -*- coding: utf-8 -*- # Generated by Django 1.10.3 on 2018-12-20 13:06 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('login', '0006_usermovies_img'), ] operations = [ migrations.AddField( model_name='moviesinfo', name='country', field=models.CharField(default=1, max_length=50), preserve_default=False, ), migrations.AddField( model_name='moviesinfo', name='description', field=models.CharField(default=1, max_length=200), preserve_default=False, ), migrations.AddField( model_name='moviesinfo', name='director', field=models.CharField(default=1, max_length=100), preserve_default=False, ), migrations.AddField( model_name='moviesinfo', name='grenre', field=models.CharField(default=1, max_length=50), preserve_default=False, ), migrations.AddField( model_name='moviesinfo', name='year', field=models.CharField(default=1, max_length=8), preserve_default=False, ), migrations.AddField( model_name='usermovies', name='country', field=models.CharField(default=1, max_length=50), preserve_default=False, ), migrations.AddField( model_name='usermovies', name='description', field=models.CharField(default=1, max_length=200), preserve_default=False, ), migrations.AddField( model_name='usermovies', name='director', field=models.CharField(default=1, max_length=100), preserve_default=False, ), migrations.AddField( model_name='usermovies', name='grenre', field=models.CharField(default=1, max_length=50), preserve_default=False, ), migrations.AddField( model_name='usermovies', name='year', field=models.CharField(default=1, max_length=8), preserve_default=False, ), ]
normal
{ "blob_id": "e67cbddf10440e8a31373e05a82840677d3045f5", "index": 4388, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n", "step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('login', '0006_usermovies_img')]\n operations = [migrations.AddField(model_name='moviesinfo', name=\n 'country', field=models.CharField(default=1, max_length=50),\n preserve_default=False), migrations.AddField(model_name=\n 'moviesinfo', name='description', field=models.CharField(default=1,\n max_length=200), preserve_default=False), migrations.AddField(\n model_name='moviesinfo', name='director', field=models.CharField(\n default=1, max_length=100), preserve_default=False), migrations.\n AddField(model_name='moviesinfo', name='grenre', field=models.\n CharField(default=1, max_length=50), preserve_default=False),\n migrations.AddField(model_name='moviesinfo', name='year', field=\n models.CharField(default=1, max_length=8), preserve_default=False),\n migrations.AddField(model_name='usermovies', name='country', field=\n models.CharField(default=1, max_length=50), preserve_default=False),\n migrations.AddField(model_name='usermovies', name='description',\n field=models.CharField(default=1, max_length=200), preserve_default\n =False), migrations.AddField(model_name='usermovies', name=\n 'director', field=models.CharField(default=1, max_length=100),\n preserve_default=False), migrations.AddField(model_name=\n 'usermovies', name='grenre', field=models.CharField(default=1,\n max_length=50), preserve_default=False), migrations.AddField(\n model_name='usermovies', name='year', field=models.CharField(\n default=1, max_length=8), preserve_default=False)]\n", "step-4": "from __future__ import unicode_literals\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('login', '0006_usermovies_img')]\n operations = 
[migrations.AddField(model_name='moviesinfo', name=\n 'country', field=models.CharField(default=1, max_length=50),\n preserve_default=False), migrations.AddField(model_name=\n 'moviesinfo', name='description', field=models.CharField(default=1,\n max_length=200), preserve_default=False), migrations.AddField(\n model_name='moviesinfo', name='director', field=models.CharField(\n default=1, max_length=100), preserve_default=False), migrations.\n AddField(model_name='moviesinfo', name='grenre', field=models.\n CharField(default=1, max_length=50), preserve_default=False),\n migrations.AddField(model_name='moviesinfo', name='year', field=\n models.CharField(default=1, max_length=8), preserve_default=False),\n migrations.AddField(model_name='usermovies', name='country', field=\n models.CharField(default=1, max_length=50), preserve_default=False),\n migrations.AddField(model_name='usermovies', name='description',\n field=models.CharField(default=1, max_length=200), preserve_default\n =False), migrations.AddField(model_name='usermovies', name=\n 'director', field=models.CharField(default=1, max_length=100),\n preserve_default=False), migrations.AddField(model_name=\n 'usermovies', name='grenre', field=models.CharField(default=1,\n max_length=50), preserve_default=False), migrations.AddField(\n model_name='usermovies', name='year', field=models.CharField(\n default=1, max_length=8), preserve_default=False)]\n", "step-5": "# -*- coding: utf-8 -*-\n# Generated by Django 1.10.3 on 2018-12-20 13:06\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('login', '0006_usermovies_img'),\n ]\n\n operations = [\n migrations.AddField(\n model_name='moviesinfo',\n name='country',\n field=models.CharField(default=1, max_length=50),\n preserve_default=False,\n ),\n migrations.AddField(\n model_name='moviesinfo',\n name='description',\n field=models.CharField(default=1, max_length=200),\n 
preserve_default=False,\n ),\n migrations.AddField(\n model_name='moviesinfo',\n name='director',\n field=models.CharField(default=1, max_length=100),\n preserve_default=False,\n ),\n migrations.AddField(\n model_name='moviesinfo',\n name='grenre',\n field=models.CharField(default=1, max_length=50),\n preserve_default=False,\n ),\n migrations.AddField(\n model_name='moviesinfo',\n name='year',\n field=models.CharField(default=1, max_length=8),\n preserve_default=False,\n ),\n migrations.AddField(\n model_name='usermovies',\n name='country',\n field=models.CharField(default=1, max_length=50),\n preserve_default=False,\n ),\n migrations.AddField(\n model_name='usermovies',\n name='description',\n field=models.CharField(default=1, max_length=200),\n preserve_default=False,\n ),\n migrations.AddField(\n model_name='usermovies',\n name='director',\n field=models.CharField(default=1, max_length=100),\n preserve_default=False,\n ),\n migrations.AddField(\n model_name='usermovies',\n name='grenre',\n field=models.CharField(default=1, max_length=50),\n preserve_default=False,\n ),\n migrations.AddField(\n model_name='usermovies',\n name='year',\n field=models.CharField(default=1, max_length=8),\n preserve_default=False,\n ),\n ]\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> app_name = 'Accounts' urlpatterns = [path('update_info', views.update_info, name='update_info'), path('create_user', views.create_user, name='create_user'), path( 'change_password', views.change_password, name='change_password'), path ('register', views.register, name='register'), path('login', views. login, name='login'), path('logout', views.logout, name='logout'), path ('test_auth', views.test, name='test'), url( '^activate/(?P<uidb64>[0-9A-Za-z_\\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$' , views.activate, name='activate'), path( 'change_user_status/<int:user_id>/<int:status>', views. change_user_status, name='change_user_status'), path( 'change_user_privilege/<int:user_id>/<int:status>', views. change_user_privilege, name='change_user_privilege')] <|reserved_special_token_1|> from django.conf.urls import url from django.urls import path from . import views app_name = 'Accounts' urlpatterns = [path('update_info', views.update_info, name='update_info'), path('create_user', views.create_user, name='create_user'), path( 'change_password', views.change_password, name='change_password'), path ('register', views.register, name='register'), path('login', views. login, name='login'), path('logout', views.logout, name='logout'), path ('test_auth', views.test, name='test'), url( '^activate/(?P<uidb64>[0-9A-Za-z_\\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$' , views.activate, name='activate'), path( 'change_user_status/<int:user_id>/<int:status>', views. change_user_status, name='change_user_status'), path( 'change_user_privilege/<int:user_id>/<int:status>', views. change_user_privilege, name='change_user_privilege')] <|reserved_special_token_1|> from django.conf.urls import url from django.urls import path from . 
import views app_name = 'Accounts' urlpatterns = [ path('update_info', views.update_info, name='update_info'), path('create_user', views.create_user, name='create_user'), path('change_password', views.change_password, name='change_password'), path('register', views.register, name='register'), path('login', views.login, name='login'), path('logout', views.logout, name='logout'), path('test_auth', views.test, name='test'), url(r'^activate/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$', views.activate, name='activate'), path('change_user_status/<int:user_id>/<int:status>', views.change_user_status, name='change_user_status'), path('change_user_privilege/<int:user_id>/<int:status>', views.change_user_privilege, name='change_user_privilege'), ]
flexible
{ "blob_id": "bfb778a2ecf43a697bc0e3449e9302142b20e1f4", "index": 4278, "step-1": "<mask token>\n", "step-2": "<mask token>\napp_name = 'Accounts'\nurlpatterns = [path('update_info', views.update_info, name='update_info'),\n path('create_user', views.create_user, name='create_user'), path(\n 'change_password', views.change_password, name='change_password'), path\n ('register', views.register, name='register'), path('login', views.\n login, name='login'), path('logout', views.logout, name='logout'), path\n ('test_auth', views.test, name='test'), url(\n '^activate/(?P<uidb64>[0-9A-Za-z_\\\\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$'\n , views.activate, name='activate'), path(\n 'change_user_status/<int:user_id>/<int:status>', views.\n change_user_status, name='change_user_status'), path(\n 'change_user_privilege/<int:user_id>/<int:status>', views.\n change_user_privilege, name='change_user_privilege')]\n", "step-3": "from django.conf.urls import url\nfrom django.urls import path\nfrom . import views\napp_name = 'Accounts'\nurlpatterns = [path('update_info', views.update_info, name='update_info'),\n path('create_user', views.create_user, name='create_user'), path(\n 'change_password', views.change_password, name='change_password'), path\n ('register', views.register, name='register'), path('login', views.\n login, name='login'), path('logout', views.logout, name='logout'), path\n ('test_auth', views.test, name='test'), url(\n '^activate/(?P<uidb64>[0-9A-Za-z_\\\\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$'\n , views.activate, name='activate'), path(\n 'change_user_status/<int:user_id>/<int:status>', views.\n change_user_status, name='change_user_status'), path(\n 'change_user_privilege/<int:user_id>/<int:status>', views.\n change_user_privilege, name='change_user_privilege')]\n", "step-4": "from django.conf.urls import url\nfrom django.urls import path\n\nfrom . 
import views\n\napp_name = 'Accounts'\nurlpatterns = [\n path('update_info', views.update_info, name='update_info'),\n path('create_user', views.create_user, name='create_user'),\n path('change_password', views.change_password, name='change_password'),\n path('register', views.register, name='register'),\n path('login', views.login, name='login'),\n path('logout', views.logout, name='logout'),\n path('test_auth', views.test, name='test'),\n url(r'^activate/(?P<uidb64>[0-9A-Za-z_\\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$',\n views.activate, name='activate'),\n path('change_user_status/<int:user_id>/<int:status>', views.change_user_status, name='change_user_status'),\n path('change_user_privilege/<int:user_id>/<int:status>', views.change_user_privilege, name='change_user_privilege'),\n]\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
<|reserved_special_token_0|> def create_local_imports_files(tmp_path): path_dir = os.path.join(tmp_path, 'dir_local_imports') fln_func = os.path.join(path_dir, 'file_func.py') fln_gen = os.path.join(path_dir, 'file_gen.py') os.makedirs(path_dir, exist_ok=True) code1 = """ from bluesky_queueserver.manager.profile_tools import set_user_ns # Function that has the parameter 'ipython' @set_user_ns def f1(some_value, user_ns, ipython): user_ns["func_was_called"] = "func_was_called" return (some_value, user_ns["v_from_namespace"], bool(ipython)) # Function that has no parameter 'ipython' @set_user_ns def f1a(some_value, user_ns): user_ns["func_A_was_called"] = "func_was_called" return (some_value, user_ns["v_from_namespace"]) """ with open(fln_func, 'w') as f: f.writelines(code1) code2 = """ from bluesky_queueserver.manager.profile_tools import set_user_ns # Function that has the parameter 'ipython' @set_user_ns def f2(some_value, user_ns, ipython): user_ns["gen_was_called"] = "gen_was_called" yield (some_value, user_ns["v_from_namespace"], bool(ipython)) # Function that has no parameter 'ipython' @set_user_ns def f2a(some_value, user_ns): user_ns["gen_A_was_called"] = "gen_was_called" yield (some_value, user_ns["v_from_namespace"]) @set_user_ns def f3(some_value, user_ns, ipython): user_ns["value_f3"] = some_value f3(91) """ with open(fln_gen, 'w') as f: f.writelines(code2) <|reserved_special_token_0|> def test_set_user_ns_1(tmp_path): """ Tests for ``set_user_ns`` decorator. The functionality of the decorator is fully tested (only without IPython): - using ``global_user_namespace`` to pass values in and out of the function defined in the imported module (emulation of ``get_ipython().user_ns``). - checking if the function is executed from IPython (only for the function defined in the imported module). 
""" pc_path = copy_default_profile_collection(tmp_path) create_local_imports_files(pc_path) patch_first_startup_file(pc_path, patch_code) nspace = load_profile_collection(pc_path) assert len(nspace) > 0, 'Failed to load the profile collection' assert 'f1' in nspace, 'Test for local imports failed' assert 'f2' in nspace, 'Test for local imports failed' assert inspect.isgeneratorfunction(nspace['f1']) is False assert inspect.isgeneratorfunction(nspace['f2']) is True def check_signature(func): params = inspect.signature(func).parameters assert 'user_ns' not in params assert 'ipython' not in params check_signature(nspace['f1']) check_signature(nspace['f1a']) check_signature(nspace['f2']) check_signature(nspace['f2a']) assert nspace['value_f3'] == 91 assert nspace['value_f4'] == 90 global_user_namespace.set_user_namespace(user_ns=nspace, use_ipython=False) global_user_namespace.user_ns['v_from_namespace'] = 'value-sent-to-func' assert nspace['v_from_namespace'] == 'value-sent-to-func' result_func = nspace['f1'](60) assert nspace['func_was_called'] == 'func_was_called' assert result_func[0] == 60 assert result_func[1] == 'value-sent-to-func' assert result_func[2] is False result_func = nspace['f1a'](65) assert nspace['func_A_was_called'] == 'func_was_called' assert result_func[0] == 65 assert result_func[1] == 'value-sent-to-func' global_user_namespace.user_ns['v_from_namespace'] = 'value-sent-to-gen' result_func = list(nspace['f2'](110))[0] assert nspace['gen_was_called'] == 'gen_was_called' assert result_func[0] == 110 assert result_func[1] == 'value-sent-to-gen' assert result_func[2] is False result_func = list(nspace['f2a'](115))[0] assert nspace['gen_A_was_called'] == 'gen_was_called' assert result_func[0] == 115 assert result_func[1] == 'value-sent-to-gen' def test_global_user_namespace(): """ Basic test for ``global_user_namespace``. 
""" ns = {'ab': 1, 'cd': 2} global_user_namespace.set_user_namespace(user_ns=ns) assert global_user_namespace.user_ns == ns assert global_user_namespace.use_ipython is False global_user_namespace.set_user_namespace(user_ns={}, use_ipython=True) assert global_user_namespace.user_ns == {} assert global_user_namespace.use_ipython is True global_user_namespace.set_user_namespace(user_ns=ns, use_ipython=False) assert global_user_namespace.user_ns == ns assert global_user_namespace.use_ipython is False <|reserved_special_token_0|> @pytest.mark.parametrize('device_names, loaded_names, kw_args, success, errmsg' , [([], [], {}, True, ''), (('det', 'motor'), ('det', 'motor'), {}, True, ''), (['det', 'motor'], ('det', 'motor'), {}, True, ''), ((('det', ''), ['motor', '']), ('det', 'motor'), {}, True, ''), (('det', ['motor', '']), ('det', 'motor'), {}, True, ''), (('det', ('motor', ''), ( 'tst_motor2', 'motor2')), ('det', 'motor', 'motor2'), {}, True, ''), (( ('motor1', 'motor1_copy1'), ('motor1', 'motor1_copy2')), ( 'motor1_copy1', 'motor1_copy2'), {}, True, ''), (10, ('det', 'motor'), {}, False, "Parameter 'device_names' value must be a tuple or a list"), ('string', ('det', 'motor'), {}, False, "Parameter 'device_names' value must be a tuple or a list"), (('det', 10), ('det', 'motor'), {}, False, "Parameter 'device_names': element .* must be str, tuple or list"), (( 10, 'motor'), ('det', 'motor'), {}, False, "Parameter 'device_names': element .* must be str, tuple or list"), (( 'det', (10, 'motor2')), ('det', 'motor'), {}, False, 'element .* is expected to be in the form'), (('det', ('tst_motor2', 10 )), ('det', 'motor'), {}, False, 'element .* is expected to be in the form'), (('det', ('tst_motor2', 'motor2', 10)), ('det', 'motor'), {}, False, 'element .* is expected to be in the form'), (('det', 'motor10'), ( 'det', 'motor10'), {}, False, 'No devices with name'), (('det', 'motor3'), ('det', 'motor3'), {}, False, 'Multiple devices with name'), (('det', 'motor3'), ('det', 
'motor3'), {'active': True}, True, ''), (( 'det', 'motor3'), ('det', 'motor3'), {'active': False}, False, "No devices with name 'det' were found in Happi database."), (('motor3' ,), ('motor3',), {'active': False}, True, ''), (('det', ['motor', 'motor3_new']), ('det', 'motor3_new'), {}, True, ''), (('det', ['motor', 'Motor']), ('det', 'motor'), {}, False, 'may consist of lowercase letters, numbers'), (('det', ['motor', 'moTor']), ('det', 'motor'), {}, False, 'may consist of lowercase letters, numbers'), (('det', ['motor', '_motor']), ('det', 'motor'), {}, False, 'may consist of lowercase letters, numbers'), (('det', ['motor', ' motor']), ('det', 'motor'), {}, False, 'may consist of lowercase letters, numbers'), (('det', ['motor', 'motor ']), ('det', 'motor'), {}, False, 'may consist of lowercase letters, numbers'), (('det', ['motor', 'motor new']), ('det', 'motor'), {}, False, 'may consist of lowercase letters, numbers'), (('det', ['motor', 'motor_$new']), ('det', 'motor'), {}, False, 'may consist of lowercase letters, numbers'), (('det', ['motor', '2motor_$new']), ('det', 'motor'), {}, False, 'may consist of lowercase letters, numbers')]) def test_load_devices_from_happi_1(tmp_path, monkeypatch, device_names, loaded_names, kw_args, success, errmsg): """ Tests for ``load_devices_from_happi``. 
""" _configure_happi(tmp_path, monkeypatch, json_devices=_happi_json_db_1) if success: ns = {} dlist = load_devices_from_happi(device_names, namespace=ns, **kw_args) assert len(ns) == len(loaded_names), str(ns) for d in loaded_names: assert d in ns assert set(dlist) == set(loaded_names) else: with pytest.raises(Exception, match=errmsg): ns = {} load_devices_from_happi(device_names, namespace=ns, **kw_args) def _test_loading(device_names, loaded_names): if success: load_devices_from_happi(device_names, namespace=locals(), **kw_args ) for d in loaded_names: assert d in locals() else: with pytest.raises(Exception, match=errmsg): load_devices_from_happi(device_names, namespace=locals(), **kw_args) _test_loading(device_names=device_names, loaded_names=loaded_names) def test_load_devices_from_happi_2_fail(tmp_path, monkeypatch): """ Function ``load_devices_from_happi``: parameter ``namespace`` is required and must be of type ``dict``. """ _configure_happi(tmp_path, monkeypatch, json_devices=_happi_json_db_1) with pytest.raises(TypeError, match= "missing 1 required keyword-only argument: 'namespace'"): load_devices_from_happi(['det', 'motor']) with pytest.raises(TypeError, match= "Parameter 'namespace' must be a dictionary"): load_devices_from_happi(['det', 'motor'], namespace=[1, 2, 3]) <|reserved_special_token_1|> <|reserved_special_token_0|> def create_local_imports_files(tmp_path): path_dir = os.path.join(tmp_path, 'dir_local_imports') fln_func = os.path.join(path_dir, 'file_func.py') fln_gen = os.path.join(path_dir, 'file_gen.py') os.makedirs(path_dir, exist_ok=True) code1 = """ from bluesky_queueserver.manager.profile_tools import set_user_ns # Function that has the parameter 'ipython' @set_user_ns def f1(some_value, user_ns, ipython): user_ns["func_was_called"] = "func_was_called" return (some_value, user_ns["v_from_namespace"], bool(ipython)) # Function that has no parameter 'ipython' @set_user_ns def f1a(some_value, user_ns): user_ns["func_A_was_called"] = 
"func_was_called" return (some_value, user_ns["v_from_namespace"]) """ with open(fln_func, 'w') as f: f.writelines(code1) code2 = """ from bluesky_queueserver.manager.profile_tools import set_user_ns # Function that has the parameter 'ipython' @set_user_ns def f2(some_value, user_ns, ipython): user_ns["gen_was_called"] = "gen_was_called" yield (some_value, user_ns["v_from_namespace"], bool(ipython)) # Function that has no parameter 'ipython' @set_user_ns def f2a(some_value, user_ns): user_ns["gen_A_was_called"] = "gen_was_called" yield (some_value, user_ns["v_from_namespace"]) @set_user_ns def f3(some_value, user_ns, ipython): user_ns["value_f3"] = some_value f3(91) """ with open(fln_gen, 'w') as f: f.writelines(code2) <|reserved_special_token_0|> def test_set_user_ns_1(tmp_path): """ Tests for ``set_user_ns`` decorator. The functionality of the decorator is fully tested (only without IPython): - using ``global_user_namespace`` to pass values in and out of the function defined in the imported module (emulation of ``get_ipython().user_ns``). - checking if the function is executed from IPython (only for the function defined in the imported module). 
""" pc_path = copy_default_profile_collection(tmp_path) create_local_imports_files(pc_path) patch_first_startup_file(pc_path, patch_code) nspace = load_profile_collection(pc_path) assert len(nspace) > 0, 'Failed to load the profile collection' assert 'f1' in nspace, 'Test for local imports failed' assert 'f2' in nspace, 'Test for local imports failed' assert inspect.isgeneratorfunction(nspace['f1']) is False assert inspect.isgeneratorfunction(nspace['f2']) is True def check_signature(func): params = inspect.signature(func).parameters assert 'user_ns' not in params assert 'ipython' not in params check_signature(nspace['f1']) check_signature(nspace['f1a']) check_signature(nspace['f2']) check_signature(nspace['f2a']) assert nspace['value_f3'] == 91 assert nspace['value_f4'] == 90 global_user_namespace.set_user_namespace(user_ns=nspace, use_ipython=False) global_user_namespace.user_ns['v_from_namespace'] = 'value-sent-to-func' assert nspace['v_from_namespace'] == 'value-sent-to-func' result_func = nspace['f1'](60) assert nspace['func_was_called'] == 'func_was_called' assert result_func[0] == 60 assert result_func[1] == 'value-sent-to-func' assert result_func[2] is False result_func = nspace['f1a'](65) assert nspace['func_A_was_called'] == 'func_was_called' assert result_func[0] == 65 assert result_func[1] == 'value-sent-to-func' global_user_namespace.user_ns['v_from_namespace'] = 'value-sent-to-gen' result_func = list(nspace['f2'](110))[0] assert nspace['gen_was_called'] == 'gen_was_called' assert result_func[0] == 110 assert result_func[1] == 'value-sent-to-gen' assert result_func[2] is False result_func = list(nspace['f2a'](115))[0] assert nspace['gen_A_was_called'] == 'gen_was_called' assert result_func[0] == 115 assert result_func[1] == 'value-sent-to-gen' def test_global_user_namespace(): """ Basic test for ``global_user_namespace``. 
""" ns = {'ab': 1, 'cd': 2} global_user_namespace.set_user_namespace(user_ns=ns) assert global_user_namespace.user_ns == ns assert global_user_namespace.use_ipython is False global_user_namespace.set_user_namespace(user_ns={}, use_ipython=True) assert global_user_namespace.user_ns == {} assert global_user_namespace.use_ipython is True global_user_namespace.set_user_namespace(user_ns=ns, use_ipython=False) assert global_user_namespace.user_ns == ns assert global_user_namespace.use_ipython is False <|reserved_special_token_0|> def _configure_happi(tmp_path, monkeypatch, json_devices): path_json = os.path.join(tmp_path, 'sim_devices.json') path_ini = os.path.join(tmp_path, 'happi.ini') happi_ini_text = f'[DEFAULT]\nbackend=json\npath={path_json}' with open(path_ini, 'w') as f: f.write(happi_ini_text) with open(path_json, 'w') as f: f.write(json_devices) monkeypatch.setenv('HAPPI_CFG', path_ini) @pytest.mark.parametrize('device_names, loaded_names, kw_args, success, errmsg' , [([], [], {}, True, ''), (('det', 'motor'), ('det', 'motor'), {}, True, ''), (['det', 'motor'], ('det', 'motor'), {}, True, ''), ((('det', ''), ['motor', '']), ('det', 'motor'), {}, True, ''), (('det', ['motor', '']), ('det', 'motor'), {}, True, ''), (('det', ('motor', ''), ( 'tst_motor2', 'motor2')), ('det', 'motor', 'motor2'), {}, True, ''), (( ('motor1', 'motor1_copy1'), ('motor1', 'motor1_copy2')), ( 'motor1_copy1', 'motor1_copy2'), {}, True, ''), (10, ('det', 'motor'), {}, False, "Parameter 'device_names' value must be a tuple or a list"), ('string', ('det', 'motor'), {}, False, "Parameter 'device_names' value must be a tuple or a list"), (('det', 10), ('det', 'motor'), {}, False, "Parameter 'device_names': element .* must be str, tuple or list"), (( 10, 'motor'), ('det', 'motor'), {}, False, "Parameter 'device_names': element .* must be str, tuple or list"), (( 'det', (10, 'motor2')), ('det', 'motor'), {}, False, 'element .* is expected to be in the form'), (('det', ('tst_motor2', 10 )), 
('det', 'motor'), {}, False, 'element .* is expected to be in the form'), (('det', ('tst_motor2', 'motor2', 10)), ('det', 'motor'), {}, False, 'element .* is expected to be in the form'), (('det', 'motor10'), ( 'det', 'motor10'), {}, False, 'No devices with name'), (('det', 'motor3'), ('det', 'motor3'), {}, False, 'Multiple devices with name'), (('det', 'motor3'), ('det', 'motor3'), {'active': True}, True, ''), (( 'det', 'motor3'), ('det', 'motor3'), {'active': False}, False, "No devices with name 'det' were found in Happi database."), (('motor3' ,), ('motor3',), {'active': False}, True, ''), (('det', ['motor', 'motor3_new']), ('det', 'motor3_new'), {}, True, ''), (('det', ['motor', 'Motor']), ('det', 'motor'), {}, False, 'may consist of lowercase letters, numbers'), (('det', ['motor', 'moTor']), ('det', 'motor'), {}, False, 'may consist of lowercase letters, numbers'), (('det', ['motor', '_motor']), ('det', 'motor'), {}, False, 'may consist of lowercase letters, numbers'), (('det', ['motor', ' motor']), ('det', 'motor'), {}, False, 'may consist of lowercase letters, numbers'), (('det', ['motor', 'motor ']), ('det', 'motor'), {}, False, 'may consist of lowercase letters, numbers'), (('det', ['motor', 'motor new']), ('det', 'motor'), {}, False, 'may consist of lowercase letters, numbers'), (('det', ['motor', 'motor_$new']), ('det', 'motor'), {}, False, 'may consist of lowercase letters, numbers'), (('det', ['motor', '2motor_$new']), ('det', 'motor'), {}, False, 'may consist of lowercase letters, numbers')]) def test_load_devices_from_happi_1(tmp_path, monkeypatch, device_names, loaded_names, kw_args, success, errmsg): """ Tests for ``load_devices_from_happi``. 
""" _configure_happi(tmp_path, monkeypatch, json_devices=_happi_json_db_1) if success: ns = {} dlist = load_devices_from_happi(device_names, namespace=ns, **kw_args) assert len(ns) == len(loaded_names), str(ns) for d in loaded_names: assert d in ns assert set(dlist) == set(loaded_names) else: with pytest.raises(Exception, match=errmsg): ns = {} load_devices_from_happi(device_names, namespace=ns, **kw_args) def _test_loading(device_names, loaded_names): if success: load_devices_from_happi(device_names, namespace=locals(), **kw_args ) for d in loaded_names: assert d in locals() else: with pytest.raises(Exception, match=errmsg): load_devices_from_happi(device_names, namespace=locals(), **kw_args) _test_loading(device_names=device_names, loaded_names=loaded_names) def test_load_devices_from_happi_2_fail(tmp_path, monkeypatch): """ Function ``load_devices_from_happi``: parameter ``namespace`` is required and must be of type ``dict``. """ _configure_happi(tmp_path, monkeypatch, json_devices=_happi_json_db_1) with pytest.raises(TypeError, match= "missing 1 required keyword-only argument: 'namespace'"): load_devices_from_happi(['det', 'motor']) with pytest.raises(TypeError, match= "Parameter 'namespace' must be a dictionary"): load_devices_from_happi(['det', 'motor'], namespace=[1, 2, 3]) <|reserved_special_token_1|> <|reserved_special_token_0|> def create_local_imports_files(tmp_path): path_dir = os.path.join(tmp_path, 'dir_local_imports') fln_func = os.path.join(path_dir, 'file_func.py') fln_gen = os.path.join(path_dir, 'file_gen.py') os.makedirs(path_dir, exist_ok=True) code1 = """ from bluesky_queueserver.manager.profile_tools import set_user_ns # Function that has the parameter 'ipython' @set_user_ns def f1(some_value, user_ns, ipython): user_ns["func_was_called"] = "func_was_called" return (some_value, user_ns["v_from_namespace"], bool(ipython)) # Function that has no parameter 'ipython' @set_user_ns def f1a(some_value, user_ns): user_ns["func_A_was_called"] = 
"func_was_called" return (some_value, user_ns["v_from_namespace"]) """ with open(fln_func, 'w') as f: f.writelines(code1) code2 = """ from bluesky_queueserver.manager.profile_tools import set_user_ns # Function that has the parameter 'ipython' @set_user_ns def f2(some_value, user_ns, ipython): user_ns["gen_was_called"] = "gen_was_called" yield (some_value, user_ns["v_from_namespace"], bool(ipython)) # Function that has no parameter 'ipython' @set_user_ns def f2a(some_value, user_ns): user_ns["gen_A_was_called"] = "gen_was_called" yield (some_value, user_ns["v_from_namespace"]) @set_user_ns def f3(some_value, user_ns, ipython): user_ns["value_f3"] = some_value f3(91) """ with open(fln_gen, 'w') as f: f.writelines(code2) patch_code = """ from dir_local_imports.file_func import f1, f1a from dir_local_imports.file_gen import f2, f2a from bluesky_queueserver.manager.profile_tools import set_user_ns @set_user_ns def f4(some_value, user_ns, ipython): user_ns["value_f4"] = some_value f4(90) """ def test_set_user_ns_1(tmp_path): """ Tests for ``set_user_ns`` decorator. The functionality of the decorator is fully tested (only without IPython): - using ``global_user_namespace`` to pass values in and out of the function defined in the imported module (emulation of ``get_ipython().user_ns``). - checking if the function is executed from IPython (only for the function defined in the imported module). 
""" pc_path = copy_default_profile_collection(tmp_path) create_local_imports_files(pc_path) patch_first_startup_file(pc_path, patch_code) nspace = load_profile_collection(pc_path) assert len(nspace) > 0, 'Failed to load the profile collection' assert 'f1' in nspace, 'Test for local imports failed' assert 'f2' in nspace, 'Test for local imports failed' assert inspect.isgeneratorfunction(nspace['f1']) is False assert inspect.isgeneratorfunction(nspace['f2']) is True def check_signature(func): params = inspect.signature(func).parameters assert 'user_ns' not in params assert 'ipython' not in params check_signature(nspace['f1']) check_signature(nspace['f1a']) check_signature(nspace['f2']) check_signature(nspace['f2a']) assert nspace['value_f3'] == 91 assert nspace['value_f4'] == 90 global_user_namespace.set_user_namespace(user_ns=nspace, use_ipython=False) global_user_namespace.user_ns['v_from_namespace'] = 'value-sent-to-func' assert nspace['v_from_namespace'] == 'value-sent-to-func' result_func = nspace['f1'](60) assert nspace['func_was_called'] == 'func_was_called' assert result_func[0] == 60 assert result_func[1] == 'value-sent-to-func' assert result_func[2] is False result_func = nspace['f1a'](65) assert nspace['func_A_was_called'] == 'func_was_called' assert result_func[0] == 65 assert result_func[1] == 'value-sent-to-func' global_user_namespace.user_ns['v_from_namespace'] = 'value-sent-to-gen' result_func = list(nspace['f2'](110))[0] assert nspace['gen_was_called'] == 'gen_was_called' assert result_func[0] == 110 assert result_func[1] == 'value-sent-to-gen' assert result_func[2] is False result_func = list(nspace['f2a'](115))[0] assert nspace['gen_A_was_called'] == 'gen_was_called' assert result_func[0] == 115 assert result_func[1] == 'value-sent-to-gen' def test_global_user_namespace(): """ Basic test for ``global_user_namespace``. 
""" ns = {'ab': 1, 'cd': 2} global_user_namespace.set_user_namespace(user_ns=ns) assert global_user_namespace.user_ns == ns assert global_user_namespace.use_ipython is False global_user_namespace.set_user_namespace(user_ns={}, use_ipython=True) assert global_user_namespace.user_ns == {} assert global_user_namespace.use_ipython is True global_user_namespace.set_user_namespace(user_ns=ns, use_ipython=False) assert global_user_namespace.user_ns == ns assert global_user_namespace.use_ipython is False _happi_json_db_1 = """ { "det": { "_id": "det", "active": true, "args": [], "device_class": "ophyd.sim.DetWithCountTime", "documentation": null, "kwargs": { "name": "{{name}}" }, "name": "det", "type": "OphydItem" }, "motor": { "_id": "motor", "active": true, "args": [], "device_class": "ophyd.sim.SynAxisNoPosition", "documentation": null, "kwargs": { "name": "{{name}}" }, "name": "motor", "type": "OphydItem" }, "motor1": { "_id": "motor1", "active": true, "args": [], "device_class": "ophyd.sim.SynAxisNoHints", "documentation": null, "kwargs": { "name": "{{name}}" }, "name": "motor1", "type": "OphydItem" }, "tst_motor2": { "_id": "tst_motor2", "active": true, "args": [], "device_class": "ophyd.sim.SynAxisNoHints", "documentation": null, "kwargs": { "name": "{{name}}" }, "name": "tst_motor2", "type": "OphydItem" }, "motor3": { "_id": "motor3", "active": true, "args": [], "device_class": "ophyd.sim.SynAxis", "documentation": null, "kwargs": { "name": "{{name}}" }, "name": "motor3", "type": "OphydItem" }, "motor3_duplicate_error": { "_id": "motor3", "active": false, "args": [], "device_class": "ophyd.sim.SynAxis", "documentation": null, "kwargs": { "name": "{{name}}" }, "name": "motor3", "type": "OphydItem" } } """ def _configure_happi(tmp_path, monkeypatch, json_devices): path_json = os.path.join(tmp_path, 'sim_devices.json') path_ini = os.path.join(tmp_path, 'happi.ini') happi_ini_text = f'[DEFAULT]\nbackend=json\npath={path_json}' with open(path_ini, 'w') as f: 
f.write(happi_ini_text) with open(path_json, 'w') as f: f.write(json_devices) monkeypatch.setenv('HAPPI_CFG', path_ini) @pytest.mark.parametrize('device_names, loaded_names, kw_args, success, errmsg' , [([], [], {}, True, ''), (('det', 'motor'), ('det', 'motor'), {}, True, ''), (['det', 'motor'], ('det', 'motor'), {}, True, ''), ((('det', ''), ['motor', '']), ('det', 'motor'), {}, True, ''), (('det', ['motor', '']), ('det', 'motor'), {}, True, ''), (('det', ('motor', ''), ( 'tst_motor2', 'motor2')), ('det', 'motor', 'motor2'), {}, True, ''), (( ('motor1', 'motor1_copy1'), ('motor1', 'motor1_copy2')), ( 'motor1_copy1', 'motor1_copy2'), {}, True, ''), (10, ('det', 'motor'), {}, False, "Parameter 'device_names' value must be a tuple or a list"), ('string', ('det', 'motor'), {}, False, "Parameter 'device_names' value must be a tuple or a list"), (('det', 10), ('det', 'motor'), {}, False, "Parameter 'device_names': element .* must be str, tuple or list"), (( 10, 'motor'), ('det', 'motor'), {}, False, "Parameter 'device_names': element .* must be str, tuple or list"), (( 'det', (10, 'motor2')), ('det', 'motor'), {}, False, 'element .* is expected to be in the form'), (('det', ('tst_motor2', 10 )), ('det', 'motor'), {}, False, 'element .* is expected to be in the form'), (('det', ('tst_motor2', 'motor2', 10)), ('det', 'motor'), {}, False, 'element .* is expected to be in the form'), (('det', 'motor10'), ( 'det', 'motor10'), {}, False, 'No devices with name'), (('det', 'motor3'), ('det', 'motor3'), {}, False, 'Multiple devices with name'), (('det', 'motor3'), ('det', 'motor3'), {'active': True}, True, ''), (( 'det', 'motor3'), ('det', 'motor3'), {'active': False}, False, "No devices with name 'det' were found in Happi database."), (('motor3' ,), ('motor3',), {'active': False}, True, ''), (('det', ['motor', 'motor3_new']), ('det', 'motor3_new'), {}, True, ''), (('det', ['motor', 'Motor']), ('det', 'motor'), {}, False, 'may consist of lowercase letters, numbers'), (('det', 
['motor', 'moTor']), ('det', 'motor'), {}, False, 'may consist of lowercase letters, numbers'), (('det', ['motor', '_motor']), ('det', 'motor'), {}, False, 'may consist of lowercase letters, numbers'), (('det', ['motor', ' motor']), ('det', 'motor'), {}, False, 'may consist of lowercase letters, numbers'), (('det', ['motor', 'motor ']), ('det', 'motor'), {}, False, 'may consist of lowercase letters, numbers'), (('det', ['motor', 'motor new']), ('det', 'motor'), {}, False, 'may consist of lowercase letters, numbers'), (('det', ['motor', 'motor_$new']), ('det', 'motor'), {}, False, 'may consist of lowercase letters, numbers'), (('det', ['motor', '2motor_$new']), ('det', 'motor'), {}, False, 'may consist of lowercase letters, numbers')]) def test_load_devices_from_happi_1(tmp_path, monkeypatch, device_names, loaded_names, kw_args, success, errmsg): """ Tests for ``load_devices_from_happi``. """ _configure_happi(tmp_path, monkeypatch, json_devices=_happi_json_db_1) if success: ns = {} dlist = load_devices_from_happi(device_names, namespace=ns, **kw_args) assert len(ns) == len(loaded_names), str(ns) for d in loaded_names: assert d in ns assert set(dlist) == set(loaded_names) else: with pytest.raises(Exception, match=errmsg): ns = {} load_devices_from_happi(device_names, namespace=ns, **kw_args) def _test_loading(device_names, loaded_names): if success: load_devices_from_happi(device_names, namespace=locals(), **kw_args ) for d in loaded_names: assert d in locals() else: with pytest.raises(Exception, match=errmsg): load_devices_from_happi(device_names, namespace=locals(), **kw_args) _test_loading(device_names=device_names, loaded_names=loaded_names) def test_load_devices_from_happi_2_fail(tmp_path, monkeypatch): """ Function ``load_devices_from_happi``: parameter ``namespace`` is required and must be of type ``dict``. 
""" _configure_happi(tmp_path, monkeypatch, json_devices=_happi_json_db_1) with pytest.raises(TypeError, match= "missing 1 required keyword-only argument: 'namespace'"): load_devices_from_happi(['det', 'motor']) with pytest.raises(TypeError, match= "Parameter 'namespace' must be a dictionary"): load_devices_from_happi(['det', 'motor'], namespace=[1, 2, 3]) <|reserved_special_token_1|> import os import inspect import pytest from ._common import copy_default_profile_collection, patch_first_startup_file from bluesky_queueserver.manager.profile_tools import global_user_namespace, load_devices_from_happi from bluesky_queueserver.manager.profile_ops import load_profile_collection def create_local_imports_files(tmp_path): path_dir = os.path.join(tmp_path, 'dir_local_imports') fln_func = os.path.join(path_dir, 'file_func.py') fln_gen = os.path.join(path_dir, 'file_gen.py') os.makedirs(path_dir, exist_ok=True) code1 = """ from bluesky_queueserver.manager.profile_tools import set_user_ns # Function that has the parameter 'ipython' @set_user_ns def f1(some_value, user_ns, ipython): user_ns["func_was_called"] = "func_was_called" return (some_value, user_ns["v_from_namespace"], bool(ipython)) # Function that has no parameter 'ipython' @set_user_ns def f1a(some_value, user_ns): user_ns["func_A_was_called"] = "func_was_called" return (some_value, user_ns["v_from_namespace"]) """ with open(fln_func, 'w') as f: f.writelines(code1) code2 = """ from bluesky_queueserver.manager.profile_tools import set_user_ns # Function that has the parameter 'ipython' @set_user_ns def f2(some_value, user_ns, ipython): user_ns["gen_was_called"] = "gen_was_called" yield (some_value, user_ns["v_from_namespace"], bool(ipython)) # Function that has no parameter 'ipython' @set_user_ns def f2a(some_value, user_ns): user_ns["gen_A_was_called"] = "gen_was_called" yield (some_value, user_ns["v_from_namespace"]) @set_user_ns def f3(some_value, user_ns, ipython): user_ns["value_f3"] = some_value f3(91) """ with 
open(fln_gen, 'w') as f: f.writelines(code2) patch_code = """ from dir_local_imports.file_func import f1, f1a from dir_local_imports.file_gen import f2, f2a from bluesky_queueserver.manager.profile_tools import set_user_ns @set_user_ns def f4(some_value, user_ns, ipython): user_ns["value_f4"] = some_value f4(90) """ def test_set_user_ns_1(tmp_path): """ Tests for ``set_user_ns`` decorator. The functionality of the decorator is fully tested (only without IPython): - using ``global_user_namespace`` to pass values in and out of the function defined in the imported module (emulation of ``get_ipython().user_ns``). - checking if the function is executed from IPython (only for the function defined in the imported module). """ pc_path = copy_default_profile_collection(tmp_path) create_local_imports_files(pc_path) patch_first_startup_file(pc_path, patch_code) nspace = load_profile_collection(pc_path) assert len(nspace) > 0, 'Failed to load the profile collection' assert 'f1' in nspace, 'Test for local imports failed' assert 'f2' in nspace, 'Test for local imports failed' assert inspect.isgeneratorfunction(nspace['f1']) is False assert inspect.isgeneratorfunction(nspace['f2']) is True def check_signature(func): params = inspect.signature(func).parameters assert 'user_ns' not in params assert 'ipython' not in params check_signature(nspace['f1']) check_signature(nspace['f1a']) check_signature(nspace['f2']) check_signature(nspace['f2a']) assert nspace['value_f3'] == 91 assert nspace['value_f4'] == 90 global_user_namespace.set_user_namespace(user_ns=nspace, use_ipython=False) global_user_namespace.user_ns['v_from_namespace'] = 'value-sent-to-func' assert nspace['v_from_namespace'] == 'value-sent-to-func' result_func = nspace['f1'](60) assert nspace['func_was_called'] == 'func_was_called' assert result_func[0] == 60 assert result_func[1] == 'value-sent-to-func' assert result_func[2] is False result_func = nspace['f1a'](65) assert nspace['func_A_was_called'] == 'func_was_called' 
assert result_func[0] == 65 assert result_func[1] == 'value-sent-to-func' global_user_namespace.user_ns['v_from_namespace'] = 'value-sent-to-gen' result_func = list(nspace['f2'](110))[0] assert nspace['gen_was_called'] == 'gen_was_called' assert result_func[0] == 110 assert result_func[1] == 'value-sent-to-gen' assert result_func[2] is False result_func = list(nspace['f2a'](115))[0] assert nspace['gen_A_was_called'] == 'gen_was_called' assert result_func[0] == 115 assert result_func[1] == 'value-sent-to-gen' def test_global_user_namespace(): """ Basic test for ``global_user_namespace``. """ ns = {'ab': 1, 'cd': 2} global_user_namespace.set_user_namespace(user_ns=ns) assert global_user_namespace.user_ns == ns assert global_user_namespace.use_ipython is False global_user_namespace.set_user_namespace(user_ns={}, use_ipython=True) assert global_user_namespace.user_ns == {} assert global_user_namespace.use_ipython is True global_user_namespace.set_user_namespace(user_ns=ns, use_ipython=False) assert global_user_namespace.user_ns == ns assert global_user_namespace.use_ipython is False _happi_json_db_1 = """ { "det": { "_id": "det", "active": true, "args": [], "device_class": "ophyd.sim.DetWithCountTime", "documentation": null, "kwargs": { "name": "{{name}}" }, "name": "det", "type": "OphydItem" }, "motor": { "_id": "motor", "active": true, "args": [], "device_class": "ophyd.sim.SynAxisNoPosition", "documentation": null, "kwargs": { "name": "{{name}}" }, "name": "motor", "type": "OphydItem" }, "motor1": { "_id": "motor1", "active": true, "args": [], "device_class": "ophyd.sim.SynAxisNoHints", "documentation": null, "kwargs": { "name": "{{name}}" }, "name": "motor1", "type": "OphydItem" }, "tst_motor2": { "_id": "tst_motor2", "active": true, "args": [], "device_class": "ophyd.sim.SynAxisNoHints", "documentation": null, "kwargs": { "name": "{{name}}" }, "name": "tst_motor2", "type": "OphydItem" }, "motor3": { "_id": "motor3", "active": true, "args": [], "device_class": 
"ophyd.sim.SynAxis", "documentation": null, "kwargs": { "name": "{{name}}" }, "name": "motor3", "type": "OphydItem" }, "motor3_duplicate_error": { "_id": "motor3", "active": false, "args": [], "device_class": "ophyd.sim.SynAxis", "documentation": null, "kwargs": { "name": "{{name}}" }, "name": "motor3", "type": "OphydItem" } } """ def _configure_happi(tmp_path, monkeypatch, json_devices): path_json = os.path.join(tmp_path, 'sim_devices.json') path_ini = os.path.join(tmp_path, 'happi.ini') happi_ini_text = f'[DEFAULT]\nbackend=json\npath={path_json}' with open(path_ini, 'w') as f: f.write(happi_ini_text) with open(path_json, 'w') as f: f.write(json_devices) monkeypatch.setenv('HAPPI_CFG', path_ini) @pytest.mark.parametrize('device_names, loaded_names, kw_args, success, errmsg' , [([], [], {}, True, ''), (('det', 'motor'), ('det', 'motor'), {}, True, ''), (['det', 'motor'], ('det', 'motor'), {}, True, ''), ((('det', ''), ['motor', '']), ('det', 'motor'), {}, True, ''), (('det', ['motor', '']), ('det', 'motor'), {}, True, ''), (('det', ('motor', ''), ( 'tst_motor2', 'motor2')), ('det', 'motor', 'motor2'), {}, True, ''), (( ('motor1', 'motor1_copy1'), ('motor1', 'motor1_copy2')), ( 'motor1_copy1', 'motor1_copy2'), {}, True, ''), (10, ('det', 'motor'), {}, False, "Parameter 'device_names' value must be a tuple or a list"), ('string', ('det', 'motor'), {}, False, "Parameter 'device_names' value must be a tuple or a list"), (('det', 10), ('det', 'motor'), {}, False, "Parameter 'device_names': element .* must be str, tuple or list"), (( 10, 'motor'), ('det', 'motor'), {}, False, "Parameter 'device_names': element .* must be str, tuple or list"), (( 'det', (10, 'motor2')), ('det', 'motor'), {}, False, 'element .* is expected to be in the form'), (('det', ('tst_motor2', 10 )), ('det', 'motor'), {}, False, 'element .* is expected to be in the form'), (('det', ('tst_motor2', 'motor2', 10)), ('det', 'motor'), {}, False, 'element .* is expected to be in the form'), (('det', 
'motor10'), ( 'det', 'motor10'), {}, False, 'No devices with name'), (('det', 'motor3'), ('det', 'motor3'), {}, False, 'Multiple devices with name'), (('det', 'motor3'), ('det', 'motor3'), {'active': True}, True, ''), (( 'det', 'motor3'), ('det', 'motor3'), {'active': False}, False, "No devices with name 'det' were found in Happi database."), (('motor3' ,), ('motor3',), {'active': False}, True, ''), (('det', ['motor', 'motor3_new']), ('det', 'motor3_new'), {}, True, ''), (('det', ['motor', 'Motor']), ('det', 'motor'), {}, False, 'may consist of lowercase letters, numbers'), (('det', ['motor', 'moTor']), ('det', 'motor'), {}, False, 'may consist of lowercase letters, numbers'), (('det', ['motor', '_motor']), ('det', 'motor'), {}, False, 'may consist of lowercase letters, numbers'), (('det', ['motor', ' motor']), ('det', 'motor'), {}, False, 'may consist of lowercase letters, numbers'), (('det', ['motor', 'motor ']), ('det', 'motor'), {}, False, 'may consist of lowercase letters, numbers'), (('det', ['motor', 'motor new']), ('det', 'motor'), {}, False, 'may consist of lowercase letters, numbers'), (('det', ['motor', 'motor_$new']), ('det', 'motor'), {}, False, 'may consist of lowercase letters, numbers'), (('det', ['motor', '2motor_$new']), ('det', 'motor'), {}, False, 'may consist of lowercase letters, numbers')]) def test_load_devices_from_happi_1(tmp_path, monkeypatch, device_names, loaded_names, kw_args, success, errmsg): """ Tests for ``load_devices_from_happi``. 
""" _configure_happi(tmp_path, monkeypatch, json_devices=_happi_json_db_1) if success: ns = {} dlist = load_devices_from_happi(device_names, namespace=ns, **kw_args) assert len(ns) == len(loaded_names), str(ns) for d in loaded_names: assert d in ns assert set(dlist) == set(loaded_names) else: with pytest.raises(Exception, match=errmsg): ns = {} load_devices_from_happi(device_names, namespace=ns, **kw_args) def _test_loading(device_names, loaded_names): if success: load_devices_from_happi(device_names, namespace=locals(), **kw_args ) for d in loaded_names: assert d in locals() else: with pytest.raises(Exception, match=errmsg): load_devices_from_happi(device_names, namespace=locals(), **kw_args) _test_loading(device_names=device_names, loaded_names=loaded_names) def test_load_devices_from_happi_2_fail(tmp_path, monkeypatch): """ Function ``load_devices_from_happi``: parameter ``namespace`` is required and must be of type ``dict``. """ _configure_happi(tmp_path, monkeypatch, json_devices=_happi_json_db_1) with pytest.raises(TypeError, match= "missing 1 required keyword-only argument: 'namespace'"): load_devices_from_happi(['det', 'motor']) with pytest.raises(TypeError, match= "Parameter 'namespace' must be a dictionary"): load_devices_from_happi(['det', 'motor'], namespace=[1, 2, 3]) <|reserved_special_token_1|> import os import inspect import pytest from ._common import copy_default_profile_collection, patch_first_startup_file from bluesky_queueserver.manager.profile_tools import global_user_namespace, load_devices_from_happi from bluesky_queueserver.manager.profile_ops import load_profile_collection def create_local_imports_files(tmp_path): path_dir = os.path.join(tmp_path, "dir_local_imports") fln_func = os.path.join(path_dir, "file_func.py") fln_gen = os.path.join(path_dir, "file_gen.py") os.makedirs(path_dir, exist_ok=True) # Create file1 code1 = """ from bluesky_queueserver.manager.profile_tools import set_user_ns # Function that has the parameter 'ipython' 
@set_user_ns def f1(some_value, user_ns, ipython): user_ns["func_was_called"] = "func_was_called" return (some_value, user_ns["v_from_namespace"], bool(ipython)) # Function that has no parameter 'ipython' @set_user_ns def f1a(some_value, user_ns): user_ns["func_A_was_called"] = "func_was_called" return (some_value, user_ns["v_from_namespace"]) """ with open(fln_func, "w") as f: f.writelines(code1) # Create file2 code2 = """ from bluesky_queueserver.manager.profile_tools import set_user_ns # Function that has the parameter 'ipython' @set_user_ns def f2(some_value, user_ns, ipython): user_ns["gen_was_called"] = "gen_was_called" yield (some_value, user_ns["v_from_namespace"], bool(ipython)) # Function that has no parameter 'ipython' @set_user_ns def f2a(some_value, user_ns): user_ns["gen_A_was_called"] = "gen_was_called" yield (some_value, user_ns["v_from_namespace"]) @set_user_ns def f3(some_value, user_ns, ipython): user_ns["value_f3"] = some_value f3(91) """ with open(fln_gen, "w") as f: f.writelines(code2) patch_code = """ from dir_local_imports.file_func import f1, f1a from dir_local_imports.file_gen import f2, f2a from bluesky_queueserver.manager.profile_tools import set_user_ns @set_user_ns def f4(some_value, user_ns, ipython): user_ns["value_f4"] = some_value f4(90) """ def test_set_user_ns_1(tmp_path): """ Tests for ``set_user_ns`` decorator. The functionality of the decorator is fully tested (only without IPython): - using ``global_user_namespace`` to pass values in and out of the function defined in the imported module (emulation of ``get_ipython().user_ns``). - checking if the function is executed from IPython (only for the function defined in the imported module). 
""" pc_path = copy_default_profile_collection(tmp_path) create_local_imports_files(pc_path) patch_first_startup_file(pc_path, patch_code) nspace = load_profile_collection(pc_path) assert len(nspace) > 0, "Failed to load the profile collection" assert "f1" in nspace, "Test for local imports failed" assert "f2" in nspace, "Test for local imports failed" # Test if the decorator `set_user_ns` does not change function type assert inspect.isgeneratorfunction(nspace["f1"]) is False assert inspect.isgeneratorfunction(nspace["f2"]) is True # Check if the extra arguments are removed from the function signature def check_signature(func): params = inspect.signature(func).parameters assert "user_ns" not in params assert "ipython" not in params check_signature(nspace["f1"]) check_signature(nspace["f1a"]) check_signature(nspace["f2"]) check_signature(nspace["f2a"]) assert nspace["value_f3"] == 91 assert nspace["value_f4"] == 90 # Test function global_user_namespace.set_user_namespace(user_ns=nspace, use_ipython=False) global_user_namespace.user_ns["v_from_namespace"] = "value-sent-to-func" assert nspace["v_from_namespace"] == "value-sent-to-func" result_func = nspace["f1"](60) assert nspace["func_was_called"] == "func_was_called" assert result_func[0] == 60 assert result_func[1] == "value-sent-to-func" assert result_func[2] is False result_func = nspace["f1a"](65) assert nspace["func_A_was_called"] == "func_was_called" assert result_func[0] == 65 assert result_func[1] == "value-sent-to-func" # Test generator global_user_namespace.user_ns["v_from_namespace"] = "value-sent-to-gen" result_func = list(nspace["f2"](110))[0] assert nspace["gen_was_called"] == "gen_was_called" assert result_func[0] == 110 assert result_func[1] == "value-sent-to-gen" assert result_func[2] is False result_func = list(nspace["f2a"](115))[0] assert nspace["gen_A_was_called"] == "gen_was_called" assert result_func[0] == 115 assert result_func[1] == "value-sent-to-gen" def test_global_user_namespace(): """ 
Basic test for ``global_user_namespace``. """ ns = {"ab": 1, "cd": 2} global_user_namespace.set_user_namespace(user_ns=ns) assert global_user_namespace.user_ns == ns assert global_user_namespace.use_ipython is False global_user_namespace.set_user_namespace(user_ns={}, use_ipython=True) assert global_user_namespace.user_ns == {} assert global_user_namespace.use_ipython is True global_user_namespace.set_user_namespace(user_ns=ns, use_ipython=False) assert global_user_namespace.user_ns == ns assert global_user_namespace.use_ipython is False _happi_json_db_1 = """ { "det": { "_id": "det", "active": true, "args": [], "device_class": "ophyd.sim.DetWithCountTime", "documentation": null, "kwargs": { "name": "{{name}}" }, "name": "det", "type": "OphydItem" }, "motor": { "_id": "motor", "active": true, "args": [], "device_class": "ophyd.sim.SynAxisNoPosition", "documentation": null, "kwargs": { "name": "{{name}}" }, "name": "motor", "type": "OphydItem" }, "motor1": { "_id": "motor1", "active": true, "args": [], "device_class": "ophyd.sim.SynAxisNoHints", "documentation": null, "kwargs": { "name": "{{name}}" }, "name": "motor1", "type": "OphydItem" }, "tst_motor2": { "_id": "tst_motor2", "active": true, "args": [], "device_class": "ophyd.sim.SynAxisNoHints", "documentation": null, "kwargs": { "name": "{{name}}" }, "name": "tst_motor2", "type": "OphydItem" }, "motor3": { "_id": "motor3", "active": true, "args": [], "device_class": "ophyd.sim.SynAxis", "documentation": null, "kwargs": { "name": "{{name}}" }, "name": "motor3", "type": "OphydItem" }, "motor3_duplicate_error": { "_id": "motor3", "active": false, "args": [], "device_class": "ophyd.sim.SynAxis", "documentation": null, "kwargs": { "name": "{{name}}" }, "name": "motor3", "type": "OphydItem" } } """ def _configure_happi(tmp_path, monkeypatch, json_devices): path_json = os.path.join(tmp_path, "sim_devices.json") path_ini = os.path.join(tmp_path, "happi.ini") happi_ini_text = f"[DEFAULT]\nbackend=json\npath={path_json}" 
with open(path_ini, "w") as f: f.write(happi_ini_text) with open(path_json, "w") as f: f.write(json_devices) monkeypatch.setenv("HAPPI_CFG", path_ini) # fmt: off @pytest.mark.parametrize("device_names, loaded_names, kw_args, success, errmsg", [ ([], [], {}, True, ""), # No devices are loaded if the list of devices is empty (("det", "motor"), ("det", "motor"), {}, True, ""), (["det", "motor"], ("det", "motor"), {}, True, ""), ((("det", ""), ["motor", ""]), ("det", "motor"), {}, True, ""), (("det", ["motor", ""]), ("det", "motor"), {}, True, ""), (("det", ("motor", ""), ("tst_motor2", "motor2")), ("det", "motor", "motor2"), {}, True, ""), # This is not typical use case, but the same device may be loaded multiple times # with different names if needed. ((("motor1", "motor1_copy1"), ("motor1", "motor1_copy2")), ("motor1_copy1", "motor1_copy2"), {}, True, ""), # Incorrect type of the device list (10, ("det", "motor"), {}, False, "Parameter 'device_names' value must be a tuple or a list"), ("string", ("det", "motor"), {}, False, "Parameter 'device_names' value must be a tuple or a list"), # Incorrecty type or form of a device list element (("det", 10), ("det", "motor"), {}, False, "Parameter 'device_names': element .* must be str, tuple or list"), ((10, "motor"), ("det", "motor"), {}, False, "Parameter 'device_names': element .* must be str, tuple or list"), (("det", (10, "motor2")), ("det", "motor"), {}, False, "element .* is expected to be in the form"), (("det", ("tst_motor2", 10)), ("det", "motor"), {}, False, "element .* is expected to be in the form"), (("det", ("tst_motor2", "motor2", 10)), ("det", "motor"), {}, False, "element .* is expected to be in the form"), # No device found (("det", "motor10"), ("det", "motor10"), {}, False, "No devices with name"), # Multiple devices found (search for "motor3" yields multile devices, this is database issue) (("det", "motor3"), ("det", "motor3"), {}, False, "Multiple devices with name"), # Use additional search parameters. 
(Two entries for "motor3" differ in the value of `active` field. # A single entry for `det` has `active==True`.) (("det", "motor3"), ("det", "motor3"), {"active": True}, True, ""), (("det", "motor3"), ("det", "motor3"), {"active": False}, False, "No devices with name 'det' were found in Happi database."), (("motor3",), ("motor3",), {"active": False}, True, ""), # Verify that valid device names are accepted (("det", ["motor", "motor3_new"]), ("det", "motor3_new"), {}, True, ""), # Invalid new device name (("det", ["motor", "Motor"]), ("det", "motor"), {}, False, "may consist of lowercase letters, numbers"), (("det", ["motor", "moTor"]), ("det", "motor"), {}, False, "may consist of lowercase letters, numbers"), (("det", ["motor", "_motor"]), ("det", "motor"), {}, False, "may consist of lowercase letters, numbers"), (("det", ["motor", " motor"]), ("det", "motor"), {}, False, "may consist of lowercase letters, numbers"), (("det", ["motor", "motor "]), ("det", "motor"), {}, False, "may consist of lowercase letters, numbers"), (("det", ["motor", "motor new"]), ("det", "motor"), {}, False, "may consist of lowercase letters, numbers"), (("det", ["motor", "motor_$new"]), ("det", "motor"), {}, False, "may consist of lowercase letters, numbers"), (("det", ["motor", "2motor_$new"]), ("det", "motor"), {}, False, "may consist of lowercase letters, numbers"), ]) # fmt: on def test_load_devices_from_happi_1(tmp_path, monkeypatch, device_names, loaded_names, kw_args, success, errmsg): """ Tests for ``load_devices_from_happi``. 
""" _configure_happi(tmp_path, monkeypatch, json_devices=_happi_json_db_1) # Load as a dictionary if success: ns = {} dlist = load_devices_from_happi(device_names, namespace=ns, **kw_args) assert len(ns) == len(loaded_names), str(ns) for d in loaded_names: assert d in ns assert set(dlist) == set(loaded_names) else: with pytest.raises(Exception, match=errmsg): ns = {} load_devices_from_happi(device_names, namespace=ns, **kw_args) # Load in local namespace def _test_loading(device_names, loaded_names): if success: load_devices_from_happi(device_names, namespace=locals(), **kw_args) for d in loaded_names: assert d in locals() else: with pytest.raises(Exception, match=errmsg): load_devices_from_happi(device_names, namespace=locals(), **kw_args) _test_loading(device_names=device_names, loaded_names=loaded_names) def test_load_devices_from_happi_2_fail(tmp_path, monkeypatch): """ Function ``load_devices_from_happi``: parameter ``namespace`` is required and must be of type ``dict``. """ _configure_happi(tmp_path, monkeypatch, json_devices=_happi_json_db_1) # Missing 'namespace' parameter with pytest.raises(TypeError, match="missing 1 required keyword-only argument: 'namespace'"): load_devices_from_happi(["det", "motor"]) # Incorrect type of 'namespace' parameter with pytest.raises(TypeError, match="Parameter 'namespace' must be a dictionary"): load_devices_from_happi(["det", "motor"], namespace=[1, 2, 3])
flexible
{ "blob_id": "ad1ec5dd8fae290ab6cb73b17c5522e062261359", "index": 6698, "step-1": "<mask token>\n\n\ndef create_local_imports_files(tmp_path):\n path_dir = os.path.join(tmp_path, 'dir_local_imports')\n fln_func = os.path.join(path_dir, 'file_func.py')\n fln_gen = os.path.join(path_dir, 'file_gen.py')\n os.makedirs(path_dir, exist_ok=True)\n code1 = \"\"\"\nfrom bluesky_queueserver.manager.profile_tools import set_user_ns\n\n# Function that has the parameter 'ipython'\n@set_user_ns\ndef f1(some_value, user_ns, ipython):\n user_ns[\"func_was_called\"] = \"func_was_called\"\n return (some_value, user_ns[\"v_from_namespace\"], bool(ipython))\n\n# Function that has no parameter 'ipython'\n@set_user_ns\ndef f1a(some_value, user_ns):\n user_ns[\"func_A_was_called\"] = \"func_was_called\"\n return (some_value, user_ns[\"v_from_namespace\"])\n\n\"\"\"\n with open(fln_func, 'w') as f:\n f.writelines(code1)\n code2 = \"\"\"\nfrom bluesky_queueserver.manager.profile_tools import set_user_ns\n\n# Function that has the parameter 'ipython'\n@set_user_ns\ndef f2(some_value, user_ns, ipython):\n user_ns[\"gen_was_called\"] = \"gen_was_called\"\n yield (some_value, user_ns[\"v_from_namespace\"], bool(ipython))\n\n# Function that has no parameter 'ipython'\n@set_user_ns\ndef f2a(some_value, user_ns):\n user_ns[\"gen_A_was_called\"] = \"gen_was_called\"\n yield (some_value, user_ns[\"v_from_namespace\"])\n\n@set_user_ns\ndef f3(some_value, user_ns, ipython):\n user_ns[\"value_f3\"] = some_value\n\nf3(91)\n\n\"\"\"\n with open(fln_gen, 'w') as f:\n f.writelines(code2)\n\n\n<mask token>\n\n\ndef test_set_user_ns_1(tmp_path):\n \"\"\"\n Tests for ``set_user_ns`` decorator. 
The functionality of the decorator\n is fully tested (only without IPython):\n - using ``global_user_namespace`` to pass values in and out of the function\n defined in the imported module (emulation of ``get_ipython().user_ns``).\n - checking if the function is executed from IPython (only for the function\n defined in the imported module).\n \"\"\"\n pc_path = copy_default_profile_collection(tmp_path)\n create_local_imports_files(pc_path)\n patch_first_startup_file(pc_path, patch_code)\n nspace = load_profile_collection(pc_path)\n assert len(nspace) > 0, 'Failed to load the profile collection'\n assert 'f1' in nspace, 'Test for local imports failed'\n assert 'f2' in nspace, 'Test for local imports failed'\n assert inspect.isgeneratorfunction(nspace['f1']) is False\n assert inspect.isgeneratorfunction(nspace['f2']) is True\n\n def check_signature(func):\n params = inspect.signature(func).parameters\n assert 'user_ns' not in params\n assert 'ipython' not in params\n check_signature(nspace['f1'])\n check_signature(nspace['f1a'])\n check_signature(nspace['f2'])\n check_signature(nspace['f2a'])\n assert nspace['value_f3'] == 91\n assert nspace['value_f4'] == 90\n global_user_namespace.set_user_namespace(user_ns=nspace, use_ipython=False)\n global_user_namespace.user_ns['v_from_namespace'] = 'value-sent-to-func'\n assert nspace['v_from_namespace'] == 'value-sent-to-func'\n result_func = nspace['f1'](60)\n assert nspace['func_was_called'] == 'func_was_called'\n assert result_func[0] == 60\n assert result_func[1] == 'value-sent-to-func'\n assert result_func[2] is False\n result_func = nspace['f1a'](65)\n assert nspace['func_A_was_called'] == 'func_was_called'\n assert result_func[0] == 65\n assert result_func[1] == 'value-sent-to-func'\n global_user_namespace.user_ns['v_from_namespace'] = 'value-sent-to-gen'\n result_func = list(nspace['f2'](110))[0]\n assert nspace['gen_was_called'] == 'gen_was_called'\n assert result_func[0] == 110\n assert result_func[1] == 
'value-sent-to-gen'\n assert result_func[2] is False\n result_func = list(nspace['f2a'](115))[0]\n assert nspace['gen_A_was_called'] == 'gen_was_called'\n assert result_func[0] == 115\n assert result_func[1] == 'value-sent-to-gen'\n\n\ndef test_global_user_namespace():\n \"\"\"\n Basic test for ``global_user_namespace``.\n \"\"\"\n ns = {'ab': 1, 'cd': 2}\n global_user_namespace.set_user_namespace(user_ns=ns)\n assert global_user_namespace.user_ns == ns\n assert global_user_namespace.use_ipython is False\n global_user_namespace.set_user_namespace(user_ns={}, use_ipython=True)\n assert global_user_namespace.user_ns == {}\n assert global_user_namespace.use_ipython is True\n global_user_namespace.set_user_namespace(user_ns=ns, use_ipython=False)\n assert global_user_namespace.user_ns == ns\n assert global_user_namespace.use_ipython is False\n\n\n<mask token>\n\n\n@pytest.mark.parametrize('device_names, loaded_names, kw_args, success, errmsg'\n , [([], [], {}, True, ''), (('det', 'motor'), ('det', 'motor'), {}, \n True, ''), (['det', 'motor'], ('det', 'motor'), {}, True, ''), ((('det',\n ''), ['motor', '']), ('det', 'motor'), {}, True, ''), (('det', ['motor',\n '']), ('det', 'motor'), {}, True, ''), (('det', ('motor', ''), (\n 'tst_motor2', 'motor2')), ('det', 'motor', 'motor2'), {}, True, ''), ((\n ('motor1', 'motor1_copy1'), ('motor1', 'motor1_copy2')), (\n 'motor1_copy1', 'motor1_copy2'), {}, True, ''), (10, ('det', 'motor'),\n {}, False, \"Parameter 'device_names' value must be a tuple or a list\"),\n ('string', ('det', 'motor'), {}, False,\n \"Parameter 'device_names' value must be a tuple or a list\"), (('det', \n 10), ('det', 'motor'), {}, False,\n \"Parameter 'device_names': element .* must be str, tuple or list\"), ((\n 10, 'motor'), ('det', 'motor'), {}, False,\n \"Parameter 'device_names': element .* must be str, tuple or list\"), ((\n 'det', (10, 'motor2')), ('det', 'motor'), {}, False,\n 'element .* is expected to be in the form'), (('det', ('tst_motor2', 
10\n )), ('det', 'motor'), {}, False,\n 'element .* is expected to be in the form'), (('det', ('tst_motor2',\n 'motor2', 10)), ('det', 'motor'), {}, False,\n 'element .* is expected to be in the form'), (('det', 'motor10'), (\n 'det', 'motor10'), {}, False, 'No devices with name'), (('det',\n 'motor3'), ('det', 'motor3'), {}, False, 'Multiple devices with name'),\n (('det', 'motor3'), ('det', 'motor3'), {'active': True}, True, ''), ((\n 'det', 'motor3'), ('det', 'motor3'), {'active': False}, False,\n \"No devices with name 'det' were found in Happi database.\"), (('motor3'\n ,), ('motor3',), {'active': False}, True, ''), (('det', ['motor',\n 'motor3_new']), ('det', 'motor3_new'), {}, True, ''), (('det', ['motor',\n 'Motor']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers'), (('det', ['motor',\n 'moTor']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers'), (('det', ['motor',\n '_motor']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers'), (('det', ['motor',\n ' motor']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers'), (('det', ['motor',\n 'motor ']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers'), (('det', ['motor',\n 'motor new']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers'), (('det', ['motor',\n 'motor_$new']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers'), (('det', ['motor',\n '2motor_$new']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers')])\ndef test_load_devices_from_happi_1(tmp_path, monkeypatch, device_names,\n loaded_names, kw_args, success, errmsg):\n \"\"\"\n Tests for ``load_devices_from_happi``.\n \"\"\"\n _configure_happi(tmp_path, monkeypatch, json_devices=_happi_json_db_1)\n if success:\n ns = {}\n dlist = load_devices_from_happi(device_names, namespace=ns, **kw_args)\n assert len(ns) == len(loaded_names), str(ns)\n for d in 
loaded_names:\n assert d in ns\n assert set(dlist) == set(loaded_names)\n else:\n with pytest.raises(Exception, match=errmsg):\n ns = {}\n load_devices_from_happi(device_names, namespace=ns, **kw_args)\n\n def _test_loading(device_names, loaded_names):\n if success:\n load_devices_from_happi(device_names, namespace=locals(), **kw_args\n )\n for d in loaded_names:\n assert d in locals()\n else:\n with pytest.raises(Exception, match=errmsg):\n load_devices_from_happi(device_names, namespace=locals(),\n **kw_args)\n _test_loading(device_names=device_names, loaded_names=loaded_names)\n\n\ndef test_load_devices_from_happi_2_fail(tmp_path, monkeypatch):\n \"\"\"\n Function ``load_devices_from_happi``: parameter ``namespace`` is required and must be of type ``dict``.\n \"\"\"\n _configure_happi(tmp_path, monkeypatch, json_devices=_happi_json_db_1)\n with pytest.raises(TypeError, match=\n \"missing 1 required keyword-only argument: 'namespace'\"):\n load_devices_from_happi(['det', 'motor'])\n with pytest.raises(TypeError, match=\n \"Parameter 'namespace' must be a dictionary\"):\n load_devices_from_happi(['det', 'motor'], namespace=[1, 2, 3])\n", "step-2": "<mask token>\n\n\ndef create_local_imports_files(tmp_path):\n path_dir = os.path.join(tmp_path, 'dir_local_imports')\n fln_func = os.path.join(path_dir, 'file_func.py')\n fln_gen = os.path.join(path_dir, 'file_gen.py')\n os.makedirs(path_dir, exist_ok=True)\n code1 = \"\"\"\nfrom bluesky_queueserver.manager.profile_tools import set_user_ns\n\n# Function that has the parameter 'ipython'\n@set_user_ns\ndef f1(some_value, user_ns, ipython):\n user_ns[\"func_was_called\"] = \"func_was_called\"\n return (some_value, user_ns[\"v_from_namespace\"], bool(ipython))\n\n# Function that has no parameter 'ipython'\n@set_user_ns\ndef f1a(some_value, user_ns):\n user_ns[\"func_A_was_called\"] = \"func_was_called\"\n return (some_value, user_ns[\"v_from_namespace\"])\n\n\"\"\"\n with open(fln_func, 'w') as f:\n f.writelines(code1)\n 
code2 = \"\"\"\nfrom bluesky_queueserver.manager.profile_tools import set_user_ns\n\n# Function that has the parameter 'ipython'\n@set_user_ns\ndef f2(some_value, user_ns, ipython):\n user_ns[\"gen_was_called\"] = \"gen_was_called\"\n yield (some_value, user_ns[\"v_from_namespace\"], bool(ipython))\n\n# Function that has no parameter 'ipython'\n@set_user_ns\ndef f2a(some_value, user_ns):\n user_ns[\"gen_A_was_called\"] = \"gen_was_called\"\n yield (some_value, user_ns[\"v_from_namespace\"])\n\n@set_user_ns\ndef f3(some_value, user_ns, ipython):\n user_ns[\"value_f3\"] = some_value\n\nf3(91)\n\n\"\"\"\n with open(fln_gen, 'w') as f:\n f.writelines(code2)\n\n\n<mask token>\n\n\ndef test_set_user_ns_1(tmp_path):\n \"\"\"\n Tests for ``set_user_ns`` decorator. The functionality of the decorator\n is fully tested (only without IPython):\n - using ``global_user_namespace`` to pass values in and out of the function\n defined in the imported module (emulation of ``get_ipython().user_ns``).\n - checking if the function is executed from IPython (only for the function\n defined in the imported module).\n \"\"\"\n pc_path = copy_default_profile_collection(tmp_path)\n create_local_imports_files(pc_path)\n patch_first_startup_file(pc_path, patch_code)\n nspace = load_profile_collection(pc_path)\n assert len(nspace) > 0, 'Failed to load the profile collection'\n assert 'f1' in nspace, 'Test for local imports failed'\n assert 'f2' in nspace, 'Test for local imports failed'\n assert inspect.isgeneratorfunction(nspace['f1']) is False\n assert inspect.isgeneratorfunction(nspace['f2']) is True\n\n def check_signature(func):\n params = inspect.signature(func).parameters\n assert 'user_ns' not in params\n assert 'ipython' not in params\n check_signature(nspace['f1'])\n check_signature(nspace['f1a'])\n check_signature(nspace['f2'])\n check_signature(nspace['f2a'])\n assert nspace['value_f3'] == 91\n assert nspace['value_f4'] == 90\n 
global_user_namespace.set_user_namespace(user_ns=nspace, use_ipython=False)\n global_user_namespace.user_ns['v_from_namespace'] = 'value-sent-to-func'\n assert nspace['v_from_namespace'] == 'value-sent-to-func'\n result_func = nspace['f1'](60)\n assert nspace['func_was_called'] == 'func_was_called'\n assert result_func[0] == 60\n assert result_func[1] == 'value-sent-to-func'\n assert result_func[2] is False\n result_func = nspace['f1a'](65)\n assert nspace['func_A_was_called'] == 'func_was_called'\n assert result_func[0] == 65\n assert result_func[1] == 'value-sent-to-func'\n global_user_namespace.user_ns['v_from_namespace'] = 'value-sent-to-gen'\n result_func = list(nspace['f2'](110))[0]\n assert nspace['gen_was_called'] == 'gen_was_called'\n assert result_func[0] == 110\n assert result_func[1] == 'value-sent-to-gen'\n assert result_func[2] is False\n result_func = list(nspace['f2a'](115))[0]\n assert nspace['gen_A_was_called'] == 'gen_was_called'\n assert result_func[0] == 115\n assert result_func[1] == 'value-sent-to-gen'\n\n\ndef test_global_user_namespace():\n \"\"\"\n Basic test for ``global_user_namespace``.\n \"\"\"\n ns = {'ab': 1, 'cd': 2}\n global_user_namespace.set_user_namespace(user_ns=ns)\n assert global_user_namespace.user_ns == ns\n assert global_user_namespace.use_ipython is False\n global_user_namespace.set_user_namespace(user_ns={}, use_ipython=True)\n assert global_user_namespace.user_ns == {}\n assert global_user_namespace.use_ipython is True\n global_user_namespace.set_user_namespace(user_ns=ns, use_ipython=False)\n assert global_user_namespace.user_ns == ns\n assert global_user_namespace.use_ipython is False\n\n\n<mask token>\n\n\ndef _configure_happi(tmp_path, monkeypatch, json_devices):\n path_json = os.path.join(tmp_path, 'sim_devices.json')\n path_ini = os.path.join(tmp_path, 'happi.ini')\n happi_ini_text = f'[DEFAULT]\\nbackend=json\\npath={path_json}'\n with open(path_ini, 'w') as f:\n f.write(happi_ini_text)\n with open(path_json, 
'w') as f:\n f.write(json_devices)\n monkeypatch.setenv('HAPPI_CFG', path_ini)\n\n\n@pytest.mark.parametrize('device_names, loaded_names, kw_args, success, errmsg'\n , [([], [], {}, True, ''), (('det', 'motor'), ('det', 'motor'), {}, \n True, ''), (['det', 'motor'], ('det', 'motor'), {}, True, ''), ((('det',\n ''), ['motor', '']), ('det', 'motor'), {}, True, ''), (('det', ['motor',\n '']), ('det', 'motor'), {}, True, ''), (('det', ('motor', ''), (\n 'tst_motor2', 'motor2')), ('det', 'motor', 'motor2'), {}, True, ''), ((\n ('motor1', 'motor1_copy1'), ('motor1', 'motor1_copy2')), (\n 'motor1_copy1', 'motor1_copy2'), {}, True, ''), (10, ('det', 'motor'),\n {}, False, \"Parameter 'device_names' value must be a tuple or a list\"),\n ('string', ('det', 'motor'), {}, False,\n \"Parameter 'device_names' value must be a tuple or a list\"), (('det', \n 10), ('det', 'motor'), {}, False,\n \"Parameter 'device_names': element .* must be str, tuple or list\"), ((\n 10, 'motor'), ('det', 'motor'), {}, False,\n \"Parameter 'device_names': element .* must be str, tuple or list\"), ((\n 'det', (10, 'motor2')), ('det', 'motor'), {}, False,\n 'element .* is expected to be in the form'), (('det', ('tst_motor2', 10\n )), ('det', 'motor'), {}, False,\n 'element .* is expected to be in the form'), (('det', ('tst_motor2',\n 'motor2', 10)), ('det', 'motor'), {}, False,\n 'element .* is expected to be in the form'), (('det', 'motor10'), (\n 'det', 'motor10'), {}, False, 'No devices with name'), (('det',\n 'motor3'), ('det', 'motor3'), {}, False, 'Multiple devices with name'),\n (('det', 'motor3'), ('det', 'motor3'), {'active': True}, True, ''), ((\n 'det', 'motor3'), ('det', 'motor3'), {'active': False}, False,\n \"No devices with name 'det' were found in Happi database.\"), (('motor3'\n ,), ('motor3',), {'active': False}, True, ''), (('det', ['motor',\n 'motor3_new']), ('det', 'motor3_new'), {}, True, ''), (('det', ['motor',\n 'Motor']), ('det', 'motor'), {}, False,\n 'may consist of 
lowercase letters, numbers'), (('det', ['motor',\n 'moTor']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers'), (('det', ['motor',\n '_motor']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers'), (('det', ['motor',\n ' motor']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers'), (('det', ['motor',\n 'motor ']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers'), (('det', ['motor',\n 'motor new']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers'), (('det', ['motor',\n 'motor_$new']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers'), (('det', ['motor',\n '2motor_$new']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers')])\ndef test_load_devices_from_happi_1(tmp_path, monkeypatch, device_names,\n loaded_names, kw_args, success, errmsg):\n \"\"\"\n Tests for ``load_devices_from_happi``.\n \"\"\"\n _configure_happi(tmp_path, monkeypatch, json_devices=_happi_json_db_1)\n if success:\n ns = {}\n dlist = load_devices_from_happi(device_names, namespace=ns, **kw_args)\n assert len(ns) == len(loaded_names), str(ns)\n for d in loaded_names:\n assert d in ns\n assert set(dlist) == set(loaded_names)\n else:\n with pytest.raises(Exception, match=errmsg):\n ns = {}\n load_devices_from_happi(device_names, namespace=ns, **kw_args)\n\n def _test_loading(device_names, loaded_names):\n if success:\n load_devices_from_happi(device_names, namespace=locals(), **kw_args\n )\n for d in loaded_names:\n assert d in locals()\n else:\n with pytest.raises(Exception, match=errmsg):\n load_devices_from_happi(device_names, namespace=locals(),\n **kw_args)\n _test_loading(device_names=device_names, loaded_names=loaded_names)\n\n\ndef test_load_devices_from_happi_2_fail(tmp_path, monkeypatch):\n \"\"\"\n Function ``load_devices_from_happi``: parameter ``namespace`` is required and must be of type ``dict``.\n \"\"\"\n 
_configure_happi(tmp_path, monkeypatch, json_devices=_happi_json_db_1)\n with pytest.raises(TypeError, match=\n \"missing 1 required keyword-only argument: 'namespace'\"):\n load_devices_from_happi(['det', 'motor'])\n with pytest.raises(TypeError, match=\n \"Parameter 'namespace' must be a dictionary\"):\n load_devices_from_happi(['det', 'motor'], namespace=[1, 2, 3])\n", "step-3": "<mask token>\n\n\ndef create_local_imports_files(tmp_path):\n path_dir = os.path.join(tmp_path, 'dir_local_imports')\n fln_func = os.path.join(path_dir, 'file_func.py')\n fln_gen = os.path.join(path_dir, 'file_gen.py')\n os.makedirs(path_dir, exist_ok=True)\n code1 = \"\"\"\nfrom bluesky_queueserver.manager.profile_tools import set_user_ns\n\n# Function that has the parameter 'ipython'\n@set_user_ns\ndef f1(some_value, user_ns, ipython):\n user_ns[\"func_was_called\"] = \"func_was_called\"\n return (some_value, user_ns[\"v_from_namespace\"], bool(ipython))\n\n# Function that has no parameter 'ipython'\n@set_user_ns\ndef f1a(some_value, user_ns):\n user_ns[\"func_A_was_called\"] = \"func_was_called\"\n return (some_value, user_ns[\"v_from_namespace\"])\n\n\"\"\"\n with open(fln_func, 'w') as f:\n f.writelines(code1)\n code2 = \"\"\"\nfrom bluesky_queueserver.manager.profile_tools import set_user_ns\n\n# Function that has the parameter 'ipython'\n@set_user_ns\ndef f2(some_value, user_ns, ipython):\n user_ns[\"gen_was_called\"] = \"gen_was_called\"\n yield (some_value, user_ns[\"v_from_namespace\"], bool(ipython))\n\n# Function that has no parameter 'ipython'\n@set_user_ns\ndef f2a(some_value, user_ns):\n user_ns[\"gen_A_was_called\"] = \"gen_was_called\"\n yield (some_value, user_ns[\"v_from_namespace\"])\n\n@set_user_ns\ndef f3(some_value, user_ns, ipython):\n user_ns[\"value_f3\"] = some_value\n\nf3(91)\n\n\"\"\"\n with open(fln_gen, 'w') as f:\n f.writelines(code2)\n\n\npatch_code = \"\"\"\nfrom dir_local_imports.file_func import f1, f1a\nfrom dir_local_imports.file_gen import f2, 
f2a\n\nfrom bluesky_queueserver.manager.profile_tools import set_user_ns\n\n@set_user_ns\ndef f4(some_value, user_ns, ipython):\n user_ns[\"value_f4\"] = some_value\n\nf4(90)\n\n\"\"\"\n\n\ndef test_set_user_ns_1(tmp_path):\n \"\"\"\n Tests for ``set_user_ns`` decorator. The functionality of the decorator\n is fully tested (only without IPython):\n - using ``global_user_namespace`` to pass values in and out of the function\n defined in the imported module (emulation of ``get_ipython().user_ns``).\n - checking if the function is executed from IPython (only for the function\n defined in the imported module).\n \"\"\"\n pc_path = copy_default_profile_collection(tmp_path)\n create_local_imports_files(pc_path)\n patch_first_startup_file(pc_path, patch_code)\n nspace = load_profile_collection(pc_path)\n assert len(nspace) > 0, 'Failed to load the profile collection'\n assert 'f1' in nspace, 'Test for local imports failed'\n assert 'f2' in nspace, 'Test for local imports failed'\n assert inspect.isgeneratorfunction(nspace['f1']) is False\n assert inspect.isgeneratorfunction(nspace['f2']) is True\n\n def check_signature(func):\n params = inspect.signature(func).parameters\n assert 'user_ns' not in params\n assert 'ipython' not in params\n check_signature(nspace['f1'])\n check_signature(nspace['f1a'])\n check_signature(nspace['f2'])\n check_signature(nspace['f2a'])\n assert nspace['value_f3'] == 91\n assert nspace['value_f4'] == 90\n global_user_namespace.set_user_namespace(user_ns=nspace, use_ipython=False)\n global_user_namespace.user_ns['v_from_namespace'] = 'value-sent-to-func'\n assert nspace['v_from_namespace'] == 'value-sent-to-func'\n result_func = nspace['f1'](60)\n assert nspace['func_was_called'] == 'func_was_called'\n assert result_func[0] == 60\n assert result_func[1] == 'value-sent-to-func'\n assert result_func[2] is False\n result_func = nspace['f1a'](65)\n assert nspace['func_A_was_called'] == 'func_was_called'\n assert result_func[0] == 65\n assert 
result_func[1] == 'value-sent-to-func'\n global_user_namespace.user_ns['v_from_namespace'] = 'value-sent-to-gen'\n result_func = list(nspace['f2'](110))[0]\n assert nspace['gen_was_called'] == 'gen_was_called'\n assert result_func[0] == 110\n assert result_func[1] == 'value-sent-to-gen'\n assert result_func[2] is False\n result_func = list(nspace['f2a'](115))[0]\n assert nspace['gen_A_was_called'] == 'gen_was_called'\n assert result_func[0] == 115\n assert result_func[1] == 'value-sent-to-gen'\n\n\ndef test_global_user_namespace():\n \"\"\"\n Basic test for ``global_user_namespace``.\n \"\"\"\n ns = {'ab': 1, 'cd': 2}\n global_user_namespace.set_user_namespace(user_ns=ns)\n assert global_user_namespace.user_ns == ns\n assert global_user_namespace.use_ipython is False\n global_user_namespace.set_user_namespace(user_ns={}, use_ipython=True)\n assert global_user_namespace.user_ns == {}\n assert global_user_namespace.use_ipython is True\n global_user_namespace.set_user_namespace(user_ns=ns, use_ipython=False)\n assert global_user_namespace.user_ns == ns\n assert global_user_namespace.use_ipython is False\n\n\n_happi_json_db_1 = \"\"\"\n{\n \"det\": {\n \"_id\": \"det\",\n \"active\": true,\n \"args\": [],\n \"device_class\": \"ophyd.sim.DetWithCountTime\",\n \"documentation\": null,\n \"kwargs\": {\n \"name\": \"{{name}}\"\n },\n \"name\": \"det\",\n \"type\": \"OphydItem\"\n },\n \"motor\": {\n \"_id\": \"motor\",\n \"active\": true,\n \"args\": [],\n \"device_class\": \"ophyd.sim.SynAxisNoPosition\",\n \"documentation\": null,\n \"kwargs\": {\n \"name\": \"{{name}}\"\n },\n \"name\": \"motor\",\n \"type\": \"OphydItem\"\n },\n \"motor1\": {\n \"_id\": \"motor1\",\n \"active\": true,\n \"args\": [],\n \"device_class\": \"ophyd.sim.SynAxisNoHints\",\n \"documentation\": null,\n \"kwargs\": {\n \"name\": \"{{name}}\"\n },\n \"name\": \"motor1\",\n \"type\": \"OphydItem\"\n },\n \"tst_motor2\": {\n \"_id\": \"tst_motor2\",\n \"active\": true,\n \"args\": [],\n 
\"device_class\": \"ophyd.sim.SynAxisNoHints\",\n \"documentation\": null,\n \"kwargs\": {\n \"name\": \"{{name}}\"\n },\n \"name\": \"tst_motor2\",\n \"type\": \"OphydItem\"\n },\n \"motor3\": {\n \"_id\": \"motor3\",\n \"active\": true,\n \"args\": [],\n \"device_class\": \"ophyd.sim.SynAxis\",\n \"documentation\": null,\n \"kwargs\": {\n \"name\": \"{{name}}\"\n },\n \"name\": \"motor3\",\n \"type\": \"OphydItem\"\n },\n \"motor3_duplicate_error\": {\n \"_id\": \"motor3\",\n \"active\": false,\n \"args\": [],\n \"device_class\": \"ophyd.sim.SynAxis\",\n \"documentation\": null,\n \"kwargs\": {\n \"name\": \"{{name}}\"\n },\n \"name\": \"motor3\",\n \"type\": \"OphydItem\"\n }\n}\n\"\"\"\n\n\ndef _configure_happi(tmp_path, monkeypatch, json_devices):\n path_json = os.path.join(tmp_path, 'sim_devices.json')\n path_ini = os.path.join(tmp_path, 'happi.ini')\n happi_ini_text = f'[DEFAULT]\\nbackend=json\\npath={path_json}'\n with open(path_ini, 'w') as f:\n f.write(happi_ini_text)\n with open(path_json, 'w') as f:\n f.write(json_devices)\n monkeypatch.setenv('HAPPI_CFG', path_ini)\n\n\n@pytest.mark.parametrize('device_names, loaded_names, kw_args, success, errmsg'\n , [([], [], {}, True, ''), (('det', 'motor'), ('det', 'motor'), {}, \n True, ''), (['det', 'motor'], ('det', 'motor'), {}, True, ''), ((('det',\n ''), ['motor', '']), ('det', 'motor'), {}, True, ''), (('det', ['motor',\n '']), ('det', 'motor'), {}, True, ''), (('det', ('motor', ''), (\n 'tst_motor2', 'motor2')), ('det', 'motor', 'motor2'), {}, True, ''), ((\n ('motor1', 'motor1_copy1'), ('motor1', 'motor1_copy2')), (\n 'motor1_copy1', 'motor1_copy2'), {}, True, ''), (10, ('det', 'motor'),\n {}, False, \"Parameter 'device_names' value must be a tuple or a list\"),\n ('string', ('det', 'motor'), {}, False,\n \"Parameter 'device_names' value must be a tuple or a list\"), (('det', \n 10), ('det', 'motor'), {}, False,\n \"Parameter 'device_names': element .* must be str, tuple or list\"), ((\n 10, 'motor'), 
('det', 'motor'), {}, False,\n \"Parameter 'device_names': element .* must be str, tuple or list\"), ((\n 'det', (10, 'motor2')), ('det', 'motor'), {}, False,\n 'element .* is expected to be in the form'), (('det', ('tst_motor2', 10\n )), ('det', 'motor'), {}, False,\n 'element .* is expected to be in the form'), (('det', ('tst_motor2',\n 'motor2', 10)), ('det', 'motor'), {}, False,\n 'element .* is expected to be in the form'), (('det', 'motor10'), (\n 'det', 'motor10'), {}, False, 'No devices with name'), (('det',\n 'motor3'), ('det', 'motor3'), {}, False, 'Multiple devices with name'),\n (('det', 'motor3'), ('det', 'motor3'), {'active': True}, True, ''), ((\n 'det', 'motor3'), ('det', 'motor3'), {'active': False}, False,\n \"No devices with name 'det' were found in Happi database.\"), (('motor3'\n ,), ('motor3',), {'active': False}, True, ''), (('det', ['motor',\n 'motor3_new']), ('det', 'motor3_new'), {}, True, ''), (('det', ['motor',\n 'Motor']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers'), (('det', ['motor',\n 'moTor']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers'), (('det', ['motor',\n '_motor']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers'), (('det', ['motor',\n ' motor']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers'), (('det', ['motor',\n 'motor ']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers'), (('det', ['motor',\n 'motor new']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers'), (('det', ['motor',\n 'motor_$new']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers'), (('det', ['motor',\n '2motor_$new']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers')])\ndef test_load_devices_from_happi_1(tmp_path, monkeypatch, device_names,\n loaded_names, kw_args, success, errmsg):\n \"\"\"\n Tests for ``load_devices_from_happi``.\n 
\"\"\"\n _configure_happi(tmp_path, monkeypatch, json_devices=_happi_json_db_1)\n if success:\n ns = {}\n dlist = load_devices_from_happi(device_names, namespace=ns, **kw_args)\n assert len(ns) == len(loaded_names), str(ns)\n for d in loaded_names:\n assert d in ns\n assert set(dlist) == set(loaded_names)\n else:\n with pytest.raises(Exception, match=errmsg):\n ns = {}\n load_devices_from_happi(device_names, namespace=ns, **kw_args)\n\n def _test_loading(device_names, loaded_names):\n if success:\n load_devices_from_happi(device_names, namespace=locals(), **kw_args\n )\n for d in loaded_names:\n assert d in locals()\n else:\n with pytest.raises(Exception, match=errmsg):\n load_devices_from_happi(device_names, namespace=locals(),\n **kw_args)\n _test_loading(device_names=device_names, loaded_names=loaded_names)\n\n\ndef test_load_devices_from_happi_2_fail(tmp_path, monkeypatch):\n \"\"\"\n Function ``load_devices_from_happi``: parameter ``namespace`` is required and must be of type ``dict``.\n \"\"\"\n _configure_happi(tmp_path, monkeypatch, json_devices=_happi_json_db_1)\n with pytest.raises(TypeError, match=\n \"missing 1 required keyword-only argument: 'namespace'\"):\n load_devices_from_happi(['det', 'motor'])\n with pytest.raises(TypeError, match=\n \"Parameter 'namespace' must be a dictionary\"):\n load_devices_from_happi(['det', 'motor'], namespace=[1, 2, 3])\n", "step-4": "import os\nimport inspect\nimport pytest\nfrom ._common import copy_default_profile_collection, patch_first_startup_file\nfrom bluesky_queueserver.manager.profile_tools import global_user_namespace, load_devices_from_happi\nfrom bluesky_queueserver.manager.profile_ops import load_profile_collection\n\n\ndef create_local_imports_files(tmp_path):\n path_dir = os.path.join(tmp_path, 'dir_local_imports')\n fln_func = os.path.join(path_dir, 'file_func.py')\n fln_gen = os.path.join(path_dir, 'file_gen.py')\n os.makedirs(path_dir, exist_ok=True)\n code1 = \"\"\"\nfrom 
bluesky_queueserver.manager.profile_tools import set_user_ns\n\n# Function that has the parameter 'ipython'\n@set_user_ns\ndef f1(some_value, user_ns, ipython):\n user_ns[\"func_was_called\"] = \"func_was_called\"\n return (some_value, user_ns[\"v_from_namespace\"], bool(ipython))\n\n# Function that has no parameter 'ipython'\n@set_user_ns\ndef f1a(some_value, user_ns):\n user_ns[\"func_A_was_called\"] = \"func_was_called\"\n return (some_value, user_ns[\"v_from_namespace\"])\n\n\"\"\"\n with open(fln_func, 'w') as f:\n f.writelines(code1)\n code2 = \"\"\"\nfrom bluesky_queueserver.manager.profile_tools import set_user_ns\n\n# Function that has the parameter 'ipython'\n@set_user_ns\ndef f2(some_value, user_ns, ipython):\n user_ns[\"gen_was_called\"] = \"gen_was_called\"\n yield (some_value, user_ns[\"v_from_namespace\"], bool(ipython))\n\n# Function that has no parameter 'ipython'\n@set_user_ns\ndef f2a(some_value, user_ns):\n user_ns[\"gen_A_was_called\"] = \"gen_was_called\"\n yield (some_value, user_ns[\"v_from_namespace\"])\n\n@set_user_ns\ndef f3(some_value, user_ns, ipython):\n user_ns[\"value_f3\"] = some_value\n\nf3(91)\n\n\"\"\"\n with open(fln_gen, 'w') as f:\n f.writelines(code2)\n\n\npatch_code = \"\"\"\nfrom dir_local_imports.file_func import f1, f1a\nfrom dir_local_imports.file_gen import f2, f2a\n\nfrom bluesky_queueserver.manager.profile_tools import set_user_ns\n\n@set_user_ns\ndef f4(some_value, user_ns, ipython):\n user_ns[\"value_f4\"] = some_value\n\nf4(90)\n\n\"\"\"\n\n\ndef test_set_user_ns_1(tmp_path):\n \"\"\"\n Tests for ``set_user_ns`` decorator. 
The functionality of the decorator\n is fully tested (only without IPython):\n - using ``global_user_namespace`` to pass values in and out of the function\n defined in the imported module (emulation of ``get_ipython().user_ns``).\n - checking if the function is executed from IPython (only for the function\n defined in the imported module).\n \"\"\"\n pc_path = copy_default_profile_collection(tmp_path)\n create_local_imports_files(pc_path)\n patch_first_startup_file(pc_path, patch_code)\n nspace = load_profile_collection(pc_path)\n assert len(nspace) > 0, 'Failed to load the profile collection'\n assert 'f1' in nspace, 'Test for local imports failed'\n assert 'f2' in nspace, 'Test for local imports failed'\n assert inspect.isgeneratorfunction(nspace['f1']) is False\n assert inspect.isgeneratorfunction(nspace['f2']) is True\n\n def check_signature(func):\n params = inspect.signature(func).parameters\n assert 'user_ns' not in params\n assert 'ipython' not in params\n check_signature(nspace['f1'])\n check_signature(nspace['f1a'])\n check_signature(nspace['f2'])\n check_signature(nspace['f2a'])\n assert nspace['value_f3'] == 91\n assert nspace['value_f4'] == 90\n global_user_namespace.set_user_namespace(user_ns=nspace, use_ipython=False)\n global_user_namespace.user_ns['v_from_namespace'] = 'value-sent-to-func'\n assert nspace['v_from_namespace'] == 'value-sent-to-func'\n result_func = nspace['f1'](60)\n assert nspace['func_was_called'] == 'func_was_called'\n assert result_func[0] == 60\n assert result_func[1] == 'value-sent-to-func'\n assert result_func[2] is False\n result_func = nspace['f1a'](65)\n assert nspace['func_A_was_called'] == 'func_was_called'\n assert result_func[0] == 65\n assert result_func[1] == 'value-sent-to-func'\n global_user_namespace.user_ns['v_from_namespace'] = 'value-sent-to-gen'\n result_func = list(nspace['f2'](110))[0]\n assert nspace['gen_was_called'] == 'gen_was_called'\n assert result_func[0] == 110\n assert result_func[1] == 
'value-sent-to-gen'\n assert result_func[2] is False\n result_func = list(nspace['f2a'](115))[0]\n assert nspace['gen_A_was_called'] == 'gen_was_called'\n assert result_func[0] == 115\n assert result_func[1] == 'value-sent-to-gen'\n\n\ndef test_global_user_namespace():\n \"\"\"\n Basic test for ``global_user_namespace``.\n \"\"\"\n ns = {'ab': 1, 'cd': 2}\n global_user_namespace.set_user_namespace(user_ns=ns)\n assert global_user_namespace.user_ns == ns\n assert global_user_namespace.use_ipython is False\n global_user_namespace.set_user_namespace(user_ns={}, use_ipython=True)\n assert global_user_namespace.user_ns == {}\n assert global_user_namespace.use_ipython is True\n global_user_namespace.set_user_namespace(user_ns=ns, use_ipython=False)\n assert global_user_namespace.user_ns == ns\n assert global_user_namespace.use_ipython is False\n\n\n_happi_json_db_1 = \"\"\"\n{\n \"det\": {\n \"_id\": \"det\",\n \"active\": true,\n \"args\": [],\n \"device_class\": \"ophyd.sim.DetWithCountTime\",\n \"documentation\": null,\n \"kwargs\": {\n \"name\": \"{{name}}\"\n },\n \"name\": \"det\",\n \"type\": \"OphydItem\"\n },\n \"motor\": {\n \"_id\": \"motor\",\n \"active\": true,\n \"args\": [],\n \"device_class\": \"ophyd.sim.SynAxisNoPosition\",\n \"documentation\": null,\n \"kwargs\": {\n \"name\": \"{{name}}\"\n },\n \"name\": \"motor\",\n \"type\": \"OphydItem\"\n },\n \"motor1\": {\n \"_id\": \"motor1\",\n \"active\": true,\n \"args\": [],\n \"device_class\": \"ophyd.sim.SynAxisNoHints\",\n \"documentation\": null,\n \"kwargs\": {\n \"name\": \"{{name}}\"\n },\n \"name\": \"motor1\",\n \"type\": \"OphydItem\"\n },\n \"tst_motor2\": {\n \"_id\": \"tst_motor2\",\n \"active\": true,\n \"args\": [],\n \"device_class\": \"ophyd.sim.SynAxisNoHints\",\n \"documentation\": null,\n \"kwargs\": {\n \"name\": \"{{name}}\"\n },\n \"name\": \"tst_motor2\",\n \"type\": \"OphydItem\"\n },\n \"motor3\": {\n \"_id\": \"motor3\",\n \"active\": true,\n \"args\": [],\n \"device_class\": 
\"ophyd.sim.SynAxis\",\n \"documentation\": null,\n \"kwargs\": {\n \"name\": \"{{name}}\"\n },\n \"name\": \"motor3\",\n \"type\": \"OphydItem\"\n },\n \"motor3_duplicate_error\": {\n \"_id\": \"motor3\",\n \"active\": false,\n \"args\": [],\n \"device_class\": \"ophyd.sim.SynAxis\",\n \"documentation\": null,\n \"kwargs\": {\n \"name\": \"{{name}}\"\n },\n \"name\": \"motor3\",\n \"type\": \"OphydItem\"\n }\n}\n\"\"\"\n\n\ndef _configure_happi(tmp_path, monkeypatch, json_devices):\n path_json = os.path.join(tmp_path, 'sim_devices.json')\n path_ini = os.path.join(tmp_path, 'happi.ini')\n happi_ini_text = f'[DEFAULT]\\nbackend=json\\npath={path_json}'\n with open(path_ini, 'w') as f:\n f.write(happi_ini_text)\n with open(path_json, 'w') as f:\n f.write(json_devices)\n monkeypatch.setenv('HAPPI_CFG', path_ini)\n\n\n@pytest.mark.parametrize('device_names, loaded_names, kw_args, success, errmsg'\n , [([], [], {}, True, ''), (('det', 'motor'), ('det', 'motor'), {}, \n True, ''), (['det', 'motor'], ('det', 'motor'), {}, True, ''), ((('det',\n ''), ['motor', '']), ('det', 'motor'), {}, True, ''), (('det', ['motor',\n '']), ('det', 'motor'), {}, True, ''), (('det', ('motor', ''), (\n 'tst_motor2', 'motor2')), ('det', 'motor', 'motor2'), {}, True, ''), ((\n ('motor1', 'motor1_copy1'), ('motor1', 'motor1_copy2')), (\n 'motor1_copy1', 'motor1_copy2'), {}, True, ''), (10, ('det', 'motor'),\n {}, False, \"Parameter 'device_names' value must be a tuple or a list\"),\n ('string', ('det', 'motor'), {}, False,\n \"Parameter 'device_names' value must be a tuple or a list\"), (('det', \n 10), ('det', 'motor'), {}, False,\n \"Parameter 'device_names': element .* must be str, tuple or list\"), ((\n 10, 'motor'), ('det', 'motor'), {}, False,\n \"Parameter 'device_names': element .* must be str, tuple or list\"), ((\n 'det', (10, 'motor2')), ('det', 'motor'), {}, False,\n 'element .* is expected to be in the form'), (('det', ('tst_motor2', 10\n )), ('det', 'motor'), {}, False,\n 
'element .* is expected to be in the form'), (('det', ('tst_motor2',\n 'motor2', 10)), ('det', 'motor'), {}, False,\n 'element .* is expected to be in the form'), (('det', 'motor10'), (\n 'det', 'motor10'), {}, False, 'No devices with name'), (('det',\n 'motor3'), ('det', 'motor3'), {}, False, 'Multiple devices with name'),\n (('det', 'motor3'), ('det', 'motor3'), {'active': True}, True, ''), ((\n 'det', 'motor3'), ('det', 'motor3'), {'active': False}, False,\n \"No devices with name 'det' were found in Happi database.\"), (('motor3'\n ,), ('motor3',), {'active': False}, True, ''), (('det', ['motor',\n 'motor3_new']), ('det', 'motor3_new'), {}, True, ''), (('det', ['motor',\n 'Motor']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers'), (('det', ['motor',\n 'moTor']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers'), (('det', ['motor',\n '_motor']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers'), (('det', ['motor',\n ' motor']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers'), (('det', ['motor',\n 'motor ']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers'), (('det', ['motor',\n 'motor new']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers'), (('det', ['motor',\n 'motor_$new']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers'), (('det', ['motor',\n '2motor_$new']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers')])\ndef test_load_devices_from_happi_1(tmp_path, monkeypatch, device_names,\n loaded_names, kw_args, success, errmsg):\n \"\"\"\n Tests for ``load_devices_from_happi``.\n \"\"\"\n _configure_happi(tmp_path, monkeypatch, json_devices=_happi_json_db_1)\n if success:\n ns = {}\n dlist = load_devices_from_happi(device_names, namespace=ns, **kw_args)\n assert len(ns) == len(loaded_names), str(ns)\n for d in loaded_names:\n assert d in ns\n assert 
set(dlist) == set(loaded_names)\n else:\n with pytest.raises(Exception, match=errmsg):\n ns = {}\n load_devices_from_happi(device_names, namespace=ns, **kw_args)\n\n def _test_loading(device_names, loaded_names):\n if success:\n load_devices_from_happi(device_names, namespace=locals(), **kw_args\n )\n for d in loaded_names:\n assert d in locals()\n else:\n with pytest.raises(Exception, match=errmsg):\n load_devices_from_happi(device_names, namespace=locals(),\n **kw_args)\n _test_loading(device_names=device_names, loaded_names=loaded_names)\n\n\ndef test_load_devices_from_happi_2_fail(tmp_path, monkeypatch):\n \"\"\"\n Function ``load_devices_from_happi``: parameter ``namespace`` is required and must be of type ``dict``.\n \"\"\"\n _configure_happi(tmp_path, monkeypatch, json_devices=_happi_json_db_1)\n with pytest.raises(TypeError, match=\n \"missing 1 required keyword-only argument: 'namespace'\"):\n load_devices_from_happi(['det', 'motor'])\n with pytest.raises(TypeError, match=\n \"Parameter 'namespace' must be a dictionary\"):\n load_devices_from_happi(['det', 'motor'], namespace=[1, 2, 3])\n", "step-5": "import os\nimport inspect\nimport pytest\n\nfrom ._common import copy_default_profile_collection, patch_first_startup_file\nfrom bluesky_queueserver.manager.profile_tools import global_user_namespace, load_devices_from_happi\nfrom bluesky_queueserver.manager.profile_ops import load_profile_collection\n\n\ndef create_local_imports_files(tmp_path):\n path_dir = os.path.join(tmp_path, \"dir_local_imports\")\n fln_func = os.path.join(path_dir, \"file_func.py\")\n fln_gen = os.path.join(path_dir, \"file_gen.py\")\n\n os.makedirs(path_dir, exist_ok=True)\n\n # Create file1\n code1 = \"\"\"\nfrom bluesky_queueserver.manager.profile_tools import set_user_ns\n\n# Function that has the parameter 'ipython'\n@set_user_ns\ndef f1(some_value, user_ns, ipython):\n user_ns[\"func_was_called\"] = \"func_was_called\"\n return (some_value, user_ns[\"v_from_namespace\"], 
bool(ipython))\n\n# Function that has no parameter 'ipython'\n@set_user_ns\ndef f1a(some_value, user_ns):\n user_ns[\"func_A_was_called\"] = \"func_was_called\"\n return (some_value, user_ns[\"v_from_namespace\"])\n\n\"\"\"\n with open(fln_func, \"w\") as f:\n f.writelines(code1)\n\n # Create file2\n code2 = \"\"\"\nfrom bluesky_queueserver.manager.profile_tools import set_user_ns\n\n# Function that has the parameter 'ipython'\n@set_user_ns\ndef f2(some_value, user_ns, ipython):\n user_ns[\"gen_was_called\"] = \"gen_was_called\"\n yield (some_value, user_ns[\"v_from_namespace\"], bool(ipython))\n\n# Function that has no parameter 'ipython'\n@set_user_ns\ndef f2a(some_value, user_ns):\n user_ns[\"gen_A_was_called\"] = \"gen_was_called\"\n yield (some_value, user_ns[\"v_from_namespace\"])\n\n@set_user_ns\ndef f3(some_value, user_ns, ipython):\n user_ns[\"value_f3\"] = some_value\n\nf3(91)\n\n\"\"\"\n with open(fln_gen, \"w\") as f:\n f.writelines(code2)\n\n\npatch_code = \"\"\"\nfrom dir_local_imports.file_func import f1, f1a\nfrom dir_local_imports.file_gen import f2, f2a\n\nfrom bluesky_queueserver.manager.profile_tools import set_user_ns\n\n@set_user_ns\ndef f4(some_value, user_ns, ipython):\n user_ns[\"value_f4\"] = some_value\n\nf4(90)\n\n\"\"\"\n\n\ndef test_set_user_ns_1(tmp_path):\n \"\"\"\n Tests for ``set_user_ns`` decorator. 
The functionality of the decorator\n is fully tested (only without IPython):\n - using ``global_user_namespace`` to pass values in and out of the function\n defined in the imported module (emulation of ``get_ipython().user_ns``).\n - checking if the function is executed from IPython (only for the function\n defined in the imported module).\n \"\"\"\n pc_path = copy_default_profile_collection(tmp_path)\n\n create_local_imports_files(pc_path)\n patch_first_startup_file(pc_path, patch_code)\n\n nspace = load_profile_collection(pc_path)\n assert len(nspace) > 0, \"Failed to load the profile collection\"\n assert \"f1\" in nspace, \"Test for local imports failed\"\n assert \"f2\" in nspace, \"Test for local imports failed\"\n\n # Test if the decorator `set_user_ns` does not change function type\n assert inspect.isgeneratorfunction(nspace[\"f1\"]) is False\n assert inspect.isgeneratorfunction(nspace[\"f2\"]) is True\n\n # Check if the extra arguments are removed from the function signature\n def check_signature(func):\n params = inspect.signature(func).parameters\n assert \"user_ns\" not in params\n assert \"ipython\" not in params\n\n check_signature(nspace[\"f1\"])\n check_signature(nspace[\"f1a\"])\n check_signature(nspace[\"f2\"])\n check_signature(nspace[\"f2a\"])\n\n assert nspace[\"value_f3\"] == 91\n assert nspace[\"value_f4\"] == 90\n\n # Test function\n global_user_namespace.set_user_namespace(user_ns=nspace, use_ipython=False)\n global_user_namespace.user_ns[\"v_from_namespace\"] = \"value-sent-to-func\"\n assert nspace[\"v_from_namespace\"] == \"value-sent-to-func\"\n\n result_func = nspace[\"f1\"](60)\n assert nspace[\"func_was_called\"] == \"func_was_called\"\n assert result_func[0] == 60\n assert result_func[1] == \"value-sent-to-func\"\n assert result_func[2] is False\n\n result_func = nspace[\"f1a\"](65)\n assert nspace[\"func_A_was_called\"] == \"func_was_called\"\n assert result_func[0] == 65\n assert result_func[1] == \"value-sent-to-func\"\n\n # Test 
generator\n global_user_namespace.user_ns[\"v_from_namespace\"] = \"value-sent-to-gen\"\n result_func = list(nspace[\"f2\"](110))[0]\n assert nspace[\"gen_was_called\"] == \"gen_was_called\"\n assert result_func[0] == 110\n assert result_func[1] == \"value-sent-to-gen\"\n assert result_func[2] is False\n\n result_func = list(nspace[\"f2a\"](115))[0]\n assert nspace[\"gen_A_was_called\"] == \"gen_was_called\"\n assert result_func[0] == 115\n assert result_func[1] == \"value-sent-to-gen\"\n\n\ndef test_global_user_namespace():\n \"\"\"\n Basic test for ``global_user_namespace``.\n \"\"\"\n ns = {\"ab\": 1, \"cd\": 2}\n global_user_namespace.set_user_namespace(user_ns=ns)\n assert global_user_namespace.user_ns == ns\n assert global_user_namespace.use_ipython is False\n\n global_user_namespace.set_user_namespace(user_ns={}, use_ipython=True)\n assert global_user_namespace.user_ns == {}\n assert global_user_namespace.use_ipython is True\n\n global_user_namespace.set_user_namespace(user_ns=ns, use_ipython=False)\n assert global_user_namespace.user_ns == ns\n assert global_user_namespace.use_ipython is False\n\n\n_happi_json_db_1 = \"\"\"\n{\n \"det\": {\n \"_id\": \"det\",\n \"active\": true,\n \"args\": [],\n \"device_class\": \"ophyd.sim.DetWithCountTime\",\n \"documentation\": null,\n \"kwargs\": {\n \"name\": \"{{name}}\"\n },\n \"name\": \"det\",\n \"type\": \"OphydItem\"\n },\n \"motor\": {\n \"_id\": \"motor\",\n \"active\": true,\n \"args\": [],\n \"device_class\": \"ophyd.sim.SynAxisNoPosition\",\n \"documentation\": null,\n \"kwargs\": {\n \"name\": \"{{name}}\"\n },\n \"name\": \"motor\",\n \"type\": \"OphydItem\"\n },\n \"motor1\": {\n \"_id\": \"motor1\",\n \"active\": true,\n \"args\": [],\n \"device_class\": \"ophyd.sim.SynAxisNoHints\",\n \"documentation\": null,\n \"kwargs\": {\n \"name\": \"{{name}}\"\n },\n \"name\": \"motor1\",\n \"type\": \"OphydItem\"\n },\n \"tst_motor2\": {\n \"_id\": \"tst_motor2\",\n \"active\": true,\n \"args\": [],\n 
\"device_class\": \"ophyd.sim.SynAxisNoHints\",\n \"documentation\": null,\n \"kwargs\": {\n \"name\": \"{{name}}\"\n },\n \"name\": \"tst_motor2\",\n \"type\": \"OphydItem\"\n },\n \"motor3\": {\n \"_id\": \"motor3\",\n \"active\": true,\n \"args\": [],\n \"device_class\": \"ophyd.sim.SynAxis\",\n \"documentation\": null,\n \"kwargs\": {\n \"name\": \"{{name}}\"\n },\n \"name\": \"motor3\",\n \"type\": \"OphydItem\"\n },\n \"motor3_duplicate_error\": {\n \"_id\": \"motor3\",\n \"active\": false,\n \"args\": [],\n \"device_class\": \"ophyd.sim.SynAxis\",\n \"documentation\": null,\n \"kwargs\": {\n \"name\": \"{{name}}\"\n },\n \"name\": \"motor3\",\n \"type\": \"OphydItem\"\n }\n}\n\"\"\"\n\n\ndef _configure_happi(tmp_path, monkeypatch, json_devices):\n path_json = os.path.join(tmp_path, \"sim_devices.json\")\n path_ini = os.path.join(tmp_path, \"happi.ini\")\n\n happi_ini_text = f\"[DEFAULT]\\nbackend=json\\npath={path_json}\"\n\n with open(path_ini, \"w\") as f:\n f.write(happi_ini_text)\n\n with open(path_json, \"w\") as f:\n f.write(json_devices)\n\n monkeypatch.setenv(\"HAPPI_CFG\", path_ini)\n\n\n# fmt: off\n@pytest.mark.parametrize(\"device_names, loaded_names, kw_args, success, errmsg\", [\n ([], [], {}, True, \"\"), # No devices are loaded if the list of devices is empty\n ((\"det\", \"motor\"), (\"det\", \"motor\"), {}, True, \"\"),\n ([\"det\", \"motor\"], (\"det\", \"motor\"), {}, True, \"\"),\n (((\"det\", \"\"), [\"motor\", \"\"]), (\"det\", \"motor\"), {}, True, \"\"),\n ((\"det\", [\"motor\", \"\"]), (\"det\", \"motor\"), {}, True, \"\"),\n ((\"det\", (\"motor\", \"\"), (\"tst_motor2\", \"motor2\")), (\"det\", \"motor\", \"motor2\"), {}, True, \"\"),\n # This is not typical use case, but the same device may be loaded multiple times\n # with different names if needed.\n (((\"motor1\", \"motor1_copy1\"), (\"motor1\", \"motor1_copy2\")), (\"motor1_copy1\", \"motor1_copy2\"), {}, True, \"\"),\n # Incorrect type of the device list\n (10, (\"det\", 
\"motor\"), {}, False, \"Parameter 'device_names' value must be a tuple or a list\"),\n (\"string\", (\"det\", \"motor\"), {}, False, \"Parameter 'device_names' value must be a tuple or a list\"),\n # Incorrecty type or form of a device list element\n ((\"det\", 10), (\"det\", \"motor\"), {}, False, \"Parameter 'device_names': element .* must be str, tuple or list\"),\n ((10, \"motor\"), (\"det\", \"motor\"), {}, False,\n \"Parameter 'device_names': element .* must be str, tuple or list\"),\n ((\"det\", (10, \"motor2\")), (\"det\", \"motor\"), {}, False, \"element .* is expected to be in the form\"),\n ((\"det\", (\"tst_motor2\", 10)), (\"det\", \"motor\"), {}, False, \"element .* is expected to be in the form\"),\n ((\"det\", (\"tst_motor2\", \"motor2\", 10)), (\"det\", \"motor\"), {}, False,\n \"element .* is expected to be in the form\"),\n # No device found\n ((\"det\", \"motor10\"), (\"det\", \"motor10\"), {}, False, \"No devices with name\"),\n # Multiple devices found (search for \"motor3\" yields multile devices, this is database issue)\n ((\"det\", \"motor3\"), (\"det\", \"motor3\"), {}, False, \"Multiple devices with name\"),\n # Use additional search parameters. 
(Two entries for \"motor3\" differ in the value of `active` field.\n # A single entry for `det` has `active==True`.)\n ((\"det\", \"motor3\"), (\"det\", \"motor3\"), {\"active\": True}, True, \"\"),\n ((\"det\", \"motor3\"), (\"det\", \"motor3\"), {\"active\": False}, False,\n \"No devices with name 'det' were found in Happi database.\"),\n ((\"motor3\",), (\"motor3\",), {\"active\": False}, True, \"\"),\n # Verify that valid device names are accepted\n ((\"det\", [\"motor\", \"motor3_new\"]), (\"det\", \"motor3_new\"), {}, True, \"\"),\n # Invalid new device name\n ((\"det\", [\"motor\", \"Motor\"]), (\"det\", \"motor\"), {}, False, \"may consist of lowercase letters, numbers\"),\n ((\"det\", [\"motor\", \"moTor\"]), (\"det\", \"motor\"), {}, False, \"may consist of lowercase letters, numbers\"),\n ((\"det\", [\"motor\", \"_motor\"]), (\"det\", \"motor\"), {}, False, \"may consist of lowercase letters, numbers\"),\n ((\"det\", [\"motor\", \" motor\"]), (\"det\", \"motor\"), {}, False, \"may consist of lowercase letters, numbers\"),\n ((\"det\", [\"motor\", \"motor \"]), (\"det\", \"motor\"), {}, False, \"may consist of lowercase letters, numbers\"),\n ((\"det\", [\"motor\", \"motor new\"]), (\"det\", \"motor\"), {}, False, \"may consist of lowercase letters, numbers\"),\n ((\"det\", [\"motor\", \"motor_$new\"]), (\"det\", \"motor\"), {}, False, \"may consist of lowercase letters, numbers\"),\n ((\"det\", [\"motor\", \"2motor_$new\"]), (\"det\", \"motor\"), {}, False, \"may consist of lowercase letters, numbers\"),\n])\n# fmt: on\ndef test_load_devices_from_happi_1(tmp_path, monkeypatch, device_names, loaded_names, kw_args, success, errmsg):\n \"\"\"\n Tests for ``load_devices_from_happi``.\n \"\"\"\n _configure_happi(tmp_path, monkeypatch, json_devices=_happi_json_db_1)\n\n # Load as a dictionary\n if success:\n ns = {}\n dlist = load_devices_from_happi(device_names, namespace=ns, **kw_args)\n assert len(ns) == len(loaded_names), str(ns)\n for d in loaded_names:\n 
assert d in ns\n assert set(dlist) == set(loaded_names)\n else:\n with pytest.raises(Exception, match=errmsg):\n ns = {}\n load_devices_from_happi(device_names, namespace=ns, **kw_args)\n\n # Load in local namespace\n def _test_loading(device_names, loaded_names):\n if success:\n load_devices_from_happi(device_names, namespace=locals(), **kw_args)\n for d in loaded_names:\n assert d in locals()\n else:\n with pytest.raises(Exception, match=errmsg):\n load_devices_from_happi(device_names, namespace=locals(), **kw_args)\n\n _test_loading(device_names=device_names, loaded_names=loaded_names)\n\n\ndef test_load_devices_from_happi_2_fail(tmp_path, monkeypatch):\n \"\"\"\n Function ``load_devices_from_happi``: parameter ``namespace`` is required and must be of type ``dict``.\n \"\"\"\n _configure_happi(tmp_path, monkeypatch, json_devices=_happi_json_db_1)\n\n # Missing 'namespace' parameter\n with pytest.raises(TypeError, match=\"missing 1 required keyword-only argument: 'namespace'\"):\n load_devices_from_happi([\"det\", \"motor\"])\n\n # Incorrect type of 'namespace' parameter\n with pytest.raises(TypeError, match=\"Parameter 'namespace' must be a dictionary\"):\n load_devices_from_happi([\"det\", \"motor\"], namespace=[1, 2, 3])\n", "step-ids": [ 5, 6, 7, 8, 9 ] }
[ 5, 6, 7, 8, 9 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> def main(): config = {'spark.jars.packages': 'io.delta:delta-core_2.12:0.8.0,org.postgresql:postgresql:9.4.1211,org.apache.spark:spark-streaming-kafka-0-10_2.12:3.0.0,org.apache.spark:spark-sql-kafka-0-10_2.12:3.0.0' , 'spark.sql.extensions': 'io.delta.sql.DeltaSparkSessionExtension', 'spark.driver.memory': '8g', 'spark.sql.catalog.spark_catalog': 'org.apache.spark.sql.delta.catalog.DeltaCatalog', Constants. DELTA_SRC_PATH: Constants.DELTA_LOCATION, Constants.POSTGRESQL_DB: Constants.POSTGRESQL_DB_VALUE, Constants.POSTGRESQL_USER: Constants .POSTGRESQL_USER_VALUE, Constants.POSTGRESQL_PASSWORD: Constants. POSTGRESQL_PASSWORD_VALUE, Constants.POSTGRESQL_HOST: Constants. POSTGRESQL_HOST_VALUE, Constants.KAFKA_SERVER: Constants. KAFKA_SERVER_NAME} spark_configuration = SparkConfiguration(app_name= 'visits_ads_event_ingestion', spark_master='local[4]', log_level= 'WARN', configuration=config) import main.orchestrator as Orchestrator visits_schema = StructType([StructField('id_user', IntegerType(), False ), StructField('id_video', IntegerType(), False), StructField( 'id_device', IntegerType(), False), StructField('id_location', IntegerType(), False), StructField('visit_date', TimestampType(), True)]) visits_stream = KafkaConnector(spark_configuration).get_stream('visits', start_from_begining=False).load() visits_stream = extract_json_data(visits_stream, visits_schema) visits_stream.writeStream.option('checkpointLocation', 'checkpoint/visits' ).foreachBatch(lambda visits_batch, index: Orchestrator. 
ingest_visits(visits_batch, spark_configuration, index)).start() spark_configuration.spark_session.streams.awaitAnyTermination() <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> def main(): config = {'spark.jars.packages': 'io.delta:delta-core_2.12:0.8.0,org.postgresql:postgresql:9.4.1211,org.apache.spark:spark-streaming-kafka-0-10_2.12:3.0.0,org.apache.spark:spark-sql-kafka-0-10_2.12:3.0.0' , 'spark.sql.extensions': 'io.delta.sql.DeltaSparkSessionExtension', 'spark.driver.memory': '8g', 'spark.sql.catalog.spark_catalog': 'org.apache.spark.sql.delta.catalog.DeltaCatalog', Constants. DELTA_SRC_PATH: Constants.DELTA_LOCATION, Constants.POSTGRESQL_DB: Constants.POSTGRESQL_DB_VALUE, Constants.POSTGRESQL_USER: Constants .POSTGRESQL_USER_VALUE, Constants.POSTGRESQL_PASSWORD: Constants. POSTGRESQL_PASSWORD_VALUE, Constants.POSTGRESQL_HOST: Constants. POSTGRESQL_HOST_VALUE, Constants.KAFKA_SERVER: Constants. KAFKA_SERVER_NAME} spark_configuration = SparkConfiguration(app_name= 'visits_ads_event_ingestion', spark_master='local[4]', log_level= 'WARN', configuration=config) import main.orchestrator as Orchestrator visits_schema = StructType([StructField('id_user', IntegerType(), False ), StructField('id_video', IntegerType(), False), StructField( 'id_device', IntegerType(), False), StructField('id_location', IntegerType(), False), StructField('visit_date', TimestampType(), True)]) visits_stream = KafkaConnector(spark_configuration).get_stream('visits', start_from_begining=False).load() visits_stream = extract_json_data(visits_stream, visits_schema) visits_stream.writeStream.option('checkpointLocation', 'checkpoint/visits' ).foreachBatch(lambda visits_batch, index: Orchestrator. 
ingest_visits(visits_batch, spark_configuration, index)).start() spark_configuration.spark_session.streams.awaitAnyTermination() if __name__ == '__main__': main() <|reserved_special_token_1|> from pyspark.sql.types import StructType, StructField, StringType, TimestampType, IntegerType from main.config.spark_config import SparkConfiguration import main.config.constants as Constants from main.connectors.kafka_connector import KafkaConnector, extract_json_data def main(): config = {'spark.jars.packages': 'io.delta:delta-core_2.12:0.8.0,org.postgresql:postgresql:9.4.1211,org.apache.spark:spark-streaming-kafka-0-10_2.12:3.0.0,org.apache.spark:spark-sql-kafka-0-10_2.12:3.0.0' , 'spark.sql.extensions': 'io.delta.sql.DeltaSparkSessionExtension', 'spark.driver.memory': '8g', 'spark.sql.catalog.spark_catalog': 'org.apache.spark.sql.delta.catalog.DeltaCatalog', Constants. DELTA_SRC_PATH: Constants.DELTA_LOCATION, Constants.POSTGRESQL_DB: Constants.POSTGRESQL_DB_VALUE, Constants.POSTGRESQL_USER: Constants .POSTGRESQL_USER_VALUE, Constants.POSTGRESQL_PASSWORD: Constants. POSTGRESQL_PASSWORD_VALUE, Constants.POSTGRESQL_HOST: Constants. POSTGRESQL_HOST_VALUE, Constants.KAFKA_SERVER: Constants. KAFKA_SERVER_NAME} spark_configuration = SparkConfiguration(app_name= 'visits_ads_event_ingestion', spark_master='local[4]', log_level= 'WARN', configuration=config) import main.orchestrator as Orchestrator visits_schema = StructType([StructField('id_user', IntegerType(), False ), StructField('id_video', IntegerType(), False), StructField( 'id_device', IntegerType(), False), StructField('id_location', IntegerType(), False), StructField('visit_date', TimestampType(), True)]) visits_stream = KafkaConnector(spark_configuration).get_stream('visits', start_from_begining=False).load() visits_stream = extract_json_data(visits_stream, visits_schema) visits_stream.writeStream.option('checkpointLocation', 'checkpoint/visits' ).foreachBatch(lambda visits_batch, index: Orchestrator. 
ingest_visits(visits_batch, spark_configuration, index)).start() spark_configuration.spark_session.streams.awaitAnyTermination() if __name__ == '__main__': main() <|reserved_special_token_1|> from pyspark.sql.types import StructType, StructField, StringType, TimestampType, IntegerType from main.config.spark_config import SparkConfiguration import main.config.constants as Constants from main.connectors.kafka_connector import KafkaConnector, extract_json_data def main(): # Configure Spark Session config = { "spark.jars.packages": "io.delta:delta-core_2.12:0.8.0," "org.postgresql:postgresql:9.4.1211," "org.apache.spark:spark-streaming-kafka-0-10_2.12:3.0.0," "org.apache.spark:spark-sql-kafka-0-10_2.12:3.0.0", "spark.sql.extensions": "io.delta.sql.DeltaSparkSessionExtension", "spark.driver.memory": "8g", "spark.sql.catalog.spark_catalog": "org.apache.spark.sql.delta.catalog.DeltaCatalog", Constants.DELTA_SRC_PATH: Constants.DELTA_LOCATION, Constants.POSTGRESQL_DB: Constants.POSTGRESQL_DB_VALUE, Constants.POSTGRESQL_USER: Constants.POSTGRESQL_USER_VALUE, Constants.POSTGRESQL_PASSWORD: Constants.POSTGRESQL_PASSWORD_VALUE, Constants.POSTGRESQL_HOST: Constants.POSTGRESQL_HOST_VALUE, Constants.KAFKA_SERVER: Constants.KAFKA_SERVER_NAME, } spark_configuration = SparkConfiguration(app_name="visits_ads_event_ingestion", spark_master="local[4]", log_level="WARN", configuration=config) import main.orchestrator as Orchestrator ######################## # Visit events ingestion ######################## visits_schema = StructType([ StructField('id_user', IntegerType(), False), StructField('id_video', IntegerType(), False), StructField('id_device', IntegerType(), False), StructField('id_location', IntegerType(), False), StructField('visit_date', TimestampType(), True) ]) visits_stream = KafkaConnector(spark_configuration).get_stream('visits', start_from_begining=False).load() visits_stream = extract_json_data(visits_stream, visits_schema) # For each micro-batch of visit events 
visits_stream.writeStream \ .option("checkpointLocation", "checkpoint/visits") \ .foreachBatch(lambda visits_batch, index: Orchestrator.ingest_visits(visits_batch, spark_configuration, index))\ .start() # Await stream termination spark_configuration.spark_session.streams.awaitAnyTermination() if __name__ == "__main__": main()
flexible
{ "blob_id": "23099b29fb5898c2556d1612690e33860662ca35", "index": 9846, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\ndef main():\n config = {'spark.jars.packages':\n 'io.delta:delta-core_2.12:0.8.0,org.postgresql:postgresql:9.4.1211,org.apache.spark:spark-streaming-kafka-0-10_2.12:3.0.0,org.apache.spark:spark-sql-kafka-0-10_2.12:3.0.0'\n , 'spark.sql.extensions': 'io.delta.sql.DeltaSparkSessionExtension',\n 'spark.driver.memory': '8g', 'spark.sql.catalog.spark_catalog':\n 'org.apache.spark.sql.delta.catalog.DeltaCatalog', Constants.\n DELTA_SRC_PATH: Constants.DELTA_LOCATION, Constants.POSTGRESQL_DB:\n Constants.POSTGRESQL_DB_VALUE, Constants.POSTGRESQL_USER: Constants\n .POSTGRESQL_USER_VALUE, Constants.POSTGRESQL_PASSWORD: Constants.\n POSTGRESQL_PASSWORD_VALUE, Constants.POSTGRESQL_HOST: Constants.\n POSTGRESQL_HOST_VALUE, Constants.KAFKA_SERVER: Constants.\n KAFKA_SERVER_NAME}\n spark_configuration = SparkConfiguration(app_name=\n 'visits_ads_event_ingestion', spark_master='local[4]', log_level=\n 'WARN', configuration=config)\n import main.orchestrator as Orchestrator\n visits_schema = StructType([StructField('id_user', IntegerType(), False\n ), StructField('id_video', IntegerType(), False), StructField(\n 'id_device', IntegerType(), False), StructField('id_location',\n IntegerType(), False), StructField('visit_date', TimestampType(), \n True)])\n visits_stream = KafkaConnector(spark_configuration).get_stream('visits',\n start_from_begining=False).load()\n visits_stream = extract_json_data(visits_stream, visits_schema)\n visits_stream.writeStream.option('checkpointLocation', 'checkpoint/visits'\n ).foreachBatch(lambda visits_batch, index: Orchestrator.\n ingest_visits(visits_batch, spark_configuration, index)).start()\n spark_configuration.spark_session.streams.awaitAnyTermination()\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\ndef main():\n config = {'spark.jars.packages':\n 
'io.delta:delta-core_2.12:0.8.0,org.postgresql:postgresql:9.4.1211,org.apache.spark:spark-streaming-kafka-0-10_2.12:3.0.0,org.apache.spark:spark-sql-kafka-0-10_2.12:3.0.0'\n , 'spark.sql.extensions': 'io.delta.sql.DeltaSparkSessionExtension',\n 'spark.driver.memory': '8g', 'spark.sql.catalog.spark_catalog':\n 'org.apache.spark.sql.delta.catalog.DeltaCatalog', Constants.\n DELTA_SRC_PATH: Constants.DELTA_LOCATION, Constants.POSTGRESQL_DB:\n Constants.POSTGRESQL_DB_VALUE, Constants.POSTGRESQL_USER: Constants\n .POSTGRESQL_USER_VALUE, Constants.POSTGRESQL_PASSWORD: Constants.\n POSTGRESQL_PASSWORD_VALUE, Constants.POSTGRESQL_HOST: Constants.\n POSTGRESQL_HOST_VALUE, Constants.KAFKA_SERVER: Constants.\n KAFKA_SERVER_NAME}\n spark_configuration = SparkConfiguration(app_name=\n 'visits_ads_event_ingestion', spark_master='local[4]', log_level=\n 'WARN', configuration=config)\n import main.orchestrator as Orchestrator\n visits_schema = StructType([StructField('id_user', IntegerType(), False\n ), StructField('id_video', IntegerType(), False), StructField(\n 'id_device', IntegerType(), False), StructField('id_location',\n IntegerType(), False), StructField('visit_date', TimestampType(), \n True)])\n visits_stream = KafkaConnector(spark_configuration).get_stream('visits',\n start_from_begining=False).load()\n visits_stream = extract_json_data(visits_stream, visits_schema)\n visits_stream.writeStream.option('checkpointLocation', 'checkpoint/visits'\n ).foreachBatch(lambda visits_batch, index: Orchestrator.\n ingest_visits(visits_batch, spark_configuration, index)).start()\n spark_configuration.spark_session.streams.awaitAnyTermination()\n\n\nif __name__ == '__main__':\n main()\n", "step-4": "from pyspark.sql.types import StructType, StructField, StringType, TimestampType, IntegerType\nfrom main.config.spark_config import SparkConfiguration\nimport main.config.constants as Constants\nfrom main.connectors.kafka_connector import KafkaConnector, extract_json_data\n\n\ndef 
main():\n config = {'spark.jars.packages':\n 'io.delta:delta-core_2.12:0.8.0,org.postgresql:postgresql:9.4.1211,org.apache.spark:spark-streaming-kafka-0-10_2.12:3.0.0,org.apache.spark:spark-sql-kafka-0-10_2.12:3.0.0'\n , 'spark.sql.extensions': 'io.delta.sql.DeltaSparkSessionExtension',\n 'spark.driver.memory': '8g', 'spark.sql.catalog.spark_catalog':\n 'org.apache.spark.sql.delta.catalog.DeltaCatalog', Constants.\n DELTA_SRC_PATH: Constants.DELTA_LOCATION, Constants.POSTGRESQL_DB:\n Constants.POSTGRESQL_DB_VALUE, Constants.POSTGRESQL_USER: Constants\n .POSTGRESQL_USER_VALUE, Constants.POSTGRESQL_PASSWORD: Constants.\n POSTGRESQL_PASSWORD_VALUE, Constants.POSTGRESQL_HOST: Constants.\n POSTGRESQL_HOST_VALUE, Constants.KAFKA_SERVER: Constants.\n KAFKA_SERVER_NAME}\n spark_configuration = SparkConfiguration(app_name=\n 'visits_ads_event_ingestion', spark_master='local[4]', log_level=\n 'WARN', configuration=config)\n import main.orchestrator as Orchestrator\n visits_schema = StructType([StructField('id_user', IntegerType(), False\n ), StructField('id_video', IntegerType(), False), StructField(\n 'id_device', IntegerType(), False), StructField('id_location',\n IntegerType(), False), StructField('visit_date', TimestampType(), \n True)])\n visits_stream = KafkaConnector(spark_configuration).get_stream('visits',\n start_from_begining=False).load()\n visits_stream = extract_json_data(visits_stream, visits_schema)\n visits_stream.writeStream.option('checkpointLocation', 'checkpoint/visits'\n ).foreachBatch(lambda visits_batch, index: Orchestrator.\n ingest_visits(visits_batch, spark_configuration, index)).start()\n spark_configuration.spark_session.streams.awaitAnyTermination()\n\n\nif __name__ == '__main__':\n main()\n", "step-5": "from pyspark.sql.types import StructType, StructField, StringType, TimestampType, IntegerType\nfrom main.config.spark_config import SparkConfiguration\nimport main.config.constants as Constants\nfrom main.connectors.kafka_connector import 
KafkaConnector, extract_json_data\n\n\ndef main():\n # Configure Spark Session\n config = {\n \"spark.jars.packages\": \"io.delta:delta-core_2.12:0.8.0,\"\n \"org.postgresql:postgresql:9.4.1211,\"\n \"org.apache.spark:spark-streaming-kafka-0-10_2.12:3.0.0,\"\n \"org.apache.spark:spark-sql-kafka-0-10_2.12:3.0.0\",\n \"spark.sql.extensions\": \"io.delta.sql.DeltaSparkSessionExtension\",\n \"spark.driver.memory\": \"8g\",\n \"spark.sql.catalog.spark_catalog\": \"org.apache.spark.sql.delta.catalog.DeltaCatalog\",\n Constants.DELTA_SRC_PATH: Constants.DELTA_LOCATION,\n Constants.POSTGRESQL_DB: Constants.POSTGRESQL_DB_VALUE,\n Constants.POSTGRESQL_USER: Constants.POSTGRESQL_USER_VALUE,\n Constants.POSTGRESQL_PASSWORD: Constants.POSTGRESQL_PASSWORD_VALUE,\n Constants.POSTGRESQL_HOST: Constants.POSTGRESQL_HOST_VALUE,\n Constants.KAFKA_SERVER: Constants.KAFKA_SERVER_NAME,\n }\n spark_configuration = SparkConfiguration(app_name=\"visits_ads_event_ingestion\", spark_master=\"local[4]\",\n log_level=\"WARN\", configuration=config)\n import main.orchestrator as Orchestrator\n\n ########################\n # Visit events ingestion\n ########################\n\n visits_schema = StructType([\n StructField('id_user', IntegerType(), False),\n StructField('id_video', IntegerType(), False),\n StructField('id_device', IntegerType(), False),\n StructField('id_location', IntegerType(), False),\n StructField('visit_date', TimestampType(), True)\n ])\n visits_stream = KafkaConnector(spark_configuration).get_stream('visits', start_from_begining=False).load()\n visits_stream = extract_json_data(visits_stream, visits_schema)\n\n # For each micro-batch of visit events\n visits_stream.writeStream \\\n .option(\"checkpointLocation\", \"checkpoint/visits\") \\\n .foreachBatch(lambda visits_batch, index: Orchestrator.ingest_visits(visits_batch, spark_configuration, index))\\\n .start()\n\n # Await stream termination\n spark_configuration.spark_session.streams.awaitAnyTermination()\n\n\nif __name__ 
== \"__main__\":\n main()\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> print(msg * copies) <|reserved_special_token_1|> <|reserved_special_token_0|> name = input('Enter your name : ') age = int(input('Enter your age : ')) year = int(100 - age + datetime.now().year) copies = int(input('How many copies of the above message do you want? : ')) msg = name + ' will turn 100 years old in ' + str(year) + '\n' print(msg * copies) <|reserved_special_token_1|> from datetime import datetime name = input('Enter your name : ') age = int(input('Enter your age : ')) year = int(100 - age + datetime.now().year) copies = int(input('How many copies of the above message do you want? : ')) msg = name + ' will turn 100 years old in ' + str(year) + '\n' print(msg * copies) <|reserved_special_token_1|> # Ques1: # To create a program that asks the user to enter their name and their age # and prints out a message addressed to them that tells them the year that # they will turn 100 years old. Additionally, the program asks the user for # another number and prints out that many copies of the previous message on # separate lines. from datetime import datetime name = input('Enter your name : ') age = int(input('Enter your age : ')) year = int((100-age) + datetime.now().year) copies = int(input('How many copies of the above message do you want? : ')) msg = name + " will turn 100 years old in " + str(year) + "\n" print(msg * copies)
flexible
{ "blob_id": "948b793359555f98872e0bdbf6db970ed1ff3b83", "index": 7046, "step-1": "<mask token>\n", "step-2": "<mask token>\nprint(msg * copies)\n", "step-3": "<mask token>\nname = input('Enter your name : ')\nage = int(input('Enter your age : '))\nyear = int(100 - age + datetime.now().year)\ncopies = int(input('How many copies of the above message do you want? : '))\nmsg = name + ' will turn 100 years old in ' + str(year) + '\\n'\nprint(msg * copies)\n", "step-4": "from datetime import datetime\nname = input('Enter your name : ')\nage = int(input('Enter your age : '))\nyear = int(100 - age + datetime.now().year)\ncopies = int(input('How many copies of the above message do you want? : '))\nmsg = name + ' will turn 100 years old in ' + str(year) + '\\n'\nprint(msg * copies)\n", "step-5": "# Ques1:\r\n# To create a program that asks the user to enter their name and their age \r\n# and prints out a message addressed to them that tells them the year that \r\n# they will turn 100 years old. Additionally, the program asks the user for \r\n# another number and prints out that many copies of the previous message on \r\n# separate lines.\r\n\r\nfrom datetime import datetime\r\nname = input('Enter your name : ')\r\nage = int(input('Enter your age : '))\r\nyear = int((100-age) + datetime.now().year)\r\ncopies = int(input('How many copies of the above message do you want? : '))\r\nmsg = name + \" will turn 100 years old in \" + str(year) + \"\\n\" \r\nprint(msg * copies)\r\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
<|reserved_special_token_0|> def gprmc_convert(line): """Translates $GPRMC line into documented array str line - the GPRMC line returns - the data documented into array """ gps = line.strip().split(',') if gps[2] == 'V': return raw_date = gps[9] time = '' date = raw_date[0:2] month = raw_date[2:4] year = raw_date[4:] time += date + '/' + month + '/20' + year return [time] <|reserved_special_token_0|> def gpgga_convert(line): """Translates $GPGGPA line into documented array str line - the GPGGA line returns - the data documented into array """ gps = line.strip().split(',') if gps[6] == '0': return fix = '' if gps[6] == '1': fix = 'GPS fix' elif gps[6] == '2': fix = 'DGPS fix' elif gps[6] == '4': fix = 'RTK Fix coordinate (centimeter precision)' elif gps[6] == '5': fix = 'RTK Float (decimeter precision)' lat = ddm_dd_convert(gps[2], gps[3]) long = ddm_dd_convert(gps[4], gps[5]) return [lat, long, fix] <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> def gprmc_convert(line): """Translates $GPRMC line into documented array str line - the GPRMC line returns - the data documented into array """ gps = line.strip().split(',') if gps[2] == 'V': return raw_date = gps[9] time = '' date = raw_date[0:2] month = raw_date[2:4] year = raw_date[4:] time += date + '/' + month + '/20' + year return [time] def gpvtg_convert(line): """Translates $GPVTG line into documented array Data only used for measuring ground speed str line - the GPVTG line returns - the data documented into array """ gps = line.strip().split(',') if gps[1] == '0.00': return return [] def gpgga_convert(line): """Translates $GPGGPA line into documented array str line - the GPGGA line returns - the data documented into array """ gps = line.strip().split(',') if gps[6] == '0': return fix = '' if gps[6] == '1': fix = 'GPS fix' elif gps[6] == '2': fix = 'DGPS fix' elif gps[6] == '4': fix = 'RTK Fix coordinate (centimeter precision)' elif gps[6] == '5': fix = 'RTK Float (decimeter 
precision)' lat = ddm_dd_convert(gps[2], gps[3]) long = ddm_dd_convert(gps[4], gps[5]) return [lat, long, fix] <|reserved_special_token_0|> <|reserved_special_token_1|> def ddm_dd_convert(coord, direction): """Converts GPS reading from DDM to DD str coord - the ddm coordinate from $GPGGA str direction - the direction of the coord (N,S,W,E) returns - string representation of dd coordinate """ value = '' if direction == 'S' or direction == 'W': value += '-' value += coord[0:-7] minute = float(coord[-7:]) decimal = round(minute / 60, 8) result = str(decimal)[1:] value += result return value def gprmc_convert(line): """Translates $GPRMC line into documented array str line - the GPRMC line returns - the data documented into array """ gps = line.strip().split(',') if gps[2] == 'V': return raw_date = gps[9] time = '' date = raw_date[0:2] month = raw_date[2:4] year = raw_date[4:] time += date + '/' + month + '/20' + year return [time] def gpvtg_convert(line): """Translates $GPVTG line into documented array Data only used for measuring ground speed str line - the GPVTG line returns - the data documented into array """ gps = line.strip().split(',') if gps[1] == '0.00': return return [] def gpgga_convert(line): """Translates $GPGGPA line into documented array str line - the GPGGA line returns - the data documented into array """ gps = line.strip().split(',') if gps[6] == '0': return fix = '' if gps[6] == '1': fix = 'GPS fix' elif gps[6] == '2': fix = 'DGPS fix' elif gps[6] == '4': fix = 'RTK Fix coordinate (centimeter precision)' elif gps[6] == '5': fix = 'RTK Float (decimeter precision)' lat = ddm_dd_convert(gps[2], gps[3]) long = ddm_dd_convert(gps[4], gps[5]) return [lat, long, fix] <|reserved_special_token_0|> <|reserved_special_token_1|> def ddm_dd_convert(coord, direction): """Converts GPS reading from DDM to DD str coord - the ddm coordinate from $GPGGA str direction - the direction of the coord (N,S,W,E) returns - string representation of dd coordinate """ value = '' 
if direction == 'S' or direction == 'W': value += '-' value += coord[0:-7] minute = float(coord[-7:]) decimal = round(minute / 60, 8) result = str(decimal)[1:] value += result return value def gprmc_convert(line): """Translates $GPRMC line into documented array str line - the GPRMC line returns - the data documented into array """ gps = line.strip().split(',') if gps[2] == 'V': return raw_date = gps[9] time = '' date = raw_date[0:2] month = raw_date[2:4] year = raw_date[4:] time += date + '/' + month + '/20' + year return [time] def gpvtg_convert(line): """Translates $GPVTG line into documented array Data only used for measuring ground speed str line - the GPVTG line returns - the data documented into array """ gps = line.strip().split(',') if gps[1] == '0.00': return return [] def gpgga_convert(line): """Translates $GPGGPA line into documented array str line - the GPGGA line returns - the data documented into array """ gps = line.strip().split(',') if gps[6] == '0': return fix = '' if gps[6] == '1': fix = 'GPS fix' elif gps[6] == '2': fix = 'DGPS fix' elif gps[6] == '4': fix = 'RTK Fix coordinate (centimeter precision)' elif gps[6] == '5': fix = 'RTK Float (decimeter precision)' lat = ddm_dd_convert(gps[2], gps[3]) long = ddm_dd_convert(gps[4], gps[5]) return [lat, long, fix] def gpgsa_convert(line): """Translates $GPGSA line into documented array str line - the GPGSA line returns - the data documented into array """ gps = line.strip().split(',') if gps[2] == '1': return if gps[2] == '2': fix = '2D fix' else: fix = '3D fix' return [fix] <|reserved_special_token_1|> def ddm_dd_convert(coord, direction): """Converts GPS reading from DDM to DD str coord - the ddm coordinate from $GPGGA str direction - the direction of the coord (N,S,W,E) returns - string representation of dd coordinate """ value = '' if (direction == 'S' or direction == 'W'): value += '-' value += coord[0:-7] minute = float(coord[-7:]) decimal = round(minute / 60, 8) result = str(decimal)[1:] value 
+= result return value def gprmc_convert(line): """Translates $GPRMC line into documented array str line - the GPRMC line returns - the data documented into array """ gps = line.strip().split(',') #check data if gps[2] == 'V': return raw_date = gps[9] time = '' date = raw_date[0:2] month = raw_date[2:4] year = raw_date[4:] #modify year if reaches year 2100 time += date + '/' + month + '/20' + year return [time] def gpvtg_convert(line): """Translates $GPVTG line into documented array Data only used for measuring ground speed str line - the GPVTG line returns - the data documented into array """ gps = line.strip().split(',') #check data if gps[1] == '0.00': return #jsondata = {'Horizontal speed': gps[7] + ' kmph or ' + gps[5] + 'knots'} return [] def gpgga_convert(line): """Translates $GPGGPA line into documented array str line - the GPGGA line returns - the data documented into array """ gps = line.strip().split(',') #check data if gps[6] == '0' : return fix = '' if gps[6] == '1': fix = 'GPS fix' elif gps[6] == '2': fix = 'DGPS fix' elif gps[6] == '4': fix = 'RTK Fix coordinate (centimeter precision)' elif gps[6] == '5': fix = 'RTK Float (decimeter precision)' #utc = gps[1][0:2] + ':' + gps[1][2:4] + ':' + gps[1][4:6] lat = ddm_dd_convert(gps[2], gps[3]) long = ddm_dd_convert(gps[4], gps[5]) return [lat, long, fix] def gpgsa_convert(line): """Translates $GPGSA line into documented array str line - the GPGSA line returns - the data documented into array """ gps = line.strip().split(',') #check data if gps[2] == '1': return if gps[2] == '2': fix = '2D fix' else: fix = '3D fix' return [fix]
flexible
{ "blob_id": "dc5630e17bb6ed85157b06108250427be41416d1", "index": 7766, "step-1": "<mask token>\n\n\ndef gprmc_convert(line):\n \"\"\"Translates $GPRMC line into documented array\n str line - the GPRMC line\n returns - the data documented into array\n \"\"\"\n gps = line.strip().split(',')\n if gps[2] == 'V':\n return\n raw_date = gps[9]\n time = ''\n date = raw_date[0:2]\n month = raw_date[2:4]\n year = raw_date[4:]\n time += date + '/' + month + '/20' + year\n return [time]\n\n\n<mask token>\n\n\ndef gpgga_convert(line):\n \"\"\"Translates $GPGGPA line into documented array\n str line - the GPGGA line\n returns - the data documented into array\n \"\"\"\n gps = line.strip().split(',')\n if gps[6] == '0':\n return\n fix = ''\n if gps[6] == '1':\n fix = 'GPS fix'\n elif gps[6] == '2':\n fix = 'DGPS fix'\n elif gps[6] == '4':\n fix = 'RTK Fix coordinate (centimeter precision)'\n elif gps[6] == '5':\n fix = 'RTK Float (decimeter precision)'\n lat = ddm_dd_convert(gps[2], gps[3])\n long = ddm_dd_convert(gps[4], gps[5])\n return [lat, long, fix]\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\ndef gprmc_convert(line):\n \"\"\"Translates $GPRMC line into documented array\n str line - the GPRMC line\n returns - the data documented into array\n \"\"\"\n gps = line.strip().split(',')\n if gps[2] == 'V':\n return\n raw_date = gps[9]\n time = ''\n date = raw_date[0:2]\n month = raw_date[2:4]\n year = raw_date[4:]\n time += date + '/' + month + '/20' + year\n return [time]\n\n\ndef gpvtg_convert(line):\n \"\"\"Translates $GPVTG line into documented array\n Data only used for measuring ground speed\n str line - the GPVTG line\n returns - the data documented into array\n \"\"\"\n gps = line.strip().split(',')\n if gps[1] == '0.00':\n return\n return []\n\n\ndef gpgga_convert(line):\n \"\"\"Translates $GPGGPA line into documented array\n str line - the GPGGA line\n returns - the data documented into array\n \"\"\"\n gps = line.strip().split(',')\n if gps[6] == '0':\n return\n 
fix = ''\n if gps[6] == '1':\n fix = 'GPS fix'\n elif gps[6] == '2':\n fix = 'DGPS fix'\n elif gps[6] == '4':\n fix = 'RTK Fix coordinate (centimeter precision)'\n elif gps[6] == '5':\n fix = 'RTK Float (decimeter precision)'\n lat = ddm_dd_convert(gps[2], gps[3])\n long = ddm_dd_convert(gps[4], gps[5])\n return [lat, long, fix]\n\n\n<mask token>\n", "step-3": "def ddm_dd_convert(coord, direction):\n \"\"\"Converts GPS reading from DDM to DD\n str coord - the ddm coordinate from $GPGGA\n str direction - the direction of the coord (N,S,W,E)\n returns - string representation of dd coordinate\n \"\"\"\n value = ''\n if direction == 'S' or direction == 'W':\n value += '-'\n value += coord[0:-7]\n minute = float(coord[-7:])\n decimal = round(minute / 60, 8)\n result = str(decimal)[1:]\n value += result\n return value\n\n\ndef gprmc_convert(line):\n \"\"\"Translates $GPRMC line into documented array\n str line - the GPRMC line\n returns - the data documented into array\n \"\"\"\n gps = line.strip().split(',')\n if gps[2] == 'V':\n return\n raw_date = gps[9]\n time = ''\n date = raw_date[0:2]\n month = raw_date[2:4]\n year = raw_date[4:]\n time += date + '/' + month + '/20' + year\n return [time]\n\n\ndef gpvtg_convert(line):\n \"\"\"Translates $GPVTG line into documented array\n Data only used for measuring ground speed\n str line - the GPVTG line\n returns - the data documented into array\n \"\"\"\n gps = line.strip().split(',')\n if gps[1] == '0.00':\n return\n return []\n\n\ndef gpgga_convert(line):\n \"\"\"Translates $GPGGPA line into documented array\n str line - the GPGGA line\n returns - the data documented into array\n \"\"\"\n gps = line.strip().split(',')\n if gps[6] == '0':\n return\n fix = ''\n if gps[6] == '1':\n fix = 'GPS fix'\n elif gps[6] == '2':\n fix = 'DGPS fix'\n elif gps[6] == '4':\n fix = 'RTK Fix coordinate (centimeter precision)'\n elif gps[6] == '5':\n fix = 'RTK Float (decimeter precision)'\n lat = ddm_dd_convert(gps[2], gps[3])\n long = 
ddm_dd_convert(gps[4], gps[5])\n return [lat, long, fix]\n\n\n<mask token>\n", "step-4": "def ddm_dd_convert(coord, direction):\n \"\"\"Converts GPS reading from DDM to DD\n str coord - the ddm coordinate from $GPGGA\n str direction - the direction of the coord (N,S,W,E)\n returns - string representation of dd coordinate\n \"\"\"\n value = ''\n if direction == 'S' or direction == 'W':\n value += '-'\n value += coord[0:-7]\n minute = float(coord[-7:])\n decimal = round(minute / 60, 8)\n result = str(decimal)[1:]\n value += result\n return value\n\n\ndef gprmc_convert(line):\n \"\"\"Translates $GPRMC line into documented array\n str line - the GPRMC line\n returns - the data documented into array\n \"\"\"\n gps = line.strip().split(',')\n if gps[2] == 'V':\n return\n raw_date = gps[9]\n time = ''\n date = raw_date[0:2]\n month = raw_date[2:4]\n year = raw_date[4:]\n time += date + '/' + month + '/20' + year\n return [time]\n\n\ndef gpvtg_convert(line):\n \"\"\"Translates $GPVTG line into documented array\n Data only used for measuring ground speed\n str line - the GPVTG line\n returns - the data documented into array\n \"\"\"\n gps = line.strip().split(',')\n if gps[1] == '0.00':\n return\n return []\n\n\ndef gpgga_convert(line):\n \"\"\"Translates $GPGGPA line into documented array\n str line - the GPGGA line\n returns - the data documented into array\n \"\"\"\n gps = line.strip().split(',')\n if gps[6] == '0':\n return\n fix = ''\n if gps[6] == '1':\n fix = 'GPS fix'\n elif gps[6] == '2':\n fix = 'DGPS fix'\n elif gps[6] == '4':\n fix = 'RTK Fix coordinate (centimeter precision)'\n elif gps[6] == '5':\n fix = 'RTK Float (decimeter precision)'\n lat = ddm_dd_convert(gps[2], gps[3])\n long = ddm_dd_convert(gps[4], gps[5])\n return [lat, long, fix]\n\n\ndef gpgsa_convert(line):\n \"\"\"Translates $GPGSA line into documented array\n str line - the GPGSA line\n returns - the data documented into array\n \"\"\"\n gps = line.strip().split(',')\n if gps[2] == '1':\n 
return\n if gps[2] == '2':\n fix = '2D fix'\n else:\n fix = '3D fix'\n return [fix]\n", "step-5": "\r\n\r\ndef ddm_dd_convert(coord, direction):\r\n \"\"\"Converts GPS reading from DDM to DD\r\n str coord - the ddm coordinate from $GPGGA\r\n str direction - the direction of the coord (N,S,W,E)\r\n returns - string representation of dd coordinate\r\n \"\"\"\r\n value = ''\r\n if (direction == 'S' or direction == 'W'):\r\n value += '-'\r\n value += coord[0:-7] \r\n minute = float(coord[-7:])\r\n decimal = round(minute / 60, 8)\r\n result = str(decimal)[1:]\r\n value += result\r\n return value\r\n\r\ndef gprmc_convert(line):\r\n \"\"\"Translates $GPRMC line into documented array\r\n str line - the GPRMC line\r\n returns - the data documented into array\r\n \"\"\"\r\n gps = line.strip().split(',')\r\n #check data\r\n if gps[2] == 'V':\r\n return\r\n raw_date = gps[9]\r\n time = ''\r\n date = raw_date[0:2]\r\n month = raw_date[2:4]\r\n year = raw_date[4:]\r\n #modify year if reaches year 2100\r\n time += date + '/' + month + '/20' + year\r\n return [time]\r\n\r\n\r\ndef gpvtg_convert(line):\r\n \"\"\"Translates $GPVTG line into documented array\r\n Data only used for measuring ground speed\r\n str line - the GPVTG line\r\n returns - the data documented into array\r\n \"\"\"\r\n gps = line.strip().split(',')\r\n #check data\r\n if gps[1] == '0.00': \r\n return\r\n #jsondata = {'Horizontal speed': gps[7] + ' kmph or ' + gps[5] + 'knots'}\r\n return []\r\n\r\n\r\ndef gpgga_convert(line):\r\n \"\"\"Translates $GPGGPA line into documented array\r\n str line - the GPGGA line\r\n returns - the data documented into array\r\n \"\"\"\r\n gps = line.strip().split(',')\r\n #check data\r\n if gps[6] == '0' :\r\n return\r\n fix = ''\r\n if gps[6] == '1':\r\n fix = 'GPS fix'\r\n elif gps[6] == '2':\r\n fix = 'DGPS fix'\r\n elif gps[6] == '4':\r\n fix = 'RTK Fix coordinate (centimeter precision)'\r\n elif gps[6] == '5':\r\n fix = 'RTK Float (decimeter precision)'\r\n #utc = gps[1][0:2] 
+ ':' + gps[1][2:4] + ':' + gps[1][4:6]\r\n lat = ddm_dd_convert(gps[2], gps[3])\r\n long = ddm_dd_convert(gps[4], gps[5]) \r\n return [lat, long, fix]\r\n\r\n \r\ndef gpgsa_convert(line):\r\n \"\"\"Translates $GPGSA line into documented array\r\n str line - the GPGSA line\r\n returns - the data documented into array\r\n \"\"\"\r\n gps = line.strip().split(',')\r\n #check data\r\n if gps[2] == '1':\r\n return\r\n if gps[2] == '2':\r\n fix = '2D fix'\r\n else:\r\n fix = '3D fix'\r\n return [fix]", "step-ids": [ 2, 3, 4, 5, 6 ] }
[ 2, 3, 4, 5, 6 ]
from pynput import keyboard # list of chars entered by the user list = [] number_of_chars = 0 # if entered chars go above MAX LENGTH they will be written inside a file MAX_LENGTH = 300 def on_press(key): global number_of_chars global list list.append(key) number_of_chars+=1 if number_of_chars>=MAX_LENGTH: write_in_file() list.clear() number_of_chars = 0 def on_release(key): if key == keyboard.Key.esc: # if the user exist write all the contents inside the file write_in_file() return False def write_in_file(): file = open("strokes.txt","a") for k in list: file.writelines("{}\n".format(str(k))) file.close() # erases contents of the file when the program is runned open("strokes.txt","w").close() with keyboard.Listener(on_press = on_press,on_release=on_release) as listener: listener.join()
normal
{ "blob_id": "e60fcf19560b4826577797c8ae8b626ff984dcfd", "index": 6923, "step-1": "<mask token>\n\n\ndef on_release(key):\n if key == keyboard.Key.esc:\n write_in_file()\n return False\n\n\ndef write_in_file():\n file = open('strokes.txt', 'a')\n for k in list:\n file.writelines('{}\\n'.format(str(k)))\n file.close()\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\ndef on_press(key):\n global number_of_chars\n global list\n list.append(key)\n number_of_chars += 1\n if number_of_chars >= MAX_LENGTH:\n write_in_file()\n list.clear()\n number_of_chars = 0\n\n\ndef on_release(key):\n if key == keyboard.Key.esc:\n write_in_file()\n return False\n\n\ndef write_in_file():\n file = open('strokes.txt', 'a')\n for k in list:\n file.writelines('{}\\n'.format(str(k)))\n file.close()\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\ndef on_press(key):\n global number_of_chars\n global list\n list.append(key)\n number_of_chars += 1\n if number_of_chars >= MAX_LENGTH:\n write_in_file()\n list.clear()\n number_of_chars = 0\n\n\ndef on_release(key):\n if key == keyboard.Key.esc:\n write_in_file()\n return False\n\n\ndef write_in_file():\n file = open('strokes.txt', 'a')\n for k in list:\n file.writelines('{}\\n'.format(str(k)))\n file.close()\n\n\nopen('strokes.txt', 'w').close()\nwith keyboard.Listener(on_press=on_press, on_release=on_release) as listener:\n listener.join()\n", "step-4": "<mask token>\nlist = []\nnumber_of_chars = 0\nMAX_LENGTH = 300\n\n\ndef on_press(key):\n global number_of_chars\n global list\n list.append(key)\n number_of_chars += 1\n if number_of_chars >= MAX_LENGTH:\n write_in_file()\n list.clear()\n number_of_chars = 0\n\n\ndef on_release(key):\n if key == keyboard.Key.esc:\n write_in_file()\n return False\n\n\ndef write_in_file():\n file = open('strokes.txt', 'a')\n for k in list:\n file.writelines('{}\\n'.format(str(k)))\n file.close()\n\n\nopen('strokes.txt', 'w').close()\nwith keyboard.Listener(on_press=on_press, on_release=on_release) as listener:\n 
listener.join()\n", "step-5": "from pynput import keyboard\n\n# list of chars entered by the user\nlist = []\nnumber_of_chars = 0\n# if entered chars go above MAX LENGTH they will be written inside a file\nMAX_LENGTH = 300\n\ndef on_press(key):\n global number_of_chars\n global list\n \n list.append(key)\n number_of_chars+=1\n\n\n if number_of_chars>=MAX_LENGTH:\n write_in_file()\n list.clear()\n number_of_chars = 0\n\ndef on_release(key):\n if key == keyboard.Key.esc:\n # if the user exist write all the contents inside the file\n write_in_file()\n return False\n\ndef write_in_file():\n file = open(\"strokes.txt\",\"a\")\n for k in list:\n file.writelines(\"{}\\n\".format(str(k)))\n file.close()\n\n\n\n# erases contents of the file when the program is runned\nopen(\"strokes.txt\",\"w\").close()\n\nwith keyboard.Listener(on_press = on_press,on_release=on_release) as listener:\n listener.join()", "step-ids": [ 2, 3, 4, 5, 7 ] }
[ 2, 3, 4, 5, 7 ]
def densenet(D,DT,F,model): import scipy.io as sio import time import os import math import numpy as np import matplotlib.pyplot as plt Dataset = D if DT == 'org': data_type = 'original' else: data_type = 'augmented' fs = model.fs fm1 = model.fm1 batch_size = model.batch_size[0] learn_rate = model.learn_rate num_layers = model.num_layers k_fm = model.k_fm bottleneck = model.bottleneck dropout_prob = model.dropout_prob num_of_test = model.num_of_test ############### # load training / testing set from CrossVal folder, # names for training set, 'D1_1st_fold_train.mat', 'Augmented_D1_1st_fold_train.mat' # name for testing set, 'D1_1st_fold_test.mat' ############### if F == 1: file_name = '1st_fold' elif F == 2: file_name = '2nd_fold' elif F == 3: file_name = '3rd_fold' elif F == 4: file_name = '4th_fold' elif F == 5: file_name = '5th_fold' path = os.path.join('CrossVal', 'D'+Dataset) print("path " ,path) if data_type == 'original': Train =sio.loadmat(os.path.join(path, 'D'+Dataset+'_'+file_name+'_train.mat')) else: Train =sio.loadmat(os.path.join(path, 'Augmented_D'+Dataset+'_'+file_name+'_train.mat')) Test = sio.loadmat(os.path.join(path, 'D'+Dataset+'_'+file_name+'_test.mat')) if Dataset == '1': number_of_classes = 24 num_of_ep = 50 num_of_test = 20 if data_type == 'augmented': train_imgs = 526190 else: train_imgs = 52619 iteration = math.ceil((num_of_ep * train_imgs) / batch_size) elif Dataset == '2': number_of_classes = 36 num_of_ep = 200 if data_type == 'augmented': train_imgs = 20120 else: train_imgs = 2012 iteration = math.ceil((num_of_ep * train_imgs) / batch_size) else: number_of_classes = 10 num_of_ep = 200 if data_type == 'augmented': train_imgs = 16000 else: train_imgs = 1600 iteration = math.ceil((num_of_ep * train_imgs) / batch_size) iteration_to_display = int(iteration / num_of_test) list_to_display = [] for i in range(num_of_test): if i !=num_of_test: list_to_display.append(int(iteration_to_display*(i+1))) del i total_fm_Block_1 = fm1+(num_layers*k_fm) 
total_fm_Block_2 = total_fm_Block_1+(num_layers*k_fm) total_fm_Block_3 = total_fm_Block_2+(num_layers*k_fm) fc_nodes = [total_fm_Block_3 ] Train_Images = Train['trainImages'] Train_Labels = Train['trainLabels2'] total_trainImages = len(Train_Images[0,2]) print(total_trainImages) Train_Images = Train_Images.reshape(784,total_trainImages).transpose().astype('float32') Train_Labels = Train_Labels.transpose().astype('float64') Test_Images = Test['testImages'] Test_Labels = Test['testLabels2'] total_testImages = len(Test_Images[0,2]) Test_Images = Test_Images.reshape(784,total_testImages).transpose().astype('float32') Test_Labels = Test_Labels.transpose().astype('float64') Target_labels = np.argmax(Test_Labels,axis=1) del Test del Train import tensorflow as tf tf.reset_default_graph() g = tf.Graph() with g.as_default(): tf.set_random_seed(1) def weight_variable(shape,n): initial = tf.truncated_normal(shape, stddev=0.1) return tf.Variable(initial,name=n) def bias_variable(shape,n): initial = tf.constant(0.1, shape=shape) return tf.Variable(initial,name=n) def avg_pool(input, s): return tf.nn.avg_pool(input, [ 1, s, s, 1 ], [1, s, s, 1 ], 'SAME') def max_pool(input, s): return tf.nn.max_pool(input, [ 1, s, s, 1 ], [1, s, s, 1 ], 'SAME') def conv2d_1(input, in_features, out_features, kernel_size, name="W", with_bias=False): W = weight_variable([ kernel_size, kernel_size, in_features, out_features], name) conv = tf.nn.conv2d(input, W, [ 1, 1, 1, 1 ], padding='SAME') if with_bias: return conv + bias_variable([ out_features ]) return conv def batch_activ_conv(current, in_features, out_features, kernel_size, is_training, keep_prob, idx, scope='conv_block'): with tf.variable_scope(scope): current = tf.layers.batch_normalization(current, scale=True, training=is_training) current = tf.nn.relu(current) current = conv2d_1(current, in_features, out_features, kernel_size, name="W"+str(idx)) current = tf.nn.dropout(current, keep_prob) return current def block(input, layers, 
in_features, growth, is_training, keep_prob, name="Block_"): with tf.name_scope(name): with tf.variable_scope(name): current = input features = in_features for idx in range(layers): tmp = batch_activ_conv(current, features, growth, fs, is_training, keep_prob, idx+1, scope='conv_block_'+str(idx+1)) current = tf.concat((current, tmp), axis=3) features += growth return current, features x = tf.placeholder(tf.float32, shape=[None, 784]) y_ = tf.placeholder(tf.float32, shape=[None, number_of_classes]) x_image = tf.reshape(x, [-1, 28, 28, 1]) keep_prob = tf.placeholder(tf.float32) training = tf.placeholder(tf.bool) current = conv2d_1(x_image, 1, fm1, fs, name="W1", with_bias=False) current, features = block(current, num_layers, fm1, k_fm, training, keep_prob, name="Block_1") b1_conv_printop = tf.Print(current, [current]) with tf.name_scope("transition_lyr"): #current = batch_activ_conv(current, features, features, 1, training, keep_prob, 1, scope='Transition_layer_1') current = batch_activ_conv(current, features, bottleneck*k_fm, 1, training, keep_prob, 1, scope='Transition_layer_1') t1_b_conv_printop = tf.Print(current, [current]) current = batch_activ_conv(current, bottleneck*k_fm, features, fs, training, keep_prob, 1, scope='Transition_layer_1_1') t1_conv_printop = tf.Print(current, [current]) current = max_pool(current, 2) #current = avg_pool(current, 2) current, features = block(current, num_layers, features, k_fm, training, keep_prob, name="Block_2") b2_conv_printop = tf.Print(current, [current]) with tf.name_scope("transition_lyr_2"): #current = batch_activ_conv(current, features, features, 1, training, keep_prob, 1, scope='Transition_layer_2') current = batch_activ_conv(current, features, bottleneck*k_fm, 1, training, keep_prob, 1, scope='Transition_layer_2') t2_b_conv_printop = tf.Print(current, [current]) current = batch_activ_conv(current, bottleneck*k_fm, features, fs, training, keep_prob, 1, scope='Transition_layer_2_1') t2_conv_printop = tf.Print(current, 
[current]) current = max_pool(current, 2) #current = avg_pool(current, 2) current, features = block(current, num_layers, features, k_fm, training, keep_prob, name="Block_3") b3_conv_printop = tf.Print(current, [current]) with tf.name_scope("transition_lyr_3"): #current = batch_activ_conv(current, features, features, 1, training, keep_prob, 1, scope='Transition_layer_3') current = batch_activ_conv(current, features, bottleneck*k_fm, 1, training, keep_prob, 1, scope='Transition_layer_3') t3_b_conv_printop = tf.Print(current, [current]) current = batch_activ_conv(current, bottleneck*k_fm, features, fs, training, keep_prob, 1, scope='Transition_layer_3_1') t3_conv_printop = tf.Print(current, [current]) current = avg_pool(current, 7) current = tf.reshape(current, [tf.shape(current)[0], -1]) with tf.name_scope("Dense_Last_lyr"): W_fc3 = weight_variable([fc_nodes[0], number_of_classes],"w_fc3") b_fc3 = bias_variable([number_of_classes],"b_fc3") y_conv = tf.matmul(current, W_fc3) + b_fc3 prediction_prob = tf.nn.softmax(y_conv) prediction_prob_printop = tf.Print(prediction_prob, [prediction_prob]) with tf.name_scope("Xent"): cross_entropy = tf.reduce_mean( tf.nn.softmax_cross_entropy_with_logits(labels=y_, logits=y_conv)) with tf.name_scope("train"): extra_update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS) with tf.control_dependencies(extra_update_ops): train_step = tf.train.AdamOptimizer(learn_rate).minimize(cross_entropy) with tf.name_scope("accuracy"): correct_prediction = tf.equal(tf.argmax(y_conv, 1), tf.argmax(y_, 1)) accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32)) wrong_prediction = tf.not_equal(tf.argmax(y_conv, 1), tf.argmax(y_, 1)) wrong_prediction_printop = tf.Print(wrong_prediction, [wrong_prediction]) predicted_labels = tf.argmax(y_conv, 1) predicted_labels_printop = tf.Print(predicted_labels, [predicted_labels]) index = 0 index_end = index + batch_size remaining = 0 start_time = time.time() costs = [] accuracy_list = [] 
list_of_predicted_list = [] with tf.Session() as sess: sess.run(tf.global_variables_initializer(),tf.set_random_seed(0)) for i in range(iteration): if index_end > total_trainImages: remaining = total_trainImages - (index_end-batch_size) images = Train_Images[(index_end-batch_size):total_trainImages, :] labels = Train_Labels[(index_end-batch_size):total_trainImages, :] index = 0 index_end = index + batch_size - remaining images = np.vstack((images, Train_Images[index:index_end, :])) labels = np.vstack((labels, Train_Labels[index:index_end, :])) batch = (images, labels) index = index_end index_end = index + batch_size else: batch = (Train_Images[index:index_end, :], Train_Labels[index:index_end, :]) index = index + batch_size index_end = index_end + batch_size if i in list_to_display: elapsed_time = time.time() - start_time print('Elapsed Time Before for loop: %f secs' % elapsed_time) Accuracy = 0 itrt_index = i print('debug: %d & %d' % (iteration,i)) if Dataset == '1': if file_name == '5th_fold': num_test = 13154 else: num_test = 13155 elif Dataset == '2': num_test = 503 elif Dataset == '3': num_test = 400 print(num_test) for img_index in range(num_test): t_image = np.array(Test_Images[img_index,:]).reshape(1,784) t_label = np.array(Test_Labels[img_index,:]).reshape(1,number_of_classes) test_acc = accuracy.eval(feed_dict={ x: t_image, y_: t_label, keep_prob: 1.0, training:False}) Accuracy += test_acc wrong, predicted, prediction_prob = sess.run([wrong_prediction_printop, predicted_labels_printop,prediction_prob_printop], feed_dict={ x: t_image, y_: t_label, keep_prob: 1.0, training:False}) if img_index <= 3: b1, b2, b3, t1, t2, t3, t1_b, t2_b, t3_b = sess.run([b1_conv_printop, b2_conv_printop, b3_conv_printop, t1_conv_printop,t2_conv_printop, t3_conv_printop, t1_b_conv_printop, t2_b_conv_printop, t3_b_conv_printop], feed_dict={ x: t_image, y_: t_label, keep_prob: 1.0, training:False}) if img_index == 0: b1_list = b1 b2_list = b2 b3_list = b3 t1_list = t1 t2_list = 
t2 t3_list = t3 t1_b_list = t1_b t2_b_list = t2_b t3_b_list = t3_b else: b1_list = np.append(b1_list,b1,axis=0) b2_list = np.append(b2_list,b2,axis=0) b3_list = np.append(b3_list,b3,axis=0) t1_list = np.append(t1_list,t1,axis=0) t2_list = np.append(t2_list,t2,axis=0) t3_list = np.append(t3_list,t3,axis=0) t1_b_list = np.append(t1_b_list,t1_b,axis=0) t2_b_list = np.append(t2_b_list,t2_b,axis=0) t3_b_list = np.append(t3_b_list,t3_b,axis=0) if img_index == 0 : wrong_list_1 = wrong predicted_list_1 = predicted prediction_prob_1 = prediction_prob else: wrong_list_1 = np.append(wrong_list_1,wrong,axis=0) predicted_list_1 = np.append(predicted_list_1,predicted,axis=0) prediction_prob_1 = np.append(prediction_prob_1, prediction_prob) Accuracy = Accuracy/num_test accuracy_list.append(Accuracy) list_of_predicted_list.append(predicted_list_1) print('Average test accuracy: %g' % Accuracy) epoch_around = math.ceil((itrt_index * batch_size) / total_trainImages) sio.savemat('D'+Dataset+'_'+file_name+'_'+str(epoch_around)+'ep_'+data_type+'_predicted_labels_list.mat', {'wrong_list':wrong_list_1, 'predicted_list': predicted_list_1, 'Target_labels':Target_labels, 'prediction_prob':prediction_prob, 'b1_list':b1_list, 'b2_list':b2_list, 'b3_list':b3_list, 't1_list':t1_list, 't2_list':t2_list, 't3_list':t3_list, 't1_b_list':t1_b_list, 't2_b_list':t2_b_list, 't3_b_list':t3_b_list}) elapsed_time = time.time() - start_time print('Elapsed Time: %f secs' % elapsed_time) print('Batch Size & Iteration & Total Train Imgs : %d & %d & %d' % (batch_size, itrt_index, total_trainImages)) print('learning_rate : %g ' % learn_rate) print('1st conv FMaps : %d ' % fm1) print('number of layers in dense block : %d ' % num_layers) print('growth rate(k_fm) : %d ' % k_fm) print('filter size : %d ' % fs) print('bottleneck : %d' % bottleneck) print('dropout prob : %g ' % dropout_prob) print('data_type :', data_type) print('file_name :', file_name) print('FC nodes : %d' % fc_nodes[0]) epoch_around = (itrt_index 
* batch_size) / total_trainImages print('Number of epochs : %f ' % epoch_around) # plot the cost plt.plot(np.squeeze(costs)) plt.ylabel('cost') plt.xlabel('iterations (per tens)') plt.title("Learning rate =" + str(learn_rate)) plt.show() if i % 100 == 0: train_accuracy = accuracy.eval(feed_dict={ x: batch[0], y_: batch[1], keep_prob: 1.0, training:False}) print('step %d, training accuracy %g' % (i, train_accuracy)) _, loss = sess.run([train_step, cross_entropy], feed_dict={x: batch[0], y_: batch[1], keep_prob: dropout_prob, training:True}) iteration_cost = 0 # Defines a cost related to an epoch num_minibatches = int(total_trainImages / batch_size) # number of minibatches of size minibatch_size in the train set iteration_cost += loss / num_minibatches costs.append(iteration_cost) if i % 100 == 0: print ('Loss: ',loss) Accuracy = 0 training_time = time.time() - start_time print('Training Time: %f secs' % training_time) if Dataset == '1': if file_name == '5th_fold': num_test = 13154 else: num_test = 13155 elif Dataset == '2': num_test = 503 elif Dataset == '3': num_test = 400 print(num_test) for img_index in range(num_test): t_image = np.array(Test_Images[img_index,:]).reshape(1,784) t_label = np.array(Test_Labels[img_index,:]).reshape(1,number_of_classes) test_acc = accuracy.eval(feed_dict={ x: t_image, y_: t_label, keep_prob: 1.0, training:False}) Accuracy += test_acc wrong, predicted = sess.run([wrong_prediction_printop, predicted_labels_printop], feed_dict={ x: t_image, y_: t_label, keep_prob: 1.0, training:False}) if img_index <= 3: b1, b2, b3, t1, t2, t3, t1_b, t2_b, t3_b = sess.run([b1_conv_printop, b2_conv_printop, b3_conv_printop, t1_conv_printop,t2_conv_printop, t3_conv_printop, t1_b_conv_printop, t2_b_conv_printop, t3_b_conv_printop], feed_dict={ x: t_image, y_: t_label, keep_prob: 1.0, training:False}) if img_index == 0: b1_list = b1 b2_list = b2 b3_list = b3 t1_list = t1 t2_list = t2 t3_list = t3 t1_b_list = t1_b t2_b_list = t2_b t3_b_list = t3_b else: 
b1_list = np.append(b1_list,b1,axis=0) b2_list = np.append(b2_list,b2,axis=0) b3_list = np.append(b3_list,b3,axis=0) t1_list = np.append(t1_list,t1,axis=0) t2_list = np.append(t2_list,t2,axis=0) t3_list = np.append(t3_list,t3,axis=0) t1_b_list = np.append(t1_b_list,t1_b,axis=0) t2_b_list = np.append(t2_b_list,t2_b,axis=0) t3_b_list = np.append(t3_b_list,t3_b,axis=0) if img_index == 0 : wrong_list = wrong predicted_list = predicted else: wrong_list = np.append(wrong_list,wrong,axis=0) predicted_list = np.append(predicted_list,predicted,axis=0) Accuracy = Accuracy/num_test print('Average test accuracy: %g' % Accuracy) accuracy_list.append(Accuracy) list_of_predicted_list.append(predicted_list) elapsed_time = time.time() - start_time print('Elapsed Time: %f secs' % elapsed_time) print('Batch Size & Iteration & Total Train Imgs : %d & %d & %d' % (batch_size, itrt_index, total_trainImages)) print('learning_rate : %g ' % learn_rate) print('1st conv FMaps : %d ' % fm1) print('number of layers in dense block : %d ' % num_layers) print('growth rate(k_fm) : %d ' % k_fm) print('filter size : %d ' % fs) print('bottleneck : %d' % bottleneck) print('dropout prob : %g ' % dropout_prob) print('data_type :', data_type) print('file_name :', file_name) print('FC nodes : %d' % fc_nodes[0]) epoch_around = math.ceil((iteration * batch_size) / total_trainImages) if epoch_around == 51: epoch_around = 50 print('Number of epochs : %f ' % epoch_around) # plot the cost plt.plot(np.squeeze(costs)) plt.ylabel('cost') plt.xlabel('iterations (per tens)') plt.title("Learning rate =" + str(learn_rate)) plt.show() sio.savemat('D'+Dataset+'_'+file_name+'_'+str(epoch_around)+'ep_'+data_type+'_predicted_labels_list.mat', {'wrong_list':wrong_list, 'predicted_list': predicted_list, 'Target_labels':Target_labels, 'accuracy_list':accuracy_list, 'list_of_predicted_list':list_of_predicted_list, 'costs':costs, 'b1_list':b1_list, 'b2_list':b2_list, 'b3_list':b3_list, 't1_list':t1_list, 't2_list':t2_list, 
't3_list':t3_list, 't1_b_list':t1_b_list, 't2_b_list':t2_b_list, 't3_b_list':t3_b_list}) class MyModel: num_layers = 4 k_fm = 24 fs = 3 fm1 = 32 bottleneck = 4 dropout_prob = 0.8 batch_size = [16] learn_rate = 0.001 num_of_test = 40 model = MyModel() densenet('1','org',1,model) densenet('1','org',2,model) densenet('1','org',3,model) densenet('1','org',4,model) densenet('1','org',5,model) densenet('1','aug',1,model) densenet('1','aug',2,model) densenet('1','aug',3,model) densenet('1','aug',4,model) densenet('1','aug',5,model) densenet('2','org',1,model) densenet('2','org',2,model) densenet('2','org',3,model) densenet('2','org',4,model) densenet('2','org',5,model) densenet('2','aug',1,model) densenet('2','aug',2,model) densenet('2','aug',3,model) densenet('2','aug',4,model) densenet('2','aug',5,model) densenet('3','org',1,model) densenet('3','org',2,model) densenet('3','org',3,model) densenet('3','org',4,model) densenet('3','org',5,model) densenet('3','aug',1,model) densenet('3','aug',2,model) densenet('3','aug',3,model) densenet('3','aug',4,model) densenet('3','aug',5,model)
normal
{ "blob_id": "48270f70a9d69d15f808f22ec2d11d337b2c4845", "index": 7414, "step-1": "<mask token>\n\n\nclass MyModel:\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\nclass MyModel:\n num_layers = 4\n k_fm = 24\n fs = 3\n fm1 = 32\n bottleneck = 4\n dropout_prob = 0.8\n batch_size = [16]\n learn_rate = 0.001\n num_of_test = 40\n\n\n<mask token>\n", "step-3": "def densenet(D, DT, F, model):\n import scipy.io as sio\n import time\n import os\n import math\n import numpy as np\n import matplotlib.pyplot as plt\n Dataset = D\n if DT == 'org':\n data_type = 'original'\n else:\n data_type = 'augmented'\n fs = model.fs\n fm1 = model.fm1\n batch_size = model.batch_size[0]\n learn_rate = model.learn_rate\n num_layers = model.num_layers\n k_fm = model.k_fm\n bottleneck = model.bottleneck\n dropout_prob = model.dropout_prob\n num_of_test = model.num_of_test\n if F == 1:\n file_name = '1st_fold'\n elif F == 2:\n file_name = '2nd_fold'\n elif F == 3:\n file_name = '3rd_fold'\n elif F == 4:\n file_name = '4th_fold'\n elif F == 5:\n file_name = '5th_fold'\n path = os.path.join('CrossVal', 'D' + Dataset)\n print('path ', path)\n if data_type == 'original':\n Train = sio.loadmat(os.path.join(path, 'D' + Dataset + '_' +\n file_name + '_train.mat'))\n else:\n Train = sio.loadmat(os.path.join(path, 'Augmented_D' + Dataset +\n '_' + file_name + '_train.mat'))\n Test = sio.loadmat(os.path.join(path, 'D' + Dataset + '_' + file_name +\n '_test.mat'))\n if Dataset == '1':\n number_of_classes = 24\n num_of_ep = 50\n num_of_test = 20\n if data_type == 'augmented':\n train_imgs = 526190\n else:\n train_imgs = 52619\n iteration = math.ceil(num_of_ep * train_imgs / batch_size)\n elif Dataset == '2':\n number_of_classes = 36\n num_of_ep = 200\n if data_type == 'augmented':\n train_imgs = 20120\n else:\n train_imgs = 2012\n iteration = math.ceil(num_of_ep * 
train_imgs / batch_size)\n else:\n number_of_classes = 10\n num_of_ep = 200\n if data_type == 'augmented':\n train_imgs = 16000\n else:\n train_imgs = 1600\n iteration = math.ceil(num_of_ep * train_imgs / batch_size)\n iteration_to_display = int(iteration / num_of_test)\n list_to_display = []\n for i in range(num_of_test):\n if i != num_of_test:\n list_to_display.append(int(iteration_to_display * (i + 1)))\n del i\n total_fm_Block_1 = fm1 + num_layers * k_fm\n total_fm_Block_2 = total_fm_Block_1 + num_layers * k_fm\n total_fm_Block_3 = total_fm_Block_2 + num_layers * k_fm\n fc_nodes = [total_fm_Block_3]\n Train_Images = Train['trainImages']\n Train_Labels = Train['trainLabels2']\n total_trainImages = len(Train_Images[0, 2])\n print(total_trainImages)\n Train_Images = Train_Images.reshape(784, total_trainImages).transpose(\n ).astype('float32')\n Train_Labels = Train_Labels.transpose().astype('float64')\n Test_Images = Test['testImages']\n Test_Labels = Test['testLabels2']\n total_testImages = len(Test_Images[0, 2])\n Test_Images = Test_Images.reshape(784, total_testImages).transpose(\n ).astype('float32')\n Test_Labels = Test_Labels.transpose().astype('float64')\n Target_labels = np.argmax(Test_Labels, axis=1)\n del Test\n del Train\n import tensorflow as tf\n tf.reset_default_graph()\n g = tf.Graph()\n with g.as_default():\n tf.set_random_seed(1)\n\n def weight_variable(shape, n):\n initial = tf.truncated_normal(shape, stddev=0.1)\n return tf.Variable(initial, name=n)\n\n def bias_variable(shape, n):\n initial = tf.constant(0.1, shape=shape)\n return tf.Variable(initial, name=n)\n\n def avg_pool(input, s):\n return tf.nn.avg_pool(input, [1, s, s, 1], [1, s, s, 1], 'SAME')\n\n def max_pool(input, s):\n return tf.nn.max_pool(input, [1, s, s, 1], [1, s, s, 1], 'SAME')\n\n def conv2d_1(input, in_features, out_features, kernel_size, name=\n 'W', with_bias=False):\n W = weight_variable([kernel_size, kernel_size, in_features,\n out_features], name)\n conv = 
tf.nn.conv2d(input, W, [1, 1, 1, 1], padding='SAME')\n if with_bias:\n return conv + bias_variable([out_features])\n return conv\n\n def batch_activ_conv(current, in_features, out_features,\n kernel_size, is_training, keep_prob, idx, scope='conv_block'):\n with tf.variable_scope(scope):\n current = tf.layers.batch_normalization(current, scale=True,\n training=is_training)\n current = tf.nn.relu(current)\n current = conv2d_1(current, in_features, out_features,\n kernel_size, name='W' + str(idx))\n current = tf.nn.dropout(current, keep_prob)\n return current\n\n def block(input, layers, in_features, growth, is_training,\n keep_prob, name='Block_'):\n with tf.name_scope(name):\n with tf.variable_scope(name):\n current = input\n features = in_features\n for idx in range(layers):\n tmp = batch_activ_conv(current, features, growth,\n fs, is_training, keep_prob, idx + 1, scope=\n 'conv_block_' + str(idx + 1))\n current = tf.concat((current, tmp), axis=3)\n features += growth\n return current, features\n x = tf.placeholder(tf.float32, shape=[None, 784])\n y_ = tf.placeholder(tf.float32, shape=[None, number_of_classes])\n x_image = tf.reshape(x, [-1, 28, 28, 1])\n keep_prob = tf.placeholder(tf.float32)\n training = tf.placeholder(tf.bool)\n current = conv2d_1(x_image, 1, fm1, fs, name='W1', with_bias=False)\n current, features = block(current, num_layers, fm1, k_fm, training,\n keep_prob, name='Block_1')\n b1_conv_printop = tf.Print(current, [current])\n with tf.name_scope('transition_lyr'):\n current = batch_activ_conv(current, features, bottleneck * k_fm,\n 1, training, keep_prob, 1, scope='Transition_layer_1')\n t1_b_conv_printop = tf.Print(current, [current])\n current = batch_activ_conv(current, bottleneck * k_fm, features,\n fs, training, keep_prob, 1, scope='Transition_layer_1_1')\n t1_conv_printop = tf.Print(current, [current])\n current = max_pool(current, 2)\n current, features = block(current, num_layers, features, k_fm,\n training, keep_prob, name='Block_2')\n 
b2_conv_printop = tf.Print(current, [current])\n with tf.name_scope('transition_lyr_2'):\n current = batch_activ_conv(current, features, bottleneck * k_fm,\n 1, training, keep_prob, 1, scope='Transition_layer_2')\n t2_b_conv_printop = tf.Print(current, [current])\n current = batch_activ_conv(current, bottleneck * k_fm, features,\n fs, training, keep_prob, 1, scope='Transition_layer_2_1')\n t2_conv_printop = tf.Print(current, [current])\n current = max_pool(current, 2)\n current, features = block(current, num_layers, features, k_fm,\n training, keep_prob, name='Block_3')\n b3_conv_printop = tf.Print(current, [current])\n with tf.name_scope('transition_lyr_3'):\n current = batch_activ_conv(current, features, bottleneck * k_fm,\n 1, training, keep_prob, 1, scope='Transition_layer_3')\n t3_b_conv_printop = tf.Print(current, [current])\n current = batch_activ_conv(current, bottleneck * k_fm, features,\n fs, training, keep_prob, 1, scope='Transition_layer_3_1')\n t3_conv_printop = tf.Print(current, [current])\n current = avg_pool(current, 7)\n current = tf.reshape(current, [tf.shape(current)[0], -1])\n with tf.name_scope('Dense_Last_lyr'):\n W_fc3 = weight_variable([fc_nodes[0], number_of_classes], 'w_fc3')\n b_fc3 = bias_variable([number_of_classes], 'b_fc3')\n y_conv = tf.matmul(current, W_fc3) + b_fc3\n prediction_prob = tf.nn.softmax(y_conv)\n prediction_prob_printop = tf.Print(prediction_prob, [\n prediction_prob])\n with tf.name_scope('Xent'):\n cross_entropy = tf.reduce_mean(tf.nn.\n softmax_cross_entropy_with_logits(labels=y_, logits=y_conv))\n with tf.name_scope('train'):\n extra_update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)\n with tf.control_dependencies(extra_update_ops):\n train_step = tf.train.AdamOptimizer(learn_rate).minimize(\n cross_entropy)\n with tf.name_scope('accuracy'):\n correct_prediction = tf.equal(tf.argmax(y_conv, 1), tf.argmax(\n y_, 1))\n accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))\n wrong_prediction = 
tf.not_equal(tf.argmax(y_conv, 1), tf.argmax\n (y_, 1))\n wrong_prediction_printop = tf.Print(wrong_prediction, [\n wrong_prediction])\n predicted_labels = tf.argmax(y_conv, 1)\n predicted_labels_printop = tf.Print(predicted_labels, [\n predicted_labels])\n index = 0\n index_end = index + batch_size\n remaining = 0\n start_time = time.time()\n costs = []\n accuracy_list = []\n list_of_predicted_list = []\n with tf.Session() as sess:\n sess.run(tf.global_variables_initializer(), tf.set_random_seed(0))\n for i in range(iteration):\n if index_end > total_trainImages:\n remaining = total_trainImages - (index_end - batch_size)\n images = Train_Images[index_end - batch_size:\n total_trainImages, :]\n labels = Train_Labels[index_end - batch_size:\n total_trainImages, :]\n index = 0\n index_end = index + batch_size - remaining\n images = np.vstack((images, Train_Images[index:\n index_end, :]))\n labels = np.vstack((labels, Train_Labels[index:\n index_end, :]))\n batch = images, labels\n index = index_end\n index_end = index + batch_size\n else:\n batch = Train_Images[index:index_end, :], Train_Labels[\n index:index_end, :]\n index = index + batch_size\n index_end = index_end + batch_size\n if i in list_to_display:\n elapsed_time = time.time() - start_time\n print('Elapsed Time Before for loop: %f secs' %\n elapsed_time)\n Accuracy = 0\n itrt_index = i\n print('debug: %d & %d' % (iteration, i))\n if Dataset == '1':\n if file_name == '5th_fold':\n num_test = 13154\n else:\n num_test = 13155\n elif Dataset == '2':\n num_test = 503\n elif Dataset == '3':\n num_test = 400\n print(num_test)\n for img_index in range(num_test):\n t_image = np.array(Test_Images[img_index, :]).reshape(\n 1, 784)\n t_label = np.array(Test_Labels[img_index, :]).reshape(\n 1, number_of_classes)\n test_acc = accuracy.eval(feed_dict={x: t_image, y_:\n t_label, keep_prob: 1.0, training: False})\n Accuracy += test_acc\n wrong, predicted, prediction_prob = sess.run([\n wrong_prediction_printop,\n 
predicted_labels_printop,\n prediction_prob_printop], feed_dict={x: t_image,\n y_: t_label, keep_prob: 1.0, training: False})\n if img_index <= 3:\n b1, b2, b3, t1, t2, t3, t1_b, t2_b, t3_b = (sess\n .run([b1_conv_printop, b2_conv_printop,\n b3_conv_printop, t1_conv_printop,\n t2_conv_printop, t3_conv_printop,\n t1_b_conv_printop, t2_b_conv_printop,\n t3_b_conv_printop], feed_dict={x: t_image,\n y_: t_label, keep_prob: 1.0, training: False}))\n if img_index == 0:\n b1_list = b1\n b2_list = b2\n b3_list = b3\n t1_list = t1\n t2_list = t2\n t3_list = t3\n t1_b_list = t1_b\n t2_b_list = t2_b\n t3_b_list = t3_b\n else:\n b1_list = np.append(b1_list, b1, axis=0)\n b2_list = np.append(b2_list, b2, axis=0)\n b3_list = np.append(b3_list, b3, axis=0)\n t1_list = np.append(t1_list, t1, axis=0)\n t2_list = np.append(t2_list, t2, axis=0)\n t3_list = np.append(t3_list, t3, axis=0)\n t1_b_list = np.append(t1_b_list, t1_b, axis=0)\n t2_b_list = np.append(t2_b_list, t2_b, axis=0)\n t3_b_list = np.append(t3_b_list, t3_b, axis=0)\n if img_index == 0:\n wrong_list_1 = wrong\n predicted_list_1 = predicted\n prediction_prob_1 = prediction_prob\n else:\n wrong_list_1 = np.append(wrong_list_1, wrong,\n axis=0)\n predicted_list_1 = np.append(predicted_list_1,\n predicted, axis=0)\n prediction_prob_1 = np.append(prediction_prob_1,\n prediction_prob)\n Accuracy = Accuracy / num_test\n accuracy_list.append(Accuracy)\n list_of_predicted_list.append(predicted_list_1)\n print('Average test accuracy: %g' % Accuracy)\n epoch_around = math.ceil(itrt_index * batch_size /\n total_trainImages)\n sio.savemat('D' + Dataset + '_' + file_name + '_' + str\n (epoch_around) + 'ep_' + data_type +\n '_predicted_labels_list.mat', {'wrong_list':\n wrong_list_1, 'predicted_list': predicted_list_1,\n 'Target_labels': Target_labels, 'prediction_prob':\n prediction_prob, 'b1_list': b1_list, 'b2_list':\n b2_list, 'b3_list': b3_list, 't1_list': t1_list,\n 't2_list': t2_list, 't3_list': t3_list, 't1_b_list':\n 
t1_b_list, 't2_b_list': t2_b_list, 't3_b_list':\n t3_b_list})\n elapsed_time = time.time() - start_time\n print('Elapsed Time: %f secs' % elapsed_time)\n print(\n 'Batch Size & Iteration & Total Train Imgs : %d & %d & %d'\n % (batch_size, itrt_index, total_trainImages))\n print('learning_rate : %g ' % learn_rate)\n print('1st conv FMaps : %d ' % fm1)\n print('number of layers in dense block : %d ' % num_layers)\n print('growth rate(k_fm) : %d ' % k_fm)\n print('filter size : %d ' % fs)\n print('bottleneck : %d' % bottleneck)\n print('dropout prob : %g ' % dropout_prob)\n print('data_type :', data_type)\n print('file_name :', file_name)\n print('FC nodes : %d' % fc_nodes[0])\n epoch_around = itrt_index * batch_size / total_trainImages\n print('Number of epochs : %f ' % epoch_around)\n plt.plot(np.squeeze(costs))\n plt.ylabel('cost')\n plt.xlabel('iterations (per tens)')\n plt.title('Learning rate =' + str(learn_rate))\n plt.show()\n if i % 100 == 0:\n train_accuracy = accuracy.eval(feed_dict={x: batch[0],\n y_: batch[1], keep_prob: 1.0, training: False})\n print('step %d, training accuracy %g' % (i, train_accuracy)\n )\n _, loss = sess.run([train_step, cross_entropy], feed_dict={\n x: batch[0], y_: batch[1], keep_prob: dropout_prob,\n training: True})\n iteration_cost = 0\n num_minibatches = int(total_trainImages / batch_size)\n iteration_cost += loss / num_minibatches\n costs.append(iteration_cost)\n if i % 100 == 0:\n print('Loss: ', loss)\n Accuracy = 0\n training_time = time.time() - start_time\n print('Training Time: %f secs' % training_time)\n if Dataset == '1':\n if file_name == '5th_fold':\n num_test = 13154\n else:\n num_test = 13155\n elif Dataset == '2':\n num_test = 503\n elif Dataset == '3':\n num_test = 400\n print(num_test)\n for img_index in range(num_test):\n t_image = np.array(Test_Images[img_index, :]).reshape(1, 784)\n t_label = np.array(Test_Labels[img_index, :]).reshape(1,\n number_of_classes)\n test_acc = accuracy.eval(feed_dict={x: t_image, 
y_: t_label,\n keep_prob: 1.0, training: False})\n Accuracy += test_acc\n wrong, predicted = sess.run([wrong_prediction_printop,\n predicted_labels_printop], feed_dict={x: t_image, y_:\n t_label, keep_prob: 1.0, training: False})\n if img_index <= 3:\n b1, b2, b3, t1, t2, t3, t1_b, t2_b, t3_b = sess.run([\n b1_conv_printop, b2_conv_printop, b3_conv_printop,\n t1_conv_printop, t2_conv_printop, t3_conv_printop,\n t1_b_conv_printop, t2_b_conv_printop,\n t3_b_conv_printop], feed_dict={x: t_image, y_:\n t_label, keep_prob: 1.0, training: False})\n if img_index == 0:\n b1_list = b1\n b2_list = b2\n b3_list = b3\n t1_list = t1\n t2_list = t2\n t3_list = t3\n t1_b_list = t1_b\n t2_b_list = t2_b\n t3_b_list = t3_b\n else:\n b1_list = np.append(b1_list, b1, axis=0)\n b2_list = np.append(b2_list, b2, axis=0)\n b3_list = np.append(b3_list, b3, axis=0)\n t1_list = np.append(t1_list, t1, axis=0)\n t2_list = np.append(t2_list, t2, axis=0)\n t3_list = np.append(t3_list, t3, axis=0)\n t1_b_list = np.append(t1_b_list, t1_b, axis=0)\n t2_b_list = np.append(t2_b_list, t2_b, axis=0)\n t3_b_list = np.append(t3_b_list, t3_b, axis=0)\n if img_index == 0:\n wrong_list = wrong\n predicted_list = predicted\n else:\n wrong_list = np.append(wrong_list, wrong, axis=0)\n predicted_list = np.append(predicted_list, predicted,\n axis=0)\n Accuracy = Accuracy / num_test\n print('Average test accuracy: %g' % Accuracy)\n accuracy_list.append(Accuracy)\n list_of_predicted_list.append(predicted_list)\n elapsed_time = time.time() - start_time\n print('Elapsed Time: %f secs' % elapsed_time)\n print(\n 'Batch Size & Iteration & Total Train Imgs : %d & %d & %d' %\n (batch_size, itrt_index, total_trainImages))\n print('learning_rate : %g ' % learn_rate)\n print('1st conv FMaps : %d ' % fm1)\n print('number of layers in dense block : %d ' % num_layers)\n print('growth rate(k_fm) : %d ' % k_fm)\n print('filter size : %d ' % fs)\n print('bottleneck : %d' % bottleneck)\n print('dropout prob : %g ' % 
dropout_prob)\n print('data_type :', data_type)\n print('file_name :', file_name)\n print('FC nodes : %d' % fc_nodes[0])\n epoch_around = math.ceil(iteration * batch_size / total_trainImages\n )\n if epoch_around == 51:\n epoch_around = 50\n print('Number of epochs : %f ' % epoch_around)\n plt.plot(np.squeeze(costs))\n plt.ylabel('cost')\n plt.xlabel('iterations (per tens)')\n plt.title('Learning rate =' + str(learn_rate))\n plt.show()\n sio.savemat('D' + Dataset + '_' + file_name + '_' + str(\n epoch_around) + 'ep_' + data_type +\n '_predicted_labels_list.mat', {'wrong_list': wrong_list,\n 'predicted_list': predicted_list, 'Target_labels':\n Target_labels, 'accuracy_list': accuracy_list,\n 'list_of_predicted_list': list_of_predicted_list, 'costs':\n costs, 'b1_list': b1_list, 'b2_list': b2_list, 'b3_list':\n b3_list, 't1_list': t1_list, 't2_list': t2_list, 't3_list':\n t3_list, 't1_b_list': t1_b_list, 't2_b_list': t2_b_list,\n 't3_b_list': t3_b_list})\n\n\nclass MyModel:\n num_layers = 4\n k_fm = 24\n fs = 3\n fm1 = 32\n bottleneck = 4\n dropout_prob = 0.8\n batch_size = [16]\n learn_rate = 0.001\n num_of_test = 40\n\n\n<mask token>\n", "step-4": "def densenet(D, DT, F, model):\n import scipy.io as sio\n import time\n import os\n import math\n import numpy as np\n import matplotlib.pyplot as plt\n Dataset = D\n if DT == 'org':\n data_type = 'original'\n else:\n data_type = 'augmented'\n fs = model.fs\n fm1 = model.fm1\n batch_size = model.batch_size[0]\n learn_rate = model.learn_rate\n num_layers = model.num_layers\n k_fm = model.k_fm\n bottleneck = model.bottleneck\n dropout_prob = model.dropout_prob\n num_of_test = model.num_of_test\n if F == 1:\n file_name = '1st_fold'\n elif F == 2:\n file_name = '2nd_fold'\n elif F == 3:\n file_name = '3rd_fold'\n elif F == 4:\n file_name = '4th_fold'\n elif F == 5:\n file_name = '5th_fold'\n path = os.path.join('CrossVal', 'D' + Dataset)\n print('path ', path)\n if data_type == 'original':\n Train = 
sio.loadmat(os.path.join(path, 'D' + Dataset + '_' +\n file_name + '_train.mat'))\n else:\n Train = sio.loadmat(os.path.join(path, 'Augmented_D' + Dataset +\n '_' + file_name + '_train.mat'))\n Test = sio.loadmat(os.path.join(path, 'D' + Dataset + '_' + file_name +\n '_test.mat'))\n if Dataset == '1':\n number_of_classes = 24\n num_of_ep = 50\n num_of_test = 20\n if data_type == 'augmented':\n train_imgs = 526190\n else:\n train_imgs = 52619\n iteration = math.ceil(num_of_ep * train_imgs / batch_size)\n elif Dataset == '2':\n number_of_classes = 36\n num_of_ep = 200\n if data_type == 'augmented':\n train_imgs = 20120\n else:\n train_imgs = 2012\n iteration = math.ceil(num_of_ep * train_imgs / batch_size)\n else:\n number_of_classes = 10\n num_of_ep = 200\n if data_type == 'augmented':\n train_imgs = 16000\n else:\n train_imgs = 1600\n iteration = math.ceil(num_of_ep * train_imgs / batch_size)\n iteration_to_display = int(iteration / num_of_test)\n list_to_display = []\n for i in range(num_of_test):\n if i != num_of_test:\n list_to_display.append(int(iteration_to_display * (i + 1)))\n del i\n total_fm_Block_1 = fm1 + num_layers * k_fm\n total_fm_Block_2 = total_fm_Block_1 + num_layers * k_fm\n total_fm_Block_3 = total_fm_Block_2 + num_layers * k_fm\n fc_nodes = [total_fm_Block_3]\n Train_Images = Train['trainImages']\n Train_Labels = Train['trainLabels2']\n total_trainImages = len(Train_Images[0, 2])\n print(total_trainImages)\n Train_Images = Train_Images.reshape(784, total_trainImages).transpose(\n ).astype('float32')\n Train_Labels = Train_Labels.transpose().astype('float64')\n Test_Images = Test['testImages']\n Test_Labels = Test['testLabels2']\n total_testImages = len(Test_Images[0, 2])\n Test_Images = Test_Images.reshape(784, total_testImages).transpose(\n ).astype('float32')\n Test_Labels = Test_Labels.transpose().astype('float64')\n Target_labels = np.argmax(Test_Labels, axis=1)\n del Test\n del Train\n import tensorflow as tf\n tf.reset_default_graph()\n g 
= tf.Graph()\n with g.as_default():\n tf.set_random_seed(1)\n\n def weight_variable(shape, n):\n initial = tf.truncated_normal(shape, stddev=0.1)\n return tf.Variable(initial, name=n)\n\n def bias_variable(shape, n):\n initial = tf.constant(0.1, shape=shape)\n return tf.Variable(initial, name=n)\n\n def avg_pool(input, s):\n return tf.nn.avg_pool(input, [1, s, s, 1], [1, s, s, 1], 'SAME')\n\n def max_pool(input, s):\n return tf.nn.max_pool(input, [1, s, s, 1], [1, s, s, 1], 'SAME')\n\n def conv2d_1(input, in_features, out_features, kernel_size, name=\n 'W', with_bias=False):\n W = weight_variable([kernel_size, kernel_size, in_features,\n out_features], name)\n conv = tf.nn.conv2d(input, W, [1, 1, 1, 1], padding='SAME')\n if with_bias:\n return conv + bias_variable([out_features])\n return conv\n\n def batch_activ_conv(current, in_features, out_features,\n kernel_size, is_training, keep_prob, idx, scope='conv_block'):\n with tf.variable_scope(scope):\n current = tf.layers.batch_normalization(current, scale=True,\n training=is_training)\n current = tf.nn.relu(current)\n current = conv2d_1(current, in_features, out_features,\n kernel_size, name='W' + str(idx))\n current = tf.nn.dropout(current, keep_prob)\n return current\n\n def block(input, layers, in_features, growth, is_training,\n keep_prob, name='Block_'):\n with tf.name_scope(name):\n with tf.variable_scope(name):\n current = input\n features = in_features\n for idx in range(layers):\n tmp = batch_activ_conv(current, features, growth,\n fs, is_training, keep_prob, idx + 1, scope=\n 'conv_block_' + str(idx + 1))\n current = tf.concat((current, tmp), axis=3)\n features += growth\n return current, features\n x = tf.placeholder(tf.float32, shape=[None, 784])\n y_ = tf.placeholder(tf.float32, shape=[None, number_of_classes])\n x_image = tf.reshape(x, [-1, 28, 28, 1])\n keep_prob = tf.placeholder(tf.float32)\n training = tf.placeholder(tf.bool)\n current = conv2d_1(x_image, 1, fm1, fs, name='W1', with_bias=False)\n 
current, features = block(current, num_layers, fm1, k_fm, training,\n keep_prob, name='Block_1')\n b1_conv_printop = tf.Print(current, [current])\n with tf.name_scope('transition_lyr'):\n current = batch_activ_conv(current, features, bottleneck * k_fm,\n 1, training, keep_prob, 1, scope='Transition_layer_1')\n t1_b_conv_printop = tf.Print(current, [current])\n current = batch_activ_conv(current, bottleneck * k_fm, features,\n fs, training, keep_prob, 1, scope='Transition_layer_1_1')\n t1_conv_printop = tf.Print(current, [current])\n current = max_pool(current, 2)\n current, features = block(current, num_layers, features, k_fm,\n training, keep_prob, name='Block_2')\n b2_conv_printop = tf.Print(current, [current])\n with tf.name_scope('transition_lyr_2'):\n current = batch_activ_conv(current, features, bottleneck * k_fm,\n 1, training, keep_prob, 1, scope='Transition_layer_2')\n t2_b_conv_printop = tf.Print(current, [current])\n current = batch_activ_conv(current, bottleneck * k_fm, features,\n fs, training, keep_prob, 1, scope='Transition_layer_2_1')\n t2_conv_printop = tf.Print(current, [current])\n current = max_pool(current, 2)\n current, features = block(current, num_layers, features, k_fm,\n training, keep_prob, name='Block_3')\n b3_conv_printop = tf.Print(current, [current])\n with tf.name_scope('transition_lyr_3'):\n current = batch_activ_conv(current, features, bottleneck * k_fm,\n 1, training, keep_prob, 1, scope='Transition_layer_3')\n t3_b_conv_printop = tf.Print(current, [current])\n current = batch_activ_conv(current, bottleneck * k_fm, features,\n fs, training, keep_prob, 1, scope='Transition_layer_3_1')\n t3_conv_printop = tf.Print(current, [current])\n current = avg_pool(current, 7)\n current = tf.reshape(current, [tf.shape(current)[0], -1])\n with tf.name_scope('Dense_Last_lyr'):\n W_fc3 = weight_variable([fc_nodes[0], number_of_classes], 'w_fc3')\n b_fc3 = bias_variable([number_of_classes], 'b_fc3')\n y_conv = tf.matmul(current, W_fc3) + b_fc3\n 
prediction_prob = tf.nn.softmax(y_conv)\n prediction_prob_printop = tf.Print(prediction_prob, [\n prediction_prob])\n with tf.name_scope('Xent'):\n cross_entropy = tf.reduce_mean(tf.nn.\n softmax_cross_entropy_with_logits(labels=y_, logits=y_conv))\n with tf.name_scope('train'):\n extra_update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)\n with tf.control_dependencies(extra_update_ops):\n train_step = tf.train.AdamOptimizer(learn_rate).minimize(\n cross_entropy)\n with tf.name_scope('accuracy'):\n correct_prediction = tf.equal(tf.argmax(y_conv, 1), tf.argmax(\n y_, 1))\n accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))\n wrong_prediction = tf.not_equal(tf.argmax(y_conv, 1), tf.argmax\n (y_, 1))\n wrong_prediction_printop = tf.Print(wrong_prediction, [\n wrong_prediction])\n predicted_labels = tf.argmax(y_conv, 1)\n predicted_labels_printop = tf.Print(predicted_labels, [\n predicted_labels])\n index = 0\n index_end = index + batch_size\n remaining = 0\n start_time = time.time()\n costs = []\n accuracy_list = []\n list_of_predicted_list = []\n with tf.Session() as sess:\n sess.run(tf.global_variables_initializer(), tf.set_random_seed(0))\n for i in range(iteration):\n if index_end > total_trainImages:\n remaining = total_trainImages - (index_end - batch_size)\n images = Train_Images[index_end - batch_size:\n total_trainImages, :]\n labels = Train_Labels[index_end - batch_size:\n total_trainImages, :]\n index = 0\n index_end = index + batch_size - remaining\n images = np.vstack((images, Train_Images[index:\n index_end, :]))\n labels = np.vstack((labels, Train_Labels[index:\n index_end, :]))\n batch = images, labels\n index = index_end\n index_end = index + batch_size\n else:\n batch = Train_Images[index:index_end, :], Train_Labels[\n index:index_end, :]\n index = index + batch_size\n index_end = index_end + batch_size\n if i in list_to_display:\n elapsed_time = time.time() - start_time\n print('Elapsed Time Before for loop: %f secs' %\n 
elapsed_time)\n Accuracy = 0\n itrt_index = i\n print('debug: %d & %d' % (iteration, i))\n if Dataset == '1':\n if file_name == '5th_fold':\n num_test = 13154\n else:\n num_test = 13155\n elif Dataset == '2':\n num_test = 503\n elif Dataset == '3':\n num_test = 400\n print(num_test)\n for img_index in range(num_test):\n t_image = np.array(Test_Images[img_index, :]).reshape(\n 1, 784)\n t_label = np.array(Test_Labels[img_index, :]).reshape(\n 1, number_of_classes)\n test_acc = accuracy.eval(feed_dict={x: t_image, y_:\n t_label, keep_prob: 1.0, training: False})\n Accuracy += test_acc\n wrong, predicted, prediction_prob = sess.run([\n wrong_prediction_printop,\n predicted_labels_printop,\n prediction_prob_printop], feed_dict={x: t_image,\n y_: t_label, keep_prob: 1.0, training: False})\n if img_index <= 3:\n b1, b2, b3, t1, t2, t3, t1_b, t2_b, t3_b = (sess\n .run([b1_conv_printop, b2_conv_printop,\n b3_conv_printop, t1_conv_printop,\n t2_conv_printop, t3_conv_printop,\n t1_b_conv_printop, t2_b_conv_printop,\n t3_b_conv_printop], feed_dict={x: t_image,\n y_: t_label, keep_prob: 1.0, training: False}))\n if img_index == 0:\n b1_list = b1\n b2_list = b2\n b3_list = b3\n t1_list = t1\n t2_list = t2\n t3_list = t3\n t1_b_list = t1_b\n t2_b_list = t2_b\n t3_b_list = t3_b\n else:\n b1_list = np.append(b1_list, b1, axis=0)\n b2_list = np.append(b2_list, b2, axis=0)\n b3_list = np.append(b3_list, b3, axis=0)\n t1_list = np.append(t1_list, t1, axis=0)\n t2_list = np.append(t2_list, t2, axis=0)\n t3_list = np.append(t3_list, t3, axis=0)\n t1_b_list = np.append(t1_b_list, t1_b, axis=0)\n t2_b_list = np.append(t2_b_list, t2_b, axis=0)\n t3_b_list = np.append(t3_b_list, t3_b, axis=0)\n if img_index == 0:\n wrong_list_1 = wrong\n predicted_list_1 = predicted\n prediction_prob_1 = prediction_prob\n else:\n wrong_list_1 = np.append(wrong_list_1, wrong,\n axis=0)\n predicted_list_1 = np.append(predicted_list_1,\n predicted, axis=0)\n prediction_prob_1 = np.append(prediction_prob_1,\n 
prediction_prob)\n Accuracy = Accuracy / num_test\n accuracy_list.append(Accuracy)\n list_of_predicted_list.append(predicted_list_1)\n print('Average test accuracy: %g' % Accuracy)\n epoch_around = math.ceil(itrt_index * batch_size /\n total_trainImages)\n sio.savemat('D' + Dataset + '_' + file_name + '_' + str\n (epoch_around) + 'ep_' + data_type +\n '_predicted_labels_list.mat', {'wrong_list':\n wrong_list_1, 'predicted_list': predicted_list_1,\n 'Target_labels': Target_labels, 'prediction_prob':\n prediction_prob, 'b1_list': b1_list, 'b2_list':\n b2_list, 'b3_list': b3_list, 't1_list': t1_list,\n 't2_list': t2_list, 't3_list': t3_list, 't1_b_list':\n t1_b_list, 't2_b_list': t2_b_list, 't3_b_list':\n t3_b_list})\n elapsed_time = time.time() - start_time\n print('Elapsed Time: %f secs' % elapsed_time)\n print(\n 'Batch Size & Iteration & Total Train Imgs : %d & %d & %d'\n % (batch_size, itrt_index, total_trainImages))\n print('learning_rate : %g ' % learn_rate)\n print('1st conv FMaps : %d ' % fm1)\n print('number of layers in dense block : %d ' % num_layers)\n print('growth rate(k_fm) : %d ' % k_fm)\n print('filter size : %d ' % fs)\n print('bottleneck : %d' % bottleneck)\n print('dropout prob : %g ' % dropout_prob)\n print('data_type :', data_type)\n print('file_name :', file_name)\n print('FC nodes : %d' % fc_nodes[0])\n epoch_around = itrt_index * batch_size / total_trainImages\n print('Number of epochs : %f ' % epoch_around)\n plt.plot(np.squeeze(costs))\n plt.ylabel('cost')\n plt.xlabel('iterations (per tens)')\n plt.title('Learning rate =' + str(learn_rate))\n plt.show()\n if i % 100 == 0:\n train_accuracy = accuracy.eval(feed_dict={x: batch[0],\n y_: batch[1], keep_prob: 1.0, training: False})\n print('step %d, training accuracy %g' % (i, train_accuracy)\n )\n _, loss = sess.run([train_step, cross_entropy], feed_dict={\n x: batch[0], y_: batch[1], keep_prob: dropout_prob,\n training: True})\n iteration_cost = 0\n num_minibatches = int(total_trainImages / 
batch_size)\n iteration_cost += loss / num_minibatches\n costs.append(iteration_cost)\n if i % 100 == 0:\n print('Loss: ', loss)\n Accuracy = 0\n training_time = time.time() - start_time\n print('Training Time: %f secs' % training_time)\n if Dataset == '1':\n if file_name == '5th_fold':\n num_test = 13154\n else:\n num_test = 13155\n elif Dataset == '2':\n num_test = 503\n elif Dataset == '3':\n num_test = 400\n print(num_test)\n for img_index in range(num_test):\n t_image = np.array(Test_Images[img_index, :]).reshape(1, 784)\n t_label = np.array(Test_Labels[img_index, :]).reshape(1,\n number_of_classes)\n test_acc = accuracy.eval(feed_dict={x: t_image, y_: t_label,\n keep_prob: 1.0, training: False})\n Accuracy += test_acc\n wrong, predicted = sess.run([wrong_prediction_printop,\n predicted_labels_printop], feed_dict={x: t_image, y_:\n t_label, keep_prob: 1.0, training: False})\n if img_index <= 3:\n b1, b2, b3, t1, t2, t3, t1_b, t2_b, t3_b = sess.run([\n b1_conv_printop, b2_conv_printop, b3_conv_printop,\n t1_conv_printop, t2_conv_printop, t3_conv_printop,\n t1_b_conv_printop, t2_b_conv_printop,\n t3_b_conv_printop], feed_dict={x: t_image, y_:\n t_label, keep_prob: 1.0, training: False})\n if img_index == 0:\n b1_list = b1\n b2_list = b2\n b3_list = b3\n t1_list = t1\n t2_list = t2\n t3_list = t3\n t1_b_list = t1_b\n t2_b_list = t2_b\n t3_b_list = t3_b\n else:\n b1_list = np.append(b1_list, b1, axis=0)\n b2_list = np.append(b2_list, b2, axis=0)\n b3_list = np.append(b3_list, b3, axis=0)\n t1_list = np.append(t1_list, t1, axis=0)\n t2_list = np.append(t2_list, t2, axis=0)\n t3_list = np.append(t3_list, t3, axis=0)\n t1_b_list = np.append(t1_b_list, t1_b, axis=0)\n t2_b_list = np.append(t2_b_list, t2_b, axis=0)\n t3_b_list = np.append(t3_b_list, t3_b, axis=0)\n if img_index == 0:\n wrong_list = wrong\n predicted_list = predicted\n else:\n wrong_list = np.append(wrong_list, wrong, axis=0)\n predicted_list = np.append(predicted_list, predicted,\n axis=0)\n Accuracy = 
Accuracy / num_test\n print('Average test accuracy: %g' % Accuracy)\n accuracy_list.append(Accuracy)\n list_of_predicted_list.append(predicted_list)\n elapsed_time = time.time() - start_time\n print('Elapsed Time: %f secs' % elapsed_time)\n print(\n 'Batch Size & Iteration & Total Train Imgs : %d & %d & %d' %\n (batch_size, itrt_index, total_trainImages))\n print('learning_rate : %g ' % learn_rate)\n print('1st conv FMaps : %d ' % fm1)\n print('number of layers in dense block : %d ' % num_layers)\n print('growth rate(k_fm) : %d ' % k_fm)\n print('filter size : %d ' % fs)\n print('bottleneck : %d' % bottleneck)\n print('dropout prob : %g ' % dropout_prob)\n print('data_type :', data_type)\n print('file_name :', file_name)\n print('FC nodes : %d' % fc_nodes[0])\n epoch_around = math.ceil(iteration * batch_size / total_trainImages\n )\n if epoch_around == 51:\n epoch_around = 50\n print('Number of epochs : %f ' % epoch_around)\n plt.plot(np.squeeze(costs))\n plt.ylabel('cost')\n plt.xlabel('iterations (per tens)')\n plt.title('Learning rate =' + str(learn_rate))\n plt.show()\n sio.savemat('D' + Dataset + '_' + file_name + '_' + str(\n epoch_around) + 'ep_' + data_type +\n '_predicted_labels_list.mat', {'wrong_list': wrong_list,\n 'predicted_list': predicted_list, 'Target_labels':\n Target_labels, 'accuracy_list': accuracy_list,\n 'list_of_predicted_list': list_of_predicted_list, 'costs':\n costs, 'b1_list': b1_list, 'b2_list': b2_list, 'b3_list':\n b3_list, 't1_list': t1_list, 't2_list': t2_list, 't3_list':\n t3_list, 't1_b_list': t1_b_list, 't2_b_list': t2_b_list,\n 't3_b_list': t3_b_list})\n\n\nclass MyModel:\n num_layers = 4\n k_fm = 24\n fs = 3\n fm1 = 32\n bottleneck = 4\n dropout_prob = 0.8\n batch_size = [16]\n learn_rate = 0.001\n num_of_test = 40\n\n\nmodel = MyModel()\ndensenet('1', 'org', 1, model)\ndensenet('1', 'org', 2, model)\ndensenet('1', 'org', 3, model)\ndensenet('1', 'org', 4, model)\ndensenet('1', 'org', 5, model)\ndensenet('1', 'aug', 1, 
model)\ndensenet('1', 'aug', 2, model)\ndensenet('1', 'aug', 3, model)\ndensenet('1', 'aug', 4, model)\ndensenet('1', 'aug', 5, model)\ndensenet('2', 'org', 1, model)\ndensenet('2', 'org', 2, model)\ndensenet('2', 'org', 3, model)\ndensenet('2', 'org', 4, model)\ndensenet('2', 'org', 5, model)\ndensenet('2', 'aug', 1, model)\ndensenet('2', 'aug', 2, model)\ndensenet('2', 'aug', 3, model)\ndensenet('2', 'aug', 4, model)\ndensenet('2', 'aug', 5, model)\ndensenet('3', 'org', 1, model)\ndensenet('3', 'org', 2, model)\ndensenet('3', 'org', 3, model)\ndensenet('3', 'org', 4, model)\ndensenet('3', 'org', 5, model)\ndensenet('3', 'aug', 1, model)\ndensenet('3', 'aug', 2, model)\ndensenet('3', 'aug', 3, model)\ndensenet('3', 'aug', 4, model)\ndensenet('3', 'aug', 5, model)\n", "step-5": "def densenet(D,DT,F,model):\r\n import scipy.io as sio\r\n import time\r\n import os\r\n import math\r\n import numpy as np\r\n import matplotlib.pyplot as plt\r\n\r\n\r\n Dataset = D\r\n if DT == 'org':\r\n data_type = 'original'\r\n else:\r\n data_type = 'augmented'\r\n\r\n fs = model.fs\r\n fm1 = model.fm1\r\n batch_size = model.batch_size[0] \r\n learn_rate = model.learn_rate\r\n num_layers = model.num_layers\r\n k_fm = model.k_fm\r\n bottleneck = model.bottleneck\r\n dropout_prob = model.dropout_prob\r\n num_of_test = model.num_of_test\r\n\r\n ###############\r\n # load training / testing set from CrossVal folder,\r\n # names for training set, 'D1_1st_fold_train.mat', 'Augmented_D1_1st_fold_train.mat'\r\n # name for testing set, 'D1_1st_fold_test.mat'\r\n ###############\r\n if F == 1:\r\n file_name = '1st_fold'\r\n elif F == 2:\r\n file_name = '2nd_fold'\r\n elif F == 3:\r\n file_name = '3rd_fold'\r\n elif F == 4:\r\n file_name = '4th_fold'\r\n elif F == 5:\r\n file_name = '5th_fold'\r\n path = os.path.join('CrossVal', 'D'+Dataset)\r\n print(\"path \" ,path)\r\n if data_type == 'original':\r\n Train =sio.loadmat(os.path.join(path, 'D'+Dataset+'_'+file_name+'_train.mat'))\r\n else:\r\n 
Train =sio.loadmat(os.path.join(path, 'Augmented_D'+Dataset+'_'+file_name+'_train.mat'))\r\n Test = sio.loadmat(os.path.join(path, 'D'+Dataset+'_'+file_name+'_test.mat'))\r\n\r\n if Dataset == '1':\r\n number_of_classes = 24\r\n num_of_ep = 50\r\n num_of_test = 20\r\n if data_type == 'augmented':\r\n train_imgs = 526190\r\n else:\r\n train_imgs = 52619\r\n iteration = math.ceil((num_of_ep * train_imgs) / batch_size)\r\n elif Dataset == '2':\r\n number_of_classes = 36\r\n num_of_ep = 200\r\n if data_type == 'augmented':\r\n train_imgs = 20120\r\n else:\r\n train_imgs = 2012\r\n iteration = math.ceil((num_of_ep * train_imgs) / batch_size)\r\n else:\r\n number_of_classes = 10\r\n num_of_ep = 200\r\n if data_type == 'augmented':\r\n train_imgs = 16000\r\n else:\r\n train_imgs = 1600\r\n iteration = math.ceil((num_of_ep * train_imgs) / batch_size)\r\n\r\n iteration_to_display = int(iteration / num_of_test) \r\n list_to_display = []\r\n for i in range(num_of_test):\r\n if i !=num_of_test:\r\n list_to_display.append(int(iteration_to_display*(i+1)))\r\n del i\r\n\r\n\r\n total_fm_Block_1 = fm1+(num_layers*k_fm)\r\n total_fm_Block_2 = total_fm_Block_1+(num_layers*k_fm)\r\n total_fm_Block_3 = total_fm_Block_2+(num_layers*k_fm)\r\n fc_nodes = [total_fm_Block_3 ]\r\n\r\n\r\n Train_Images = Train['trainImages']\r\n Train_Labels = Train['trainLabels2']\r\n total_trainImages = len(Train_Images[0,2])\r\n print(total_trainImages)\r\n Train_Images = Train_Images.reshape(784,total_trainImages).transpose().astype('float32')\r\n Train_Labels = Train_Labels.transpose().astype('float64')\r\n\r\n\r\n Test_Images = Test['testImages']\r\n Test_Labels = Test['testLabels2']\r\n total_testImages = len(Test_Images[0,2])\r\n Test_Images = Test_Images.reshape(784,total_testImages).transpose().astype('float32')\r\n Test_Labels = Test_Labels.transpose().astype('float64')\r\n Target_labels = np.argmax(Test_Labels,axis=1)\r\n\r\n del Test\r\n del Train\r\n\r\n import tensorflow as tf\r\n 
tf.reset_default_graph()\r\n g = tf.Graph()\r\n with g.as_default():\r\n tf.set_random_seed(1)\r\n\r\n def weight_variable(shape,n):\r\n initial = tf.truncated_normal(shape, stddev=0.1)\r\n return tf.Variable(initial,name=n)\r\n\r\n def bias_variable(shape,n):\r\n initial = tf.constant(0.1, shape=shape)\r\n return tf.Variable(initial,name=n)\r\n\r\n def avg_pool(input, s):\r\n return tf.nn.avg_pool(input, [ 1, s, s, 1 ], [1, s, s, 1 ], 'SAME')\r\n\r\n def max_pool(input, s):\r\n return tf.nn.max_pool(input, [ 1, s, s, 1 ], [1, s, s, 1 ], 'SAME')\r\n\r\n def conv2d_1(input, in_features, out_features, kernel_size, name=\"W\", with_bias=False):\r\n W = weight_variable([ kernel_size, kernel_size, in_features, out_features], name)\r\n conv = tf.nn.conv2d(input, W, [ 1, 1, 1, 1 ], padding='SAME')\r\n if with_bias:\r\n return conv + bias_variable([ out_features ])\r\n return conv\r\n\r\n def batch_activ_conv(current, in_features, out_features, kernel_size, is_training, keep_prob, idx, scope='conv_block'):\r\n with tf.variable_scope(scope):\r\n current = tf.layers.batch_normalization(current, scale=True, training=is_training)\r\n current = tf.nn.relu(current)\r\n current = conv2d_1(current, in_features, out_features, kernel_size, name=\"W\"+str(idx))\r\n current = tf.nn.dropout(current, keep_prob)\r\n return current\r\n\r\n def block(input, layers, in_features, growth, is_training, keep_prob, name=\"Block_\"):\r\n with tf.name_scope(name):\r\n with tf.variable_scope(name):\r\n current = input\r\n features = in_features\r\n for idx in range(layers):\r\n tmp = batch_activ_conv(current, features, growth, fs, is_training, keep_prob, idx+1, scope='conv_block_'+str(idx+1))\r\n current = tf.concat((current, tmp), axis=3)\r\n features += growth\r\n return current, features\r\n\r\n\r\n x = tf.placeholder(tf.float32, shape=[None, 784])\r\n y_ = tf.placeholder(tf.float32, shape=[None, number_of_classes])\r\n x_image = tf.reshape(x, [-1, 28, 28, 1])\r\n keep_prob = 
tf.placeholder(tf.float32)\r\n training = tf.placeholder(tf.bool)\r\n\r\n\r\n current = conv2d_1(x_image, 1, fm1, fs, name=\"W1\", with_bias=False)\r\n\r\n current, features = block(current, num_layers, fm1, k_fm, training, keep_prob, name=\"Block_1\")\r\n b1_conv_printop = tf.Print(current, [current])\r\n with tf.name_scope(\"transition_lyr\"):\r\n #current = batch_activ_conv(current, features, features, 1, training, keep_prob, 1, scope='Transition_layer_1')\r\n current = batch_activ_conv(current, features, bottleneck*k_fm, 1, training, keep_prob, 1, scope='Transition_layer_1')\r\n t1_b_conv_printop = tf.Print(current, [current])\r\n current = batch_activ_conv(current, bottleneck*k_fm, features, fs, training, keep_prob, 1, scope='Transition_layer_1_1')\r\n t1_conv_printop = tf.Print(current, [current])\r\n current = max_pool(current, 2)\r\n #current = avg_pool(current, 2)\r\n current, features = block(current, num_layers, features, k_fm, training, keep_prob, name=\"Block_2\")\r\n b2_conv_printop = tf.Print(current, [current])\r\n with tf.name_scope(\"transition_lyr_2\"):\r\n #current = batch_activ_conv(current, features, features, 1, training, keep_prob, 1, scope='Transition_layer_2')\r\n current = batch_activ_conv(current, features, bottleneck*k_fm, 1, training, keep_prob, 1, scope='Transition_layer_2')\r\n t2_b_conv_printop = tf.Print(current, [current])\r\n current = batch_activ_conv(current, bottleneck*k_fm, features, fs, training, keep_prob, 1, scope='Transition_layer_2_1')\r\n t2_conv_printop = tf.Print(current, [current])\r\n current = max_pool(current, 2)\r\n #current = avg_pool(current, 2)\r\n current, features = block(current, num_layers, features, k_fm, training, keep_prob, name=\"Block_3\")\r\n b3_conv_printop = tf.Print(current, [current])\r\n with tf.name_scope(\"transition_lyr_3\"):\r\n #current = batch_activ_conv(current, features, features, 1, training, keep_prob, 1, scope='Transition_layer_3')\r\n current = batch_activ_conv(current, features, 
bottleneck*k_fm, 1, training, keep_prob, 1, scope='Transition_layer_3')\r\n t3_b_conv_printop = tf.Print(current, [current])\r\n current = batch_activ_conv(current, bottleneck*k_fm, features, fs, training, keep_prob, 1, scope='Transition_layer_3_1')\r\n t3_conv_printop = tf.Print(current, [current])\r\n current = avg_pool(current, 7)\r\n current = tf.reshape(current, [tf.shape(current)[0], -1])\r\n\r\n with tf.name_scope(\"Dense_Last_lyr\"):\r\n W_fc3 = weight_variable([fc_nodes[0], number_of_classes],\"w_fc3\")\r\n b_fc3 = bias_variable([number_of_classes],\"b_fc3\")\r\n y_conv = tf.matmul(current, W_fc3) + b_fc3\r\n prediction_prob = tf.nn.softmax(y_conv)\r\n prediction_prob_printop = tf.Print(prediction_prob, [prediction_prob])\r\n\r\n with tf.name_scope(\"Xent\"):\r\n cross_entropy = tf.reduce_mean(\r\n tf.nn.softmax_cross_entropy_with_logits(labels=y_, logits=y_conv))\r\n\r\n with tf.name_scope(\"train\"):\r\n extra_update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)\r\n with tf.control_dependencies(extra_update_ops):\r\n train_step = tf.train.AdamOptimizer(learn_rate).minimize(cross_entropy)\r\n\r\n with tf.name_scope(\"accuracy\"):\r\n correct_prediction = tf.equal(tf.argmax(y_conv, 1), tf.argmax(y_, 1))\r\n accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))\r\n wrong_prediction = tf.not_equal(tf.argmax(y_conv, 1), tf.argmax(y_, 1))\r\n wrong_prediction_printop = tf.Print(wrong_prediction, [wrong_prediction])\r\n predicted_labels = tf.argmax(y_conv, 1)\r\n predicted_labels_printop = tf.Print(predicted_labels, [predicted_labels])\r\n\r\n index = 0\r\n index_end = index + batch_size\r\n remaining = 0\r\n start_time = time.time()\r\n costs = []\r\n accuracy_list = []\r\n list_of_predicted_list = []\r\n\r\n with tf.Session() as sess:\r\n sess.run(tf.global_variables_initializer(),tf.set_random_seed(0))\r\n for i in range(iteration):\r\n if index_end > total_trainImages:\r\n remaining = total_trainImages - (index_end-batch_size) \r\n images = 
Train_Images[(index_end-batch_size):total_trainImages, :]\r\n labels = Train_Labels[(index_end-batch_size):total_trainImages, :]\r\n index = 0\r\n index_end = index + batch_size - remaining\r\n images = np.vstack((images, Train_Images[index:index_end, :]))\r\n labels = np.vstack((labels, Train_Labels[index:index_end, :]))\r\n batch = (images, labels)\r\n index = index_end\r\n index_end = index + batch_size\r\n else:\r\n batch = (Train_Images[index:index_end, :], Train_Labels[index:index_end, :])\r\n index = index + batch_size \r\n index_end = index_end + batch_size\r\n\r\n if i in list_to_display:\r\n elapsed_time = time.time() - start_time\r\n print('Elapsed Time Before for loop: %f secs' % elapsed_time)\r\n Accuracy = 0\r\n itrt_index = i\r\n print('debug: %d & %d' % (iteration,i))\r\n\r\n if Dataset == '1':\r\n if file_name == '5th_fold':\r\n num_test = 13154\r\n else:\r\n num_test = 13155\r\n elif Dataset == '2':\r\n num_test = 503\r\n elif Dataset == '3':\r\n num_test = 400\r\n print(num_test)\r\n\r\n for img_index in range(num_test):\r\n t_image = np.array(Test_Images[img_index,:]).reshape(1,784)\r\n t_label = np.array(Test_Labels[img_index,:]).reshape(1,number_of_classes)\r\n test_acc = accuracy.eval(feed_dict={\r\n x: t_image, y_: t_label,\r\n keep_prob: 1.0, training:False})\r\n Accuracy += test_acc\r\n wrong, predicted, prediction_prob = sess.run([wrong_prediction_printop, \r\n predicted_labels_printop,prediction_prob_printop], \r\n feed_dict={\r\n x: t_image, y_: t_label, \r\n keep_prob: 1.0, training:False})\r\n if img_index <= 3:\r\n b1, b2, b3, t1, t2, t3, t1_b, t2_b, t3_b = sess.run([b1_conv_printop, b2_conv_printop, b3_conv_printop,\r\n t1_conv_printop,t2_conv_printop, t3_conv_printop, t1_b_conv_printop, t2_b_conv_printop, t3_b_conv_printop], \r\n feed_dict={\r\n x: t_image, y_: t_label, \r\n keep_prob: 1.0, training:False})\r\n if img_index == 0:\r\n b1_list = b1\r\n b2_list = b2\r\n b3_list = b3\r\n t1_list = t1\r\n t2_list = t2\r\n t3_list = 
t3\r\n t1_b_list = t1_b\r\n t2_b_list = t2_b\r\n t3_b_list = t3_b\r\n else:\r\n b1_list = np.append(b1_list,b1,axis=0)\r\n b2_list = np.append(b2_list,b2,axis=0)\r\n b3_list = np.append(b3_list,b3,axis=0)\r\n t1_list = np.append(t1_list,t1,axis=0)\r\n t2_list = np.append(t2_list,t2,axis=0)\r\n t3_list = np.append(t3_list,t3,axis=0)\r\n t1_b_list = np.append(t1_b_list,t1_b,axis=0)\r\n t2_b_list = np.append(t2_b_list,t2_b,axis=0)\r\n t3_b_list = np.append(t3_b_list,t3_b,axis=0) \r\n if img_index == 0 :\r\n wrong_list_1 = wrong\r\n predicted_list_1 = predicted\r\n prediction_prob_1 = prediction_prob\r\n else:\r\n wrong_list_1 = np.append(wrong_list_1,wrong,axis=0)\r\n predicted_list_1 = np.append(predicted_list_1,predicted,axis=0)\r\n prediction_prob_1 = np.append(prediction_prob_1, prediction_prob)\r\n\r\n\r\n Accuracy = Accuracy/num_test\r\n accuracy_list.append(Accuracy)\r\n list_of_predicted_list.append(predicted_list_1)\r\n print('Average test accuracy: %g' % Accuracy)\r\n epoch_around = math.ceil((itrt_index * batch_size) / total_trainImages)\r\n sio.savemat('D'+Dataset+'_'+file_name+'_'+str(epoch_around)+'ep_'+data_type+'_predicted_labels_list.mat', {'wrong_list':wrong_list_1, 'predicted_list': predicted_list_1, 'Target_labels':Target_labels, \r\n 'prediction_prob':prediction_prob, 'b1_list':b1_list, 'b2_list':b2_list, 'b3_list':b3_list, 't1_list':t1_list,\r\n 't2_list':t2_list, 't3_list':t3_list, 't1_b_list':t1_b_list, 't2_b_list':t2_b_list, 't3_b_list':t3_b_list})\r\n\r\n elapsed_time = time.time() - start_time\r\n print('Elapsed Time: %f secs' % elapsed_time)\r\n print('Batch Size & Iteration & Total Train Imgs : %d & %d & %d' % (batch_size, itrt_index, total_trainImages)) \r\n print('learning_rate : %g ' % learn_rate)\r\n print('1st conv FMaps : %d ' % fm1) \r\n print('number of layers in dense block : %d ' % num_layers) \r\n print('growth rate(k_fm) : %d ' % k_fm)\r\n print('filter size : %d ' % fs)\r\n print('bottleneck : %d' % bottleneck)\r\n 
print('dropout prob : %g ' % dropout_prob)\r\n print('data_type :', data_type)\r\n\r\n print('file_name :', file_name)\r\n\r\n print('FC nodes : %d' % fc_nodes[0])\r\n\r\n epoch_around = (itrt_index * batch_size) / total_trainImages\r\n print('Number of epochs : %f ' % epoch_around)\r\n\r\n # plot the cost\r\n plt.plot(np.squeeze(costs))\r\n plt.ylabel('cost')\r\n plt.xlabel('iterations (per tens)')\r\n plt.title(\"Learning rate =\" + str(learn_rate))\r\n plt.show()\r\n\r\n if i % 100 == 0:\r\n train_accuracy = accuracy.eval(feed_dict={\r\n x: batch[0], y_: batch[1], \r\n keep_prob: 1.0, training:False})\r\n print('step %d, training accuracy %g' % (i, train_accuracy))\r\n _, loss = sess.run([train_step, cross_entropy], \r\n feed_dict={x: batch[0], y_: batch[1], \r\n keep_prob: dropout_prob, training:True})\r\n\r\n iteration_cost = 0 # Defines a cost related to an epoch\r\n num_minibatches = int(total_trainImages / batch_size) # number of minibatches of size minibatch_size in the train set \r\n iteration_cost += loss / num_minibatches\r\n costs.append(iteration_cost)\r\n if i % 100 == 0:\r\n print ('Loss: ',loss)\r\n\r\n\r\n Accuracy = 0\r\n training_time = time.time() - start_time\r\n print('Training Time: %f secs' % training_time)\r\n\r\n\r\n if Dataset == '1':\r\n if file_name == '5th_fold':\r\n num_test = 13154\r\n else:\r\n num_test = 13155\r\n elif Dataset == '2':\r\n num_test = 503\r\n elif Dataset == '3':\r\n num_test = 400\r\n print(num_test)\r\n\r\n for img_index in range(num_test):\r\n t_image = np.array(Test_Images[img_index,:]).reshape(1,784)\r\n t_label = np.array(Test_Labels[img_index,:]).reshape(1,number_of_classes)\r\n test_acc = accuracy.eval(feed_dict={\r\n x: t_image, y_: t_label,\r\n keep_prob: 1.0, training:False})\r\n Accuracy += test_acc\r\n wrong, predicted = sess.run([wrong_prediction_printop, predicted_labels_printop], feed_dict={\r\n x: t_image, y_: t_label, \r\n keep_prob: 1.0, training:False})\r\n if img_index <= 3:\r\n b1, b2, b3, t1, 
t2, t3, t1_b, t2_b, t3_b = sess.run([b1_conv_printop, b2_conv_printop, b3_conv_printop,\r\n t1_conv_printop,t2_conv_printop, t3_conv_printop, t1_b_conv_printop, t2_b_conv_printop, t3_b_conv_printop], \r\n feed_dict={\r\n x: t_image, y_: t_label, \r\n keep_prob: 1.0, training:False})\r\n if img_index == 0:\r\n b1_list = b1\r\n b2_list = b2\r\n b3_list = b3\r\n t1_list = t1\r\n t2_list = t2\r\n t3_list = t3\r\n t1_b_list = t1_b\r\n t2_b_list = t2_b\r\n t3_b_list = t3_b\r\n else:\r\n b1_list = np.append(b1_list,b1,axis=0)\r\n b2_list = np.append(b2_list,b2,axis=0)\r\n b3_list = np.append(b3_list,b3,axis=0)\r\n t1_list = np.append(t1_list,t1,axis=0)\r\n t2_list = np.append(t2_list,t2,axis=0)\r\n t3_list = np.append(t3_list,t3,axis=0)\r\n t1_b_list = np.append(t1_b_list,t1_b,axis=0)\r\n t2_b_list = np.append(t2_b_list,t2_b,axis=0)\r\n t3_b_list = np.append(t3_b_list,t3_b,axis=0) \r\n if img_index == 0 :\r\n wrong_list = wrong\r\n predicted_list = predicted\r\n else:\r\n wrong_list = np.append(wrong_list,wrong,axis=0)\r\n predicted_list = np.append(predicted_list,predicted,axis=0)\r\n\r\n\r\n Accuracy = Accuracy/num_test\r\n print('Average test accuracy: %g' % Accuracy)\r\n accuracy_list.append(Accuracy)\r\n list_of_predicted_list.append(predicted_list)\r\n\r\n elapsed_time = time.time() - start_time\r\n print('Elapsed Time: %f secs' % elapsed_time)\r\n print('Batch Size & Iteration & Total Train Imgs : %d & %d & %d' % (batch_size, itrt_index, total_trainImages)) \r\n print('learning_rate : %g ' % learn_rate)\r\n print('1st conv FMaps : %d ' % fm1) \r\n print('number of layers in dense block : %d ' % num_layers) \r\n print('growth rate(k_fm) : %d ' % k_fm)\r\n print('filter size : %d ' % fs)\r\n print('bottleneck : %d' % bottleneck)\r\n print('dropout prob : %g ' % dropout_prob)\r\n print('data_type :', data_type)\r\n\r\n print('file_name :', file_name)\r\n\r\n print('FC nodes : %d' % fc_nodes[0])\r\n\r\n epoch_around = math.ceil((iteration * batch_size) / 
total_trainImages)\r\n if epoch_around == 51:\r\n epoch_around = 50\r\n print('Number of epochs : %f ' % epoch_around)\r\n\r\n\r\n # plot the cost\r\n plt.plot(np.squeeze(costs))\r\n plt.ylabel('cost')\r\n plt.xlabel('iterations (per tens)')\r\n plt.title(\"Learning rate =\" + str(learn_rate))\r\n plt.show()\r\n\r\n sio.savemat('D'+Dataset+'_'+file_name+'_'+str(epoch_around)+'ep_'+data_type+'_predicted_labels_list.mat', {'wrong_list':wrong_list, 'predicted_list': predicted_list, 'Target_labels':Target_labels, 'accuracy_list':accuracy_list, 'list_of_predicted_list':list_of_predicted_list, 'costs':costs, 'b1_list':b1_list, 'b2_list':b2_list, 'b3_list':b3_list, 't1_list':t1_list,\r\n 't2_list':t2_list, 't3_list':t3_list, 't1_b_list':t1_b_list, 't2_b_list':t2_b_list, 't3_b_list':t3_b_list})\r\n \r\n \r\nclass MyModel:\r\n num_layers = 4\r\n k_fm = 24\r\n fs = 3\r\n fm1 = 32\r\n bottleneck = 4\r\n dropout_prob = 0.8\r\n batch_size = [16]\r\n learn_rate = 0.001\r\n num_of_test = 40\r\n\r\nmodel = MyModel()\r\n \r\n\r\ndensenet('1','org',1,model)\r\ndensenet('1','org',2,model)\r\ndensenet('1','org',3,model)\r\ndensenet('1','org',4,model)\r\ndensenet('1','org',5,model)\r\n\r\ndensenet('1','aug',1,model)\r\ndensenet('1','aug',2,model)\r\ndensenet('1','aug',3,model)\r\ndensenet('1','aug',4,model)\r\ndensenet('1','aug',5,model)\r\n\r\ndensenet('2','org',1,model)\r\ndensenet('2','org',2,model)\r\ndensenet('2','org',3,model)\r\ndensenet('2','org',4,model)\r\ndensenet('2','org',5,model)\r\n \r\ndensenet('2','aug',1,model)\r\ndensenet('2','aug',2,model)\r\ndensenet('2','aug',3,model)\r\ndensenet('2','aug',4,model)\r\ndensenet('2','aug',5,model)\r\n\r\ndensenet('3','org',1,model)\r\ndensenet('3','org',2,model)\r\ndensenet('3','org',3,model)\r\ndensenet('3','org',4,model)\r\ndensenet('3','org',5,model)\r\n\r\ndensenet('3','aug',1,model)\r\ndensenet('3','aug',2,model)\r\ndensenet('3','aug',3,model)\r\ndensenet('3','aug',4,model)\r\ndensenet('3','aug',5,model)\r\n", "step-ids": [ 1, 
2, 3, 5, 6 ] }
[ 1, 2, 3, 5, 6 ]
#!/usr/bin/evn python #-*-coding:utf8 -*- import os, sys, json class settings(object): filename = '' config = {} def __init__(self): self.DEBUG = os.environ.get('RdsMonitor_DEBUG', 0) def get_settings(self): """Parses the settings from redis-live.conf. """ # TODO: Consider YAML. Human writable, machine readable. with open(self.filename) as fp: try: return json.load(fp) except Exception, e: if self.DEBUG: print >>sys.stderr, 'get_settings exception:', e return {} def get_redis_servers(self): if self.DEBUG: print >>sys.stderr, "get_redis_servers config:%s"%self.config return self.config.get("RedisServers", '') def get_redis_stats_server(self): if self.DEBUG: print >>sys.stderr, "get_redis_stats_server config:%s"%self.config return self.config.get("RedisStatsServer", '') def get_data_store_type(self): if self.DEBUG: print >>sys.stderr, "get_redis_stats_server config:%s"%self.config return self.config.get("DataStoreType", '') def get_sqlite_stats_store(self): if self.DEBUG: print >>sys.stderr, "get_redis_stats_server config:%s"%self.config return self.config.get("SqliteStatsStore", '')
normal
{ "blob_id": "2c960685eaa14861c1c5b3ddb38b366a3e0e8e86", "index": 1339, "step-1": "#!/usr/bin/evn python\n#-*-coding:utf8 -*-\n\n\nimport os, sys, json\n\nclass settings(object):\n\tfilename = ''\n\tconfig = {}\n\t\n\tdef __init__(self):\n\t\tself.DEBUG = os.environ.get('RdsMonitor_DEBUG', 0)\n\t\t\n\tdef get_settings(self):\n\t\t\"\"\"Parses the settings from redis-live.conf.\n\t\t\"\"\"\n\n\t\t# TODO: Consider YAML. Human writable, machine readable.\n\t\twith open(self.filename) as fp:\n\t\t\ttry:\n\t\t\t\treturn json.load(fp)\n\t\t\texcept Exception, e:\n\t\t\t\tif self.DEBUG:\n\t\t\t\t\tprint >>sys.stderr, 'get_settings exception:', e\n\t\t\t\treturn {}\n\n\tdef get_redis_servers(self):\t\t\n\t\tif self.DEBUG:\n\t\t\tprint >>sys.stderr, \"get_redis_servers config:%s\"%self.config\n\t\treturn self.config.get(\"RedisServers\", '')\n\t\n\t\n\tdef get_redis_stats_server(self):\n\t\tif self.DEBUG:\n\t\t\tprint >>sys.stderr, \"get_redis_stats_server config:%s\"%self.config\n\t\treturn self.config.get(\"RedisStatsServer\", '')\n\t\n\t\n\tdef get_data_store_type(self):\n\t\tif self.DEBUG:\n\t\t\tprint >>sys.stderr, \"get_redis_stats_server config:%s\"%self.config\n\t\treturn self.config.get(\"DataStoreType\", '')\n\t\n\t\n\tdef get_sqlite_stats_store(self):\n\t\tif self.DEBUG:\n\t\t\tprint >>sys.stderr, \"get_redis_stats_server config:%s\"%self.config\n\t\treturn self.config.get(\"SqliteStatsStore\", '')", "step-2": null, "step-3": null, "step-4": null, "step-5": null, "step-ids": [ 0 ] }
[ 0 ]
import sqlite3 import argparse import json import index_db from collections import defaultdict def query_doc(cursor, lang, title): cursor.execute(index_db.select_lang_title, (lang, title)) result = cursor.fetchone() if not result: return None return { 'lang': result[0], 'doc_id': result[1], 'doc_path': result[2], # 'url': result[3], # I don't think url is needed here... 'title': result[4], 'begin': result[5], 'end': result[6] } def locate_single_topic_texts(lang_title_dict, cursor): same_topic = (query_doc(cursor, l, t) for l, t in lang_title_dict.items()) return sorted( (i for i in same_topic if i), key=lambda x: x['lang'] ) def locate_interlanguage_texts(file_path, db_path): with open(file_path, 'rt') as f: interlangauge = json.load(f) with sqlite3.connect(db_path) as conn: c = conn.cursor() return [locate_single_topic_texts(pairs, c) for pairs in interlangauge] if __name__ == '__main__': parser = argparse.ArgumentParser( description='Locate same topic texts over multiple languages.') parser.add_argument('--db', dest='db_path', default=index_db.default_path, help='a sqlite database file generated by index.py') parser.add_argument('--input', dest='input_path', default='interlanguage_topics.json', help='a json file containing sets of topics over ' 'multiple languages') parser.add_argument('--output', dest='output_path', default='interlanguage_location.json', help='a json file locating same topic texts over ' 'multiple languages') args = parser.parse_args() location_infos = locate_interlanguage_texts(args.input_path, args.db_path) with open(args.output_path, 'wt') as f: json.dump(location_infos, f)
normal
{ "blob_id": "95e7e025660e71cbdf6a6a0812964fc26d4beec0", "index": 9657, "step-1": "<mask token>\n\n\ndef query_doc(cursor, lang, title):\n cursor.execute(index_db.select_lang_title, (lang, title))\n result = cursor.fetchone()\n if not result:\n return None\n return {'lang': result[0], 'doc_id': result[1], 'doc_path': result[2],\n 'title': result[4], 'begin': result[5], 'end': result[6]}\n\n\ndef locate_single_topic_texts(lang_title_dict, cursor):\n same_topic = (query_doc(cursor, l, t) for l, t in lang_title_dict.items())\n return sorted((i for i in same_topic if i), key=lambda x: x['lang'])\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\ndef query_doc(cursor, lang, title):\n cursor.execute(index_db.select_lang_title, (lang, title))\n result = cursor.fetchone()\n if not result:\n return None\n return {'lang': result[0], 'doc_id': result[1], 'doc_path': result[2],\n 'title': result[4], 'begin': result[5], 'end': result[6]}\n\n\ndef locate_single_topic_texts(lang_title_dict, cursor):\n same_topic = (query_doc(cursor, l, t) for l, t in lang_title_dict.items())\n return sorted((i for i in same_topic if i), key=lambda x: x['lang'])\n\n\ndef locate_interlanguage_texts(file_path, db_path):\n with open(file_path, 'rt') as f:\n interlangauge = json.load(f)\n with sqlite3.connect(db_path) as conn:\n c = conn.cursor()\n return [locate_single_topic_texts(pairs, c) for pairs in interlangauge]\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\ndef query_doc(cursor, lang, title):\n cursor.execute(index_db.select_lang_title, (lang, title))\n result = cursor.fetchone()\n if not result:\n return None\n return {'lang': result[0], 'doc_id': result[1], 'doc_path': result[2],\n 'title': result[4], 'begin': result[5], 'end': result[6]}\n\n\ndef locate_single_topic_texts(lang_title_dict, cursor):\n same_topic = (query_doc(cursor, l, t) for l, t in lang_title_dict.items())\n return sorted((i for i in same_topic if i), key=lambda x: x['lang'])\n\n\ndef 
locate_interlanguage_texts(file_path, db_path):\n with open(file_path, 'rt') as f:\n interlangauge = json.load(f)\n with sqlite3.connect(db_path) as conn:\n c = conn.cursor()\n return [locate_single_topic_texts(pairs, c) for pairs in interlangauge]\n\n\nif __name__ == '__main__':\n parser = argparse.ArgumentParser(description=\n 'Locate same topic texts over multiple languages.')\n parser.add_argument('--db', dest='db_path', default=index_db.\n default_path, help='a sqlite database file generated by index.py')\n parser.add_argument('--input', dest='input_path', default=\n 'interlanguage_topics.json', help=\n 'a json file containing sets of topics over multiple languages')\n parser.add_argument('--output', dest='output_path', default=\n 'interlanguage_location.json', help=\n 'a json file locating same topic texts over multiple languages')\n args = parser.parse_args()\n location_infos = locate_interlanguage_texts(args.input_path, args.db_path)\n with open(args.output_path, 'wt') as f:\n json.dump(location_infos, f)\n", "step-4": "import sqlite3\nimport argparse\nimport json\nimport index_db\nfrom collections import defaultdict\n\n\ndef query_doc(cursor, lang, title):\n cursor.execute(index_db.select_lang_title, (lang, title))\n result = cursor.fetchone()\n if not result:\n return None\n return {'lang': result[0], 'doc_id': result[1], 'doc_path': result[2],\n 'title': result[4], 'begin': result[5], 'end': result[6]}\n\n\ndef locate_single_topic_texts(lang_title_dict, cursor):\n same_topic = (query_doc(cursor, l, t) for l, t in lang_title_dict.items())\n return sorted((i for i in same_topic if i), key=lambda x: x['lang'])\n\n\ndef locate_interlanguage_texts(file_path, db_path):\n with open(file_path, 'rt') as f:\n interlangauge = json.load(f)\n with sqlite3.connect(db_path) as conn:\n c = conn.cursor()\n return [locate_single_topic_texts(pairs, c) for pairs in interlangauge]\n\n\nif __name__ == '__main__':\n parser = argparse.ArgumentParser(description=\n 'Locate same 
topic texts over multiple languages.')\n parser.add_argument('--db', dest='db_path', default=index_db.\n default_path, help='a sqlite database file generated by index.py')\n parser.add_argument('--input', dest='input_path', default=\n 'interlanguage_topics.json', help=\n 'a json file containing sets of topics over multiple languages')\n parser.add_argument('--output', dest='output_path', default=\n 'interlanguage_location.json', help=\n 'a json file locating same topic texts over multiple languages')\n args = parser.parse_args()\n location_infos = locate_interlanguage_texts(args.input_path, args.db_path)\n with open(args.output_path, 'wt') as f:\n json.dump(location_infos, f)\n", "step-5": "import sqlite3\nimport argparse\nimport json\nimport index_db\nfrom collections import defaultdict\n\n\ndef query_doc(cursor, lang, title):\n cursor.execute(index_db.select_lang_title, (lang, title))\n result = cursor.fetchone()\n if not result:\n return None\n return {\n 'lang': result[0],\n 'doc_id': result[1],\n 'doc_path': result[2],\n # 'url': result[3], # I don't think url is needed here...\n 'title': result[4],\n 'begin': result[5],\n 'end': result[6]\n }\n\n\ndef locate_single_topic_texts(lang_title_dict, cursor):\n same_topic = (query_doc(cursor, l, t) for l, t in lang_title_dict.items())\n return sorted(\n (i for i in same_topic if i),\n key=lambda x: x['lang']\n )\n\n\ndef locate_interlanguage_texts(file_path, db_path):\n with open(file_path, 'rt') as f:\n interlangauge = json.load(f)\n\n with sqlite3.connect(db_path) as conn:\n c = conn.cursor()\n return [locate_single_topic_texts(pairs, c) for pairs in interlangauge]\n\n\nif __name__ == '__main__':\n parser = argparse.ArgumentParser(\n description='Locate same topic texts over multiple languages.')\n parser.add_argument('--db', dest='db_path', default=index_db.default_path,\n help='a sqlite database file generated by index.py')\n parser.add_argument('--input', dest='input_path',\n 
default='interlanguage_topics.json',\n help='a json file containing sets of topics over '\n 'multiple languages')\n parser.add_argument('--output', dest='output_path',\n default='interlanguage_location.json',\n help='a json file locating same topic texts over '\n 'multiple languages')\n args = parser.parse_args()\n location_infos = locate_interlanguage_texts(args.input_path, args.db_path)\n with open(args.output_path, 'wt') as f:\n json.dump(location_infos, f)\n", "step-ids": [ 2, 3, 4, 5, 6 ] }
[ 2, 3, 4, 5, 6 ]
<|reserved_special_token_0|> def main(forecast, name, levels, *args, **kwargs): nt = len(forecast) rows = nt / columns + 1 fig = plt.figure(figsize=(18, 10 * float(rows) / columns)) for n, cubes in enumerate(forecast): row = n / columns column = n - row * columns print(row, column) ax = plt.subplot2grid((rows, columns), (row, column)) cube = convert.calc(name, cubes, levels=levels)[0] im = iplt.pcolormesh(cube, *args, **kwargs) add_map() ax = plt.subplot2grid((rows, columns), (row, column + 1)) cbar = plt.colorbar(im, cax=ax, orientation='horizontal') plt.savefig(plotdir + name + '_' + str(levels[0]) + '_' + str(levels[1] [0]) + '.png') return <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> def main(forecast, name, levels, *args, **kwargs): nt = len(forecast) rows = nt / columns + 1 fig = plt.figure(figsize=(18, 10 * float(rows) / columns)) for n, cubes in enumerate(forecast): row = n / columns column = n - row * columns print(row, column) ax = plt.subplot2grid((rows, columns), (row, column)) cube = convert.calc(name, cubes, levels=levels)[0] im = iplt.pcolormesh(cube, *args, **kwargs) add_map() ax = plt.subplot2grid((rows, columns), (row, column + 1)) cbar = plt.colorbar(im, cax=ax, orientation='horizontal') plt.savefig(plotdir + name + '_' + str(levels[0]) + '_' + str(levels[1] [0]) + '.png') return if __name__ == '__main__': forecast = case_studies.generate_season_forecast(2013, 11, 1) name = 'ertel_potential_vorticity' levels = 'air_potential_temperature', [320] main(forecast, name, levels, vmin=0, vmax=10, cmap='cubehelix_r') <|reserved_special_token_1|> <|reserved_special_token_0|> columns = 3 def main(forecast, name, levels, *args, **kwargs): nt = len(forecast) rows = nt / columns + 1 fig = plt.figure(figsize=(18, 10 * float(rows) / columns)) for n, cubes in enumerate(forecast): row = n / columns column = n - row * columns print(row, column) ax = plt.subplot2grid((rows, columns), (row, column)) cube = convert.calc(name, 
cubes, levels=levels)[0] im = iplt.pcolormesh(cube, *args, **kwargs) add_map() ax = plt.subplot2grid((rows, columns), (row, column + 1)) cbar = plt.colorbar(im, cax=ax, orientation='horizontal') plt.savefig(plotdir + name + '_' + str(levels[0]) + '_' + str(levels[1] [0]) + '.png') return if __name__ == '__main__': forecast = case_studies.generate_season_forecast(2013, 11, 1) name = 'ertel_potential_vorticity' levels = 'air_potential_temperature', [320] main(forecast, name, levels, vmin=0, vmax=10, cmap='cubehelix_r') <|reserved_special_token_1|> <|reserved_special_token_0|> import matplotlib.pyplot as plt import iris.plot as iplt from irise import convert from irise.plot.util import add_map from myscripts import plotdir from myscripts.models.um import case_studies columns = 3 def main(forecast, name, levels, *args, **kwargs): nt = len(forecast) rows = nt / columns + 1 fig = plt.figure(figsize=(18, 10 * float(rows) / columns)) for n, cubes in enumerate(forecast): row = n / columns column = n - row * columns print(row, column) ax = plt.subplot2grid((rows, columns), (row, column)) cube = convert.calc(name, cubes, levels=levels)[0] im = iplt.pcolormesh(cube, *args, **kwargs) add_map() ax = plt.subplot2grid((rows, columns), (row, column + 1)) cbar = plt.colorbar(im, cax=ax, orientation='horizontal') plt.savefig(plotdir + name + '_' + str(levels[0]) + '_' + str(levels[1] [0]) + '.png') return if __name__ == '__main__': forecast = case_studies.generate_season_forecast(2013, 11, 1) name = 'ertel_potential_vorticity' levels = 'air_potential_temperature', [320] main(forecast, name, levels, vmin=0, vmax=10, cmap='cubehelix_r') <|reserved_special_token_1|> """Produce a multi-panel figure of each output lead time in a forecast """ import matplotlib.pyplot as plt import iris.plot as iplt from irise import convert from irise.plot.util import add_map from myscripts import plotdir from myscripts.models.um import case_studies columns = 3 def main(forecast, name, levels, *args, 
**kwargs): nt = len(forecast) rows = (nt / columns) + 1 fig = plt.figure(figsize=(18, 10 * float(rows) / columns)) for n, cubes in enumerate(forecast): row = n / columns column = n - row * columns print(row, column) ax = plt.subplot2grid((rows, columns), (row, column)) cube = convert.calc(name, cubes, levels=levels)[0] im = iplt.pcolormesh(cube, *args, **kwargs) add_map() ax = plt.subplot2grid((rows, columns), (row, column + 1)) cbar = plt.colorbar(im, cax=ax, orientation='horizontal') plt.savefig(plotdir + name + '_' + str(levels[0]) + '_' + str(levels[1][0]) + '.png') return if __name__ == '__main__': forecast = case_studies.generate_season_forecast(2013, 11, 1) name = 'ertel_potential_vorticity' levels = ('air_potential_temperature', [320]) main(forecast, name, levels, vmin=0, vmax=10, cmap='cubehelix_r')
flexible
{ "blob_id": "310e6e693cdce6ff71d06eac86214a21bef236d4", "index": 7425, "step-1": "<mask token>\n\n\ndef main(forecast, name, levels, *args, **kwargs):\n nt = len(forecast)\n rows = nt / columns + 1\n fig = plt.figure(figsize=(18, 10 * float(rows) / columns))\n for n, cubes in enumerate(forecast):\n row = n / columns\n column = n - row * columns\n print(row, column)\n ax = plt.subplot2grid((rows, columns), (row, column))\n cube = convert.calc(name, cubes, levels=levels)[0]\n im = iplt.pcolormesh(cube, *args, **kwargs)\n add_map()\n ax = plt.subplot2grid((rows, columns), (row, column + 1))\n cbar = plt.colorbar(im, cax=ax, orientation='horizontal')\n plt.savefig(plotdir + name + '_' + str(levels[0]) + '_' + str(levels[1]\n [0]) + '.png')\n return\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\ndef main(forecast, name, levels, *args, **kwargs):\n nt = len(forecast)\n rows = nt / columns + 1\n fig = plt.figure(figsize=(18, 10 * float(rows) / columns))\n for n, cubes in enumerate(forecast):\n row = n / columns\n column = n - row * columns\n print(row, column)\n ax = plt.subplot2grid((rows, columns), (row, column))\n cube = convert.calc(name, cubes, levels=levels)[0]\n im = iplt.pcolormesh(cube, *args, **kwargs)\n add_map()\n ax = plt.subplot2grid((rows, columns), (row, column + 1))\n cbar = plt.colorbar(im, cax=ax, orientation='horizontal')\n plt.savefig(plotdir + name + '_' + str(levels[0]) + '_' + str(levels[1]\n [0]) + '.png')\n return\n\n\nif __name__ == '__main__':\n forecast = case_studies.generate_season_forecast(2013, 11, 1)\n name = 'ertel_potential_vorticity'\n levels = 'air_potential_temperature', [320]\n main(forecast, name, levels, vmin=0, vmax=10, cmap='cubehelix_r')\n", "step-3": "<mask token>\ncolumns = 3\n\n\ndef main(forecast, name, levels, *args, **kwargs):\n nt = len(forecast)\n rows = nt / columns + 1\n fig = plt.figure(figsize=(18, 10 * float(rows) / columns))\n for n, cubes in enumerate(forecast):\n row = n / columns\n column = n - row * 
columns\n print(row, column)\n ax = plt.subplot2grid((rows, columns), (row, column))\n cube = convert.calc(name, cubes, levels=levels)[0]\n im = iplt.pcolormesh(cube, *args, **kwargs)\n add_map()\n ax = plt.subplot2grid((rows, columns), (row, column + 1))\n cbar = plt.colorbar(im, cax=ax, orientation='horizontal')\n plt.savefig(plotdir + name + '_' + str(levels[0]) + '_' + str(levels[1]\n [0]) + '.png')\n return\n\n\nif __name__ == '__main__':\n forecast = case_studies.generate_season_forecast(2013, 11, 1)\n name = 'ertel_potential_vorticity'\n levels = 'air_potential_temperature', [320]\n main(forecast, name, levels, vmin=0, vmax=10, cmap='cubehelix_r')\n", "step-4": "<mask token>\nimport matplotlib.pyplot as plt\nimport iris.plot as iplt\nfrom irise import convert\nfrom irise.plot.util import add_map\nfrom myscripts import plotdir\nfrom myscripts.models.um import case_studies\ncolumns = 3\n\n\ndef main(forecast, name, levels, *args, **kwargs):\n nt = len(forecast)\n rows = nt / columns + 1\n fig = plt.figure(figsize=(18, 10 * float(rows) / columns))\n for n, cubes in enumerate(forecast):\n row = n / columns\n column = n - row * columns\n print(row, column)\n ax = plt.subplot2grid((rows, columns), (row, column))\n cube = convert.calc(name, cubes, levels=levels)[0]\n im = iplt.pcolormesh(cube, *args, **kwargs)\n add_map()\n ax = plt.subplot2grid((rows, columns), (row, column + 1))\n cbar = plt.colorbar(im, cax=ax, orientation='horizontal')\n plt.savefig(plotdir + name + '_' + str(levels[0]) + '_' + str(levels[1]\n [0]) + '.png')\n return\n\n\nif __name__ == '__main__':\n forecast = case_studies.generate_season_forecast(2013, 11, 1)\n name = 'ertel_potential_vorticity'\n levels = 'air_potential_temperature', [320]\n main(forecast, name, levels, vmin=0, vmax=10, cmap='cubehelix_r')\n", "step-5": "\"\"\"Produce a multi-panel figure of each output lead time in a forecast\n\"\"\"\n\nimport matplotlib.pyplot as plt\nimport iris.plot as iplt\nfrom irise import 
convert\nfrom irise.plot.util import add_map\nfrom myscripts import plotdir\nfrom myscripts.models.um import case_studies\n\ncolumns = 3\n\n\ndef main(forecast, name, levels, *args, **kwargs):\n nt = len(forecast)\n rows = (nt / columns) + 1\n fig = plt.figure(figsize=(18, 10 * float(rows) / columns))\n for n, cubes in enumerate(forecast):\n row = n / columns\n column = n - row * columns\n print(row, column)\n ax = plt.subplot2grid((rows, columns), (row, column))\n\n cube = convert.calc(name, cubes, levels=levels)[0]\n im = iplt.pcolormesh(cube, *args, **kwargs)\n add_map()\n\n ax = plt.subplot2grid((rows, columns), (row, column + 1))\n cbar = plt.colorbar(im, cax=ax, orientation='horizontal')\n plt.savefig(plotdir + name + '_' + str(levels[0]) +\n '_' + str(levels[1][0]) + '.png')\n\n return\n\n\nif __name__ == '__main__':\n forecast = case_studies.generate_season_forecast(2013, 11, 1)\n name = 'ertel_potential_vorticity'\n levels = ('air_potential_temperature', [320])\n main(forecast, name, levels, vmin=0, vmax=10, cmap='cubehelix_r')\n", "step-ids": [ 1, 2, 3, 4, 5 ] }
[ 1, 2, 3, 4, 5 ]
<|reserved_special_token_0|> class ProductsOrderCartSerializer(ModelSerializer): class Meta: model = Product fields = ['id', 'title', 'slug', 'image'] class ProductDetailSerializer(TaggitSerializer, ModelSerializer): tags = TagListSerializerField() gallery = SerializerMethodField() color = SerializerMethodField() size = SerializerMethodField() category = SerializerMethodField() price = serializers.SerializerMethodField() class Meta: model = Product exclude = ['site_rate', 'is_rate_fixed', 'provider_gold_rate', 'provider_diamond_price'] def get_color(self, obj): result = obj.color.all() return ColorSerializer(instance=result, many=True).data def get_size(self, obj): result = obj.size.all() return SizeSerializer(instance=result, many=True).data def get_category(self, obj): return CategorySerializer(instance=obj.category).data def get_gallery(self, obj): result = GalleryProduct.objects.filter(product_id=obj) return ImageProductSerializer(instance=result, many=True).data def get_price(self, obj): return obj.price class ImageProductSerializer(ModelSerializer): class Meta: model = GalleryProduct fields = ['image', 'product'] <|reserved_special_token_1|> <|reserved_special_token_0|> class ImageCreateProductSerializer(serializers.Serializer): class Meta: model = GalleryProduct fields = ['image'] class ProductListSerializer(serializers.ModelSerializer): gallery = serializers.SerializerMethodField() category = serializers.SerializerMethodField() price = serializers.SerializerMethodField() class Meta: model = Product fields = ['id', 'rating', 'title', 'slug', 'image', 'gallery', 'category', 'price'] def get_category(self, obj): result = obj.category return CategorySerializer(instance=result).data def get_gallery(self, obj): result = GalleryProduct.objects.filter(product_id=obj) return ImageProductSerializer(instance=result, many=True).data def get_price(self, obj): return obj.price class ProductsOrderCartSerializer(ModelSerializer): class Meta: model = Product fields = ['id', 
'title', 'slug', 'image'] class ProductDetailSerializer(TaggitSerializer, ModelSerializer): tags = TagListSerializerField() gallery = SerializerMethodField() color = SerializerMethodField() size = SerializerMethodField() category = SerializerMethodField() price = serializers.SerializerMethodField() class Meta: model = Product exclude = ['site_rate', 'is_rate_fixed', 'provider_gold_rate', 'provider_diamond_price'] def get_color(self, obj): result = obj.color.all() return ColorSerializer(instance=result, many=True).data def get_size(self, obj): result = obj.size.all() return SizeSerializer(instance=result, many=True).data def get_category(self, obj): return CategorySerializer(instance=obj.category).data def get_gallery(self, obj): result = GalleryProduct.objects.filter(product_id=obj) return ImageProductSerializer(instance=result, many=True).data def get_price(self, obj): return obj.price class ImageProductSerializer(ModelSerializer): class Meta: model = GalleryProduct fields = ['image', 'product'] <|reserved_special_token_1|> <|reserved_special_token_0|> class StoneSerilizer(ModelSerializer): class Meta: model = Stone fields = '__all__' class ImageCreateProductSerializer(serializers.Serializer): class Meta: model = GalleryProduct fields = ['image'] class ProductListSerializer(serializers.ModelSerializer): gallery = serializers.SerializerMethodField() category = serializers.SerializerMethodField() price = serializers.SerializerMethodField() class Meta: model = Product fields = ['id', 'rating', 'title', 'slug', 'image', 'gallery', 'category', 'price'] def get_category(self, obj): result = obj.category return CategorySerializer(instance=result).data def get_gallery(self, obj): result = GalleryProduct.objects.filter(product_id=obj) return ImageProductSerializer(instance=result, many=True).data def get_price(self, obj): return obj.price class ProductsOrderCartSerializer(ModelSerializer): class Meta: model = Product fields = ['id', 'title', 'slug', 'image'] class 
ProductDetailSerializer(TaggitSerializer, ModelSerializer): tags = TagListSerializerField() gallery = SerializerMethodField() color = SerializerMethodField() size = SerializerMethodField() category = SerializerMethodField() price = serializers.SerializerMethodField() class Meta: model = Product exclude = ['site_rate', 'is_rate_fixed', 'provider_gold_rate', 'provider_diamond_price'] def get_color(self, obj): result = obj.color.all() return ColorSerializer(instance=result, many=True).data def get_size(self, obj): result = obj.size.all() return SizeSerializer(instance=result, many=True).data def get_category(self, obj): return CategorySerializer(instance=obj.category).data def get_gallery(self, obj): result = GalleryProduct.objects.filter(product_id=obj) return ImageProductSerializer(instance=result, many=True).data def get_price(self, obj): return obj.price class ImageProductSerializer(ModelSerializer): class Meta: model = GalleryProduct fields = ['image', 'product'] <|reserved_special_token_1|> <|reserved_special_token_0|> class SizeSerializer(ModelSerializer): class Meta: model = Size fields = ['id', 'size'] class StoneSerilizer(ModelSerializer): class Meta: model = Stone fields = '__all__' class ImageCreateProductSerializer(serializers.Serializer): class Meta: model = GalleryProduct fields = ['image'] class ProductListSerializer(serializers.ModelSerializer): gallery = serializers.SerializerMethodField() category = serializers.SerializerMethodField() price = serializers.SerializerMethodField() class Meta: model = Product fields = ['id', 'rating', 'title', 'slug', 'image', 'gallery', 'category', 'price'] def get_category(self, obj): result = obj.category return CategorySerializer(instance=result).data def get_gallery(self, obj): result = GalleryProduct.objects.filter(product_id=obj) return ImageProductSerializer(instance=result, many=True).data def get_price(self, obj): return obj.price class ProductsOrderCartSerializer(ModelSerializer): class Meta: model = Product 
fields = ['id', 'title', 'slug', 'image'] class ProductDetailSerializer(TaggitSerializer, ModelSerializer): tags = TagListSerializerField() gallery = SerializerMethodField() color = SerializerMethodField() size = SerializerMethodField() category = SerializerMethodField() price = serializers.SerializerMethodField() class Meta: model = Product exclude = ['site_rate', 'is_rate_fixed', 'provider_gold_rate', 'provider_diamond_price'] def get_color(self, obj): result = obj.color.all() return ColorSerializer(instance=result, many=True).data def get_size(self, obj): result = obj.size.all() return SizeSerializer(instance=result, many=True).data def get_category(self, obj): return CategorySerializer(instance=obj.category).data def get_gallery(self, obj): result = GalleryProduct.objects.filter(product_id=obj) return ImageProductSerializer(instance=result, many=True).data def get_price(self, obj): return obj.price class ImageProductSerializer(ModelSerializer): class Meta: model = GalleryProduct fields = ['image', 'product'] <|reserved_special_token_1|> from django.db.models import Count from django.utils.text import slugify from rest_framework.serializers import ModelSerializer, SerializerMethodField, Serializer from rest_framework import serializers from category.models import Category from product.models import Product, GalleryProduct, Stone, Color, Size from category.api.serializers import CategorySerializer from extensions.calculations import calculating_gold_jewelry from taggit_serializer.serializers import ( TagListSerializerField, TaggitSerializer ) def _create_custom_uuid(): max_id = 1 ex_last_product = Product.objects.last() if ex_last_product: max_id = ex_last_product.id my_id = '{}{:07d}'.format('EUA', max_id if max_id is not None else 1) return my_id class ColorSerializer(ModelSerializer): class Meta: model = Color fields = ['id', 'color'] class SizeSerializer(ModelSerializer): class Meta: model = Size fields = ['id', 'size'] class StoneSerilizer(ModelSerializer): 
class Meta: model = Stone fields = '__all__' class ImageCreateProductSerializer(serializers.Serializer): class Meta: model = GalleryProduct fields = ['image'] class ProductListSerializer(serializers.ModelSerializer): gallery = serializers.SerializerMethodField() category = serializers.SerializerMethodField() price = serializers.SerializerMethodField() class Meta: model = Product fields = [ 'id', 'rating', 'title', 'slug', 'image', 'gallery', 'category', 'price' ] def get_category(self, obj): result = obj.category return CategorySerializer(instance=result).data def get_gallery(self, obj): result = GalleryProduct.objects.filter(product_id=obj) return ImageProductSerializer(instance=result, many=True).data def get_price(self, obj): return obj.price class ProductsOrderCartSerializer(ModelSerializer): class Meta: model = Product fields = ['id', 'title', 'slug', 'image'] class ProductDetailSerializer(TaggitSerializer, ModelSerializer): tags = TagListSerializerField() gallery = SerializerMethodField() color = SerializerMethodField() size = SerializerMethodField() category = SerializerMethodField() price = serializers.SerializerMethodField() class Meta: model = Product exclude = [ 'site_rate', 'is_rate_fixed', 'provider_gold_rate', 'provider_diamond_price', ] def get_color(self, obj): result = obj.color.all() return ColorSerializer(instance=result, many=True).data def get_size(self, obj): result = obj.size.all() return SizeSerializer(instance=result, many=True).data def get_category(self, obj): return CategorySerializer(instance=obj.category).data def get_gallery(self, obj): result = GalleryProduct.objects.filter(product_id=obj) return ImageProductSerializer(instance=result, many=True).data def get_price(self, obj): return obj.price class ImageProductSerializer(ModelSerializer): class Meta: model = GalleryProduct fields = ['image', 'product']
flexible
{ "blob_id": "8be6031caad26ec6b6b99b8d8b8f80d16ad243d4", "index": 7706, "step-1": "<mask token>\n\n\nclass ProductsOrderCartSerializer(ModelSerializer):\n\n\n class Meta:\n model = Product\n fields = ['id', 'title', 'slug', 'image']\n\n\nclass ProductDetailSerializer(TaggitSerializer, ModelSerializer):\n tags = TagListSerializerField()\n gallery = SerializerMethodField()\n color = SerializerMethodField()\n size = SerializerMethodField()\n category = SerializerMethodField()\n price = serializers.SerializerMethodField()\n\n\n class Meta:\n model = Product\n exclude = ['site_rate', 'is_rate_fixed', 'provider_gold_rate',\n 'provider_diamond_price']\n\n def get_color(self, obj):\n result = obj.color.all()\n return ColorSerializer(instance=result, many=True).data\n\n def get_size(self, obj):\n result = obj.size.all()\n return SizeSerializer(instance=result, many=True).data\n\n def get_category(self, obj):\n return CategorySerializer(instance=obj.category).data\n\n def get_gallery(self, obj):\n result = GalleryProduct.objects.filter(product_id=obj)\n return ImageProductSerializer(instance=result, many=True).data\n\n def get_price(self, obj):\n return obj.price\n\n\nclass ImageProductSerializer(ModelSerializer):\n\n\n class Meta:\n model = GalleryProduct\n fields = ['image', 'product']\n", "step-2": "<mask token>\n\n\nclass ImageCreateProductSerializer(serializers.Serializer):\n\n\n class Meta:\n model = GalleryProduct\n fields = ['image']\n\n\nclass ProductListSerializer(serializers.ModelSerializer):\n gallery = serializers.SerializerMethodField()\n category = serializers.SerializerMethodField()\n price = serializers.SerializerMethodField()\n\n\n class Meta:\n model = Product\n fields = ['id', 'rating', 'title', 'slug', 'image', 'gallery',\n 'category', 'price']\n\n def get_category(self, obj):\n result = obj.category\n return CategorySerializer(instance=result).data\n\n def get_gallery(self, obj):\n result = GalleryProduct.objects.filter(product_id=obj)\n return 
ImageProductSerializer(instance=result, many=True).data\n\n def get_price(self, obj):\n return obj.price\n\n\nclass ProductsOrderCartSerializer(ModelSerializer):\n\n\n class Meta:\n model = Product\n fields = ['id', 'title', 'slug', 'image']\n\n\nclass ProductDetailSerializer(TaggitSerializer, ModelSerializer):\n tags = TagListSerializerField()\n gallery = SerializerMethodField()\n color = SerializerMethodField()\n size = SerializerMethodField()\n category = SerializerMethodField()\n price = serializers.SerializerMethodField()\n\n\n class Meta:\n model = Product\n exclude = ['site_rate', 'is_rate_fixed', 'provider_gold_rate',\n 'provider_diamond_price']\n\n def get_color(self, obj):\n result = obj.color.all()\n return ColorSerializer(instance=result, many=True).data\n\n def get_size(self, obj):\n result = obj.size.all()\n return SizeSerializer(instance=result, many=True).data\n\n def get_category(self, obj):\n return CategorySerializer(instance=obj.category).data\n\n def get_gallery(self, obj):\n result = GalleryProduct.objects.filter(product_id=obj)\n return ImageProductSerializer(instance=result, many=True).data\n\n def get_price(self, obj):\n return obj.price\n\n\nclass ImageProductSerializer(ModelSerializer):\n\n\n class Meta:\n model = GalleryProduct\n fields = ['image', 'product']\n", "step-3": "<mask token>\n\n\nclass StoneSerilizer(ModelSerializer):\n\n\n class Meta:\n model = Stone\n fields = '__all__'\n\n\nclass ImageCreateProductSerializer(serializers.Serializer):\n\n\n class Meta:\n model = GalleryProduct\n fields = ['image']\n\n\nclass ProductListSerializer(serializers.ModelSerializer):\n gallery = serializers.SerializerMethodField()\n category = serializers.SerializerMethodField()\n price = serializers.SerializerMethodField()\n\n\n class Meta:\n model = Product\n fields = ['id', 'rating', 'title', 'slug', 'image', 'gallery',\n 'category', 'price']\n\n def get_category(self, obj):\n result = obj.category\n return 
CategorySerializer(instance=result).data\n\n def get_gallery(self, obj):\n result = GalleryProduct.objects.filter(product_id=obj)\n return ImageProductSerializer(instance=result, many=True).data\n\n def get_price(self, obj):\n return obj.price\n\n\nclass ProductsOrderCartSerializer(ModelSerializer):\n\n\n class Meta:\n model = Product\n fields = ['id', 'title', 'slug', 'image']\n\n\nclass ProductDetailSerializer(TaggitSerializer, ModelSerializer):\n tags = TagListSerializerField()\n gallery = SerializerMethodField()\n color = SerializerMethodField()\n size = SerializerMethodField()\n category = SerializerMethodField()\n price = serializers.SerializerMethodField()\n\n\n class Meta:\n model = Product\n exclude = ['site_rate', 'is_rate_fixed', 'provider_gold_rate',\n 'provider_diamond_price']\n\n def get_color(self, obj):\n result = obj.color.all()\n return ColorSerializer(instance=result, many=True).data\n\n def get_size(self, obj):\n result = obj.size.all()\n return SizeSerializer(instance=result, many=True).data\n\n def get_category(self, obj):\n return CategorySerializer(instance=obj.category).data\n\n def get_gallery(self, obj):\n result = GalleryProduct.objects.filter(product_id=obj)\n return ImageProductSerializer(instance=result, many=True).data\n\n def get_price(self, obj):\n return obj.price\n\n\nclass ImageProductSerializer(ModelSerializer):\n\n\n class Meta:\n model = GalleryProduct\n fields = ['image', 'product']\n", "step-4": "<mask token>\n\n\nclass SizeSerializer(ModelSerializer):\n\n\n class Meta:\n model = Size\n fields = ['id', 'size']\n\n\nclass StoneSerilizer(ModelSerializer):\n\n\n class Meta:\n model = Stone\n fields = '__all__'\n\n\nclass ImageCreateProductSerializer(serializers.Serializer):\n\n\n class Meta:\n model = GalleryProduct\n fields = ['image']\n\n\nclass ProductListSerializer(serializers.ModelSerializer):\n gallery = serializers.SerializerMethodField()\n category = serializers.SerializerMethodField()\n price = 
serializers.SerializerMethodField()\n\n\n class Meta:\n model = Product\n fields = ['id', 'rating', 'title', 'slug', 'image', 'gallery',\n 'category', 'price']\n\n def get_category(self, obj):\n result = obj.category\n return CategorySerializer(instance=result).data\n\n def get_gallery(self, obj):\n result = GalleryProduct.objects.filter(product_id=obj)\n return ImageProductSerializer(instance=result, many=True).data\n\n def get_price(self, obj):\n return obj.price\n\n\nclass ProductsOrderCartSerializer(ModelSerializer):\n\n\n class Meta:\n model = Product\n fields = ['id', 'title', 'slug', 'image']\n\n\nclass ProductDetailSerializer(TaggitSerializer, ModelSerializer):\n tags = TagListSerializerField()\n gallery = SerializerMethodField()\n color = SerializerMethodField()\n size = SerializerMethodField()\n category = SerializerMethodField()\n price = serializers.SerializerMethodField()\n\n\n class Meta:\n model = Product\n exclude = ['site_rate', 'is_rate_fixed', 'provider_gold_rate',\n 'provider_diamond_price']\n\n def get_color(self, obj):\n result = obj.color.all()\n return ColorSerializer(instance=result, many=True).data\n\n def get_size(self, obj):\n result = obj.size.all()\n return SizeSerializer(instance=result, many=True).data\n\n def get_category(self, obj):\n return CategorySerializer(instance=obj.category).data\n\n def get_gallery(self, obj):\n result = GalleryProduct.objects.filter(product_id=obj)\n return ImageProductSerializer(instance=result, many=True).data\n\n def get_price(self, obj):\n return obj.price\n\n\nclass ImageProductSerializer(ModelSerializer):\n\n\n class Meta:\n model = GalleryProduct\n fields = ['image', 'product']\n", "step-5": "from django.db.models import Count\r\nfrom django.utils.text import slugify\r\n\r\nfrom rest_framework.serializers import ModelSerializer, SerializerMethodField, Serializer\r\nfrom rest_framework import serializers\r\n\r\nfrom category.models import Category\r\nfrom product.models import Product, 
GalleryProduct, Stone, Color, Size\r\nfrom category.api.serializers import CategorySerializer\r\nfrom extensions.calculations import calculating_gold_jewelry\r\nfrom taggit_serializer.serializers import (\r\n\tTagListSerializerField,\r\n\tTaggitSerializer\r\n\t)\r\n\r\n\r\ndef _create_custom_uuid():\r\n\tmax_id = 1\r\n\tex_last_product = Product.objects.last()\r\n\tif ex_last_product:\r\n\t\tmax_id = ex_last_product.id\r\n\r\n\tmy_id = '{}{:07d}'.format('EUA', max_id if max_id is not None else 1)\r\n\treturn my_id\r\n\r\n\r\nclass ColorSerializer(ModelSerializer):\r\n\tclass Meta:\r\n\t\tmodel = Color\r\n\t\tfields = ['id', 'color']\r\n\r\n\r\nclass SizeSerializer(ModelSerializer):\r\n\tclass Meta:\r\n\t\tmodel = Size\r\n\t\tfields = ['id', 'size']\r\n\r\n\r\nclass StoneSerilizer(ModelSerializer):\r\n\tclass Meta:\r\n\t\tmodel = Stone\r\n\t\tfields = '__all__'\r\n\t\t\r\n\r\nclass ImageCreateProductSerializer(serializers.Serializer):\r\n\tclass Meta:\r\n\t\tmodel = GalleryProduct\r\n\t\tfields = ['image']\r\n\t\r\n\r\nclass ProductListSerializer(serializers.ModelSerializer):\r\n\tgallery = serializers.SerializerMethodField()\r\n\tcategory = serializers.SerializerMethodField()\r\n\tprice = serializers.SerializerMethodField()\r\n\r\n\tclass Meta:\r\n\t\tmodel = Product\r\n\t\tfields = [\r\n\t\t\t'id',\r\n\t\t\t'rating',\r\n\t\t\t'title',\r\n\t\t\t'slug',\r\n\t\t\t'image',\r\n\t\t\t'gallery',\r\n\t\t\t'category',\r\n\t\t\t'price'\r\n\t\t]\r\n\r\n\tdef get_category(self, obj):\r\n\t\tresult = obj.category\r\n\t\treturn CategorySerializer(instance=result).data\r\n\r\n\tdef get_gallery(self, obj):\r\n\t\tresult = GalleryProduct.objects.filter(product_id=obj)\r\n\t\treturn ImageProductSerializer(instance=result, many=True).data\r\n\r\n\tdef get_price(self, obj):\r\n\t\treturn obj.price\r\n\r\n\r\nclass ProductsOrderCartSerializer(ModelSerializer):\r\n\r\n\tclass Meta:\r\n\t\tmodel = Product\r\n\t\tfields = ['id', 'title', 'slug', 'image']\r\n\r\n\r\nclass 
ProductDetailSerializer(TaggitSerializer, ModelSerializer):\r\n\ttags = TagListSerializerField()\r\n\tgallery = SerializerMethodField()\r\n\tcolor = SerializerMethodField()\r\n\tsize = SerializerMethodField()\r\n\tcategory = SerializerMethodField()\r\n\tprice = serializers.SerializerMethodField()\r\n\r\n\tclass Meta:\r\n\t\tmodel = Product\r\n\t\texclude = [\r\n\t\t\t'site_rate',\r\n\t\t\t'is_rate_fixed',\r\n\t\t\t'provider_gold_rate',\r\n\t\t\t'provider_diamond_price',\r\n\t\t]\r\n\r\n\tdef get_color(self, obj):\r\n\t\tresult = obj.color.all()\r\n\t\treturn ColorSerializer(instance=result, many=True).data\r\n\r\n\tdef get_size(self, obj):\r\n\t\tresult = obj.size.all()\r\n\t\treturn SizeSerializer(instance=result, many=True).data\r\n\r\n\tdef get_category(self, obj):\r\n\t\treturn CategorySerializer(instance=obj.category).data\r\n\r\n\tdef get_gallery(self, obj):\r\n\t\tresult = GalleryProduct.objects.filter(product_id=obj)\r\n\t\treturn ImageProductSerializer(instance=result, many=True).data\r\n\r\n\tdef get_price(self, obj):\r\n\t\treturn obj.price\r\n\r\n\r\nclass ImageProductSerializer(ModelSerializer):\r\n\tclass Meta:\r\n\t\tmodel = GalleryProduct\r\n\t\tfields = ['image', 'product']\r\n", "step-ids": [ 9, 15, 16, 17, 21 ] }
[ 9, 15, 16, 17, 21 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> def matrix(m): for i in range(len(m)): for j in range(len(m[0])): m[i][j] = m[i][j] ** 2 <|reserved_special_token_0|> <|reserved_special_token_1|> def matrix(m): for i in range(len(m)): for j in range(len(m[0])): m[i][j] = m[i][j] ** 2 <|reserved_special_token_0|> print('The matrix is ', a) matrix(a) print('The updated matrix is ', a) <|reserved_special_token_1|> def matrix(m): for i in range(len(m)): for j in range(len(m[0])): m[i][j] = m[i][j] ** 2 a = [[1, 2, 3], [4, 5, 6], [8, 9, 0]] print('The matrix is ', a) matrix(a) print('The updated matrix is ', a) <|reserved_special_token_1|> #8 def matrix(m): for i in range(len(m)): for j in range (len(m[0])): m[i][j]=(m[i][j])**2 a=[[1,2,3],[4,5,6],[8,9,0]] print('The matrix is ',a) matrix(a) print('The updated matrix is ',a)
flexible
{ "blob_id": "f46dd5217c8e015546d7fff7ee52569ecc2c8e41", "index": 5487, "step-1": "<mask token>\n", "step-2": "def matrix(m):\n for i in range(len(m)):\n for j in range(len(m[0])):\n m[i][j] = m[i][j] ** 2\n\n\n<mask token>\n", "step-3": "def matrix(m):\n for i in range(len(m)):\n for j in range(len(m[0])):\n m[i][j] = m[i][j] ** 2\n\n\n<mask token>\nprint('The matrix is ', a)\nmatrix(a)\nprint('The updated matrix is ', a)\n", "step-4": "def matrix(m):\n for i in range(len(m)):\n for j in range(len(m[0])):\n m[i][j] = m[i][j] ** 2\n\n\na = [[1, 2, 3], [4, 5, 6], [8, 9, 0]]\nprint('The matrix is ', a)\nmatrix(a)\nprint('The updated matrix is ', a)\n", "step-5": "#8\ndef matrix(m):\n for i in range(len(m)):\n for j in range (len(m[0])):\n m[i][j]=(m[i][j])**2 \n\na=[[1,2,3],[4,5,6],[8,9,0]]\nprint('The matrix is ',a)\nmatrix(a)\nprint('The updated matrix is ',a)\n\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
import psycopg2 host = "datavis.cauuh8vzeelb.us-east-1.rds.amazonaws.com" database = "top5" user = "teamwonder" password = "visproject" Gentrifying = [10002,10003,10009,10026,10027,10029,10030,10031,10032,10033,10034,10035,10037,10039,10040,10454,10455,10456,10457,10458,10459,10460,10474,11102,11103,11105,11106,11206,11211,11212,11213,11216,11220,11221,11222,11225,11232,11233,11237,11249,11370] Non_Gentrifying = [10451,10452,10453,10463,10468,10472,10473,11204,11208,11214,11223,11224,11239] Higher_Income = [83,7020,7030,7114,10000,10001,10004,10005,10006,10007,10010,10011,10012,10013,10014,10016,10017,10018,10019,10020,10021,10022,10023,10024,10025,10028,10036,10038,10041,10044,10045,10048,10055,10065,10069,10075,10103,10104,10105,10107,10111,10112,10118,10119,10120,10121,10122,10123,10128,10129,10153,10154,10155,10158,10162,10165,10166,10167,10168,10169,10170,10171,10172,10173,10177,10178,10179,10270,10271,10278,10279,10280,10281,10282,10301,10302,10303,10304,10305,10306,10307,10308,10309,10310,10312,10314,10461,10462,10464,10465,10466,10467,10469,10470,10471,10475,10507,10704,10803,11001,11004,11005,11040,11101,11104,11109,11201,11203,11205,11207,11209,11210,11215,11217,11218,11219,11226,11228,11229,11230,11231,11234,11235,11236,11238,11241,11242,11251,11354,11355,11356,11357,11358,11359,11360,11361,11362,11363,11364,11365,11366,11367,11368,11369,11371,11372,11373,11374,11375,11377,11378,11379,11385,11411,11412,11413,11414,11415,11416,11417,11418,11419,11420,11421,11422,11423,11426,11427,11428,11429,11430,11432,11433,11434,11435,11436,11530,11691,11692,11693,11694,11695,11697] con = psycopg2.connect(host=host, database=database, user=user, password=password) cur = con.cursor()
normal
{ "blob_id": "0ebf5646ee9693b7d0c1de61436e05b3725b2c9f", "index": 2560, "step-1": "<mask token>\n", "step-2": "<mask token>\nhost = 'datavis.cauuh8vzeelb.us-east-1.rds.amazonaws.com'\ndatabase = 'top5'\nuser = 'teamwonder'\npassword = 'visproject'\nGentrifying = [10002, 10003, 10009, 10026, 10027, 10029, 10030, 10031, \n 10032, 10033, 10034, 10035, 10037, 10039, 10040, 10454, 10455, 10456, \n 10457, 10458, 10459, 10460, 10474, 11102, 11103, 11105, 11106, 11206, \n 11211, 11212, 11213, 11216, 11220, 11221, 11222, 11225, 11232, 11233, \n 11237, 11249, 11370]\nNon_Gentrifying = [10451, 10452, 10453, 10463, 10468, 10472, 10473, 11204, \n 11208, 11214, 11223, 11224, 11239]\nHigher_Income = [83, 7020, 7030, 7114, 10000, 10001, 10004, 10005, 10006, \n 10007, 10010, 10011, 10012, 10013, 10014, 10016, 10017, 10018, 10019, \n 10020, 10021, 10022, 10023, 10024, 10025, 10028, 10036, 10038, 10041, \n 10044, 10045, 10048, 10055, 10065, 10069, 10075, 10103, 10104, 10105, \n 10107, 10111, 10112, 10118, 10119, 10120, 10121, 10122, 10123, 10128, \n 10129, 10153, 10154, 10155, 10158, 10162, 10165, 10166, 10167, 10168, \n 10169, 10170, 10171, 10172, 10173, 10177, 10178, 10179, 10270, 10271, \n 10278, 10279, 10280, 10281, 10282, 10301, 10302, 10303, 10304, 10305, \n 10306, 10307, 10308, 10309, 10310, 10312, 10314, 10461, 10462, 10464, \n 10465, 10466, 10467, 10469, 10470, 10471, 10475, 10507, 10704, 10803, \n 11001, 11004, 11005, 11040, 11101, 11104, 11109, 11201, 11203, 11205, \n 11207, 11209, 11210, 11215, 11217, 11218, 11219, 11226, 11228, 11229, \n 11230, 11231, 11234, 11235, 11236, 11238, 11241, 11242, 11251, 11354, \n 11355, 11356, 11357, 11358, 11359, 11360, 11361, 11362, 11363, 11364, \n 11365, 11366, 11367, 11368, 11369, 11371, 11372, 11373, 11374, 11375, \n 11377, 11378, 11379, 11385, 11411, 11412, 11413, 11414, 11415, 11416, \n 11417, 11418, 11419, 11420, 11421, 11422, 11423, 11426, 11427, 11428, \n 11429, 11430, 11432, 11433, 11434, 11435, 11436, 11530, 11691, 11692, \n 
11693, 11694, 11695, 11697]\ncon = psycopg2.connect(host=host, database=database, user=user, password=\n password)\ncur = con.cursor()\n", "step-3": "import psycopg2\nhost = 'datavis.cauuh8vzeelb.us-east-1.rds.amazonaws.com'\ndatabase = 'top5'\nuser = 'teamwonder'\npassword = 'visproject'\nGentrifying = [10002, 10003, 10009, 10026, 10027, 10029, 10030, 10031, \n 10032, 10033, 10034, 10035, 10037, 10039, 10040, 10454, 10455, 10456, \n 10457, 10458, 10459, 10460, 10474, 11102, 11103, 11105, 11106, 11206, \n 11211, 11212, 11213, 11216, 11220, 11221, 11222, 11225, 11232, 11233, \n 11237, 11249, 11370]\nNon_Gentrifying = [10451, 10452, 10453, 10463, 10468, 10472, 10473, 11204, \n 11208, 11214, 11223, 11224, 11239]\nHigher_Income = [83, 7020, 7030, 7114, 10000, 10001, 10004, 10005, 10006, \n 10007, 10010, 10011, 10012, 10013, 10014, 10016, 10017, 10018, 10019, \n 10020, 10021, 10022, 10023, 10024, 10025, 10028, 10036, 10038, 10041, \n 10044, 10045, 10048, 10055, 10065, 10069, 10075, 10103, 10104, 10105, \n 10107, 10111, 10112, 10118, 10119, 10120, 10121, 10122, 10123, 10128, \n 10129, 10153, 10154, 10155, 10158, 10162, 10165, 10166, 10167, 10168, \n 10169, 10170, 10171, 10172, 10173, 10177, 10178, 10179, 10270, 10271, \n 10278, 10279, 10280, 10281, 10282, 10301, 10302, 10303, 10304, 10305, \n 10306, 10307, 10308, 10309, 10310, 10312, 10314, 10461, 10462, 10464, \n 10465, 10466, 10467, 10469, 10470, 10471, 10475, 10507, 10704, 10803, \n 11001, 11004, 11005, 11040, 11101, 11104, 11109, 11201, 11203, 11205, \n 11207, 11209, 11210, 11215, 11217, 11218, 11219, 11226, 11228, 11229, \n 11230, 11231, 11234, 11235, 11236, 11238, 11241, 11242, 11251, 11354, \n 11355, 11356, 11357, 11358, 11359, 11360, 11361, 11362, 11363, 11364, \n 11365, 11366, 11367, 11368, 11369, 11371, 11372, 11373, 11374, 11375, \n 11377, 11378, 11379, 11385, 11411, 11412, 11413, 11414, 11415, 11416, \n 11417, 11418, 11419, 11420, 11421, 11422, 11423, 11426, 11427, 11428, \n 11429, 11430, 11432, 11433, 11434, 
11435, 11436, 11530, 11691, 11692, \n 11693, 11694, 11695, 11697]\ncon = psycopg2.connect(host=host, database=database, user=user, password=\n password)\ncur = con.cursor()\n", "step-4": "import psycopg2\n\nhost = \"datavis.cauuh8vzeelb.us-east-1.rds.amazonaws.com\"\ndatabase = \"top5\"\nuser = \"teamwonder\"\npassword = \"visproject\"\n\nGentrifying = [10002,10003,10009,10026,10027,10029,10030,10031,10032,10033,10034,10035,10037,10039,10040,10454,10455,10456,10457,10458,10459,10460,10474,11102,11103,11105,11106,11206,11211,11212,11213,11216,11220,11221,11222,11225,11232,11233,11237,11249,11370]\nNon_Gentrifying = [10451,10452,10453,10463,10468,10472,10473,11204,11208,11214,11223,11224,11239]\nHigher_Income = [83,7020,7030,7114,10000,10001,10004,10005,10006,10007,10010,10011,10012,10013,10014,10016,10017,10018,10019,10020,10021,10022,10023,10024,10025,10028,10036,10038,10041,10044,10045,10048,10055,10065,10069,10075,10103,10104,10105,10107,10111,10112,10118,10119,10120,10121,10122,10123,10128,10129,10153,10154,10155,10158,10162,10165,10166,10167,10168,10169,10170,10171,10172,10173,10177,10178,10179,10270,10271,10278,10279,10280,10281,10282,10301,10302,10303,10304,10305,10306,10307,10308,10309,10310,10312,10314,10461,10462,10464,10465,10466,10467,10469,10470,10471,10475,10507,10704,10803,11001,11004,11005,11040,11101,11104,11109,11201,11203,11205,11207,11209,11210,11215,11217,11218,11219,11226,11228,11229,11230,11231,11234,11235,11236,11238,11241,11242,11251,11354,11355,11356,11357,11358,11359,11360,11361,11362,11363,11364,11365,11366,11367,11368,11369,11371,11372,11373,11374,11375,11377,11378,11379,11385,11411,11412,11413,11414,11415,11416,11417,11418,11419,11420,11421,11422,11423,11426,11427,11428,11429,11430,11432,11433,11434,11435,11436,11530,11691,11692,11693,11694,11695,11697]\n\ncon = psycopg2.connect(host=host, database=database, user=user, password=password)\ncur = con.cursor()\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
from .__main__ import datajson_write, datajson_read
normal
{ "blob_id": "2269e74c006833976c3a28cd52c238e2dde20051", "index": 5871, "step-1": "<mask token>\n", "step-2": "from .__main__ import datajson_write, datajson_read\n", "step-3": null, "step-4": null, "step-5": null, "step-ids": [ 0, 1 ] }
[ 0, 1 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> from .base import Sort
flexible
{ "blob_id": "de3a96d46b7eaf198b33efe78b21ef0207dcc609", "index": 8424, "step-1": "<mask token>\n", "step-2": "from .base import Sort\n", "step-3": null, "step-4": null, "step-5": null, "step-ids": [ 0, 1 ] }
[ 0, 1 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> clf.fit(X, y) print('class_prior:', clf.class_prior) print('class_count_:', clf.class_count_) print('class_log_prior_:', clf.class_log_prior_) print('feature_count_:', clf.feature_count_) print('n_features_:', clf.n_features_) print('feature_log_prob_:', clf.feature_log_prob_) <|reserved_special_token_1|> <|reserved_special_token_0|> X = np.array([[1, 2, 3, 3], [1, 3, 4, 4], [2, 4, 5, 5]]) y = np.array([1, 2, 3]) <|reserved_special_token_0|> clf = BernoulliNB(alpha=2.0, binarize=3.0, fit_prior=True) clf.fit(X, y) print('class_prior:', clf.class_prior) print('class_count_:', clf.class_count_) print('class_log_prior_:', clf.class_log_prior_) print('feature_count_:', clf.feature_count_) print('n_features_:', clf.n_features_) print('feature_log_prob_:', clf.feature_log_prob_) <|reserved_special_token_1|> import numpy as np from sklearn.naive_bayes import BernoulliNB X = np.array([[1, 2, 3, 3], [1, 3, 4, 4], [2, 4, 5, 5]]) y = np.array([1, 2, 3]) <|reserved_special_token_0|> clf = BernoulliNB(alpha=2.0, binarize=3.0, fit_prior=True) clf.fit(X, y) print('class_prior:', clf.class_prior) print('class_count_:', clf.class_count_) print('class_log_prior_:', clf.class_log_prior_) print('feature_count_:', clf.feature_count_) print('n_features_:', clf.n_features_) print('feature_log_prob_:', clf.feature_log_prob_) <|reserved_special_token_1|> import numpy as np from sklearn.naive_bayes import BernoulliNB X = np.array([[1, 2, 3, 3], [1, 3, 4, 4], [2, 4, 5, 5]]) y = np.array([1, 2, 3]) """ alpha: 平滑系数 binarize: 将特征二值化的阈值 fit_prior: 使用数据拟合先验概率 """ clf = BernoulliNB(alpha=2.0, binarize=3.0, fit_prior=True) clf.fit(X, y) print("class_prior:", clf.class_prior) print("class_count_:", clf.class_count_) # 按类别顺序输出其对应个数 print("class_log_prior_:", clf.class_log_prior_) # 先验概率对数值 print("feature_count_:", clf.feature_count_) # 各类别个特征之和 print("n_features_:", clf.n_features_) print("feature_log_prob_:", 
clf.feature_log_prob_) # 指定类的各特征的条件概率的对数 # 其他参数与方法与MultinomialNB类似
flexible
{ "blob_id": "98a1fab8cee91f37ceee2cfd868d3a5756a055b0", "index": 7628, "step-1": "<mask token>\n", "step-2": "<mask token>\nclf.fit(X, y)\nprint('class_prior:', clf.class_prior)\nprint('class_count_:', clf.class_count_)\nprint('class_log_prior_:', clf.class_log_prior_)\nprint('feature_count_:', clf.feature_count_)\nprint('n_features_:', clf.n_features_)\nprint('feature_log_prob_:', clf.feature_log_prob_)\n", "step-3": "<mask token>\nX = np.array([[1, 2, 3, 3], [1, 3, 4, 4], [2, 4, 5, 5]])\ny = np.array([1, 2, 3])\n<mask token>\nclf = BernoulliNB(alpha=2.0, binarize=3.0, fit_prior=True)\nclf.fit(X, y)\nprint('class_prior:', clf.class_prior)\nprint('class_count_:', clf.class_count_)\nprint('class_log_prior_:', clf.class_log_prior_)\nprint('feature_count_:', clf.feature_count_)\nprint('n_features_:', clf.n_features_)\nprint('feature_log_prob_:', clf.feature_log_prob_)\n", "step-4": "import numpy as np\nfrom sklearn.naive_bayes import BernoulliNB\nX = np.array([[1, 2, 3, 3], [1, 3, 4, 4], [2, 4, 5, 5]])\ny = np.array([1, 2, 3])\n<mask token>\nclf = BernoulliNB(alpha=2.0, binarize=3.0, fit_prior=True)\nclf.fit(X, y)\nprint('class_prior:', clf.class_prior)\nprint('class_count_:', clf.class_count_)\nprint('class_log_prior_:', clf.class_log_prior_)\nprint('feature_count_:', clf.feature_count_)\nprint('n_features_:', clf.n_features_)\nprint('feature_log_prob_:', clf.feature_log_prob_)\n", "step-5": "import numpy as np\n\nfrom sklearn.naive_bayes import BernoulliNB\n\nX = np.array([[1, 2, 3, 3], [1, 3, 4, 4], [2, 4, 5, 5]])\ny = np.array([1, 2, 3])\n\"\"\"\nalpha: 平滑系数\nbinarize: 将特征二值化的阈值\nfit_prior: 使用数据拟合先验概率\n\"\"\"\nclf = BernoulliNB(alpha=2.0, binarize=3.0, fit_prior=True)\nclf.fit(X, y)\nprint(\"class_prior:\", clf.class_prior)\nprint(\"class_count_:\", clf.class_count_) # 按类别顺序输出其对应个数\nprint(\"class_log_prior_:\", clf.class_log_prior_) # 先验概率对数值\nprint(\"feature_count_:\", clf.feature_count_) # 各类别个特征之和\nprint(\"n_features_:\", 
clf.n_features_)\nprint(\"feature_log_prob_:\", clf.feature_log_prob_) # 指定类的各特征的条件概率的对数\n# 其他参数与方法与MultinomialNB类似\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> for x in xs: dist += min(x, K - x) print(dist * 2) <|reserved_special_token_1|> N = int(input()) K = int(input()) xs = list(map(int, input().split())) dist = 0 for x in xs: dist += min(x, K - x) print(dist * 2)
flexible
{ "blob_id": "a65ab0faf08c13f007a132fb92f358a35834fdb7", "index": 2556, "step-1": "<mask token>\n", "step-2": "<mask token>\nfor x in xs:\n dist += min(x, K - x)\nprint(dist * 2)\n", "step-3": "N = int(input())\nK = int(input())\nxs = list(map(int, input().split()))\ndist = 0\nfor x in xs:\n dist += min(x, K - x)\nprint(dist * 2)\n", "step-4": null, "step-5": null, "step-ids": [ 0, 1, 2 ] }
[ 0, 1, 2 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> while count < 9: print('Number:', count) count = count + 1 print('Good Bye') <|reserved_special_token_0|> for fruit in fruits: print('current fruits:', fruit) print('Good bye') <|reserved_special_token_1|> count = 0 while count < 9: print('Number:', count) count = count + 1 print('Good Bye') fruits = ['Mango', 'Grapes', 'Apple'] for fruit in fruits: print('current fruits:', fruit) print('Good bye') <|reserved_special_token_1|> #While Loop count = 0 while count<9: print("Number:",count) count = count+1 print("Good Bye") #For Loop fruits = ['Mango','Grapes','Apple'] for fruit in fruits: print("current fruits:",fruit) print("Good bye")
flexible
{ "blob_id": "9b3040fa02cf8f039bac146f8a73384731c56722", "index": 9142, "step-1": "<mask token>\n", "step-2": "<mask token>\nwhile count < 9:\n print('Number:', count)\n count = count + 1\nprint('Good Bye')\n<mask token>\nfor fruit in fruits:\n print('current fruits:', fruit)\nprint('Good bye')\n", "step-3": "count = 0\nwhile count < 9:\n print('Number:', count)\n count = count + 1\nprint('Good Bye')\nfruits = ['Mango', 'Grapes', 'Apple']\nfor fruit in fruits:\n print('current fruits:', fruit)\nprint('Good bye')\n", "step-4": "#While Loop\ncount = 0\nwhile count<9:\n print(\"Number:\",count)\n count = count+1\n\nprint(\"Good Bye\") \n\n#For Loop \nfruits = ['Mango','Grapes','Apple']\n\nfor fruit in fruits:\n print(\"current fruits:\",fruit)\n\nprint(\"Good bye\")\n\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> def contruct_tree(pre_order, index=0): index += 1 if index >= len(pre_order): raise IndexError('wtf is wrong with you?') root = pre_order[index] if root is None: return None, index node = BST(root) node.left, index = construct(pre_order, index) node.right, index = construct(pre_order, index) return node, index <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> def contruct_tree(pre_order, index=0): index += 1 if index >= len(pre_order): raise IndexError('wtf is wrong with you?') root = pre_order[index] if root is None: return None, index node = BST(root) node.left, index = construct(pre_order, index) node.right, index = construct(pre_order, index) return node, index def contruct_tree(pre_order): tree = BST(pre_order[0]) curr = tree stack = [] i = 0 while i < len(pre_order) - 1: if curr is not None: curr.left = L[i + 1] stack.append(curr) cur = curr.left else: curr = stack.pop() curr.right = L[i + 1] cur = curr.right return tree <|reserved_special_token_1|> """ You are given pre-order traversal with a slight modification. It includes null pointers when a particular node has nil left/right child. Reconstruct the binary tree with this information. Ex. [H, B, F, None, None, E, A, None, None, None, C, None, D, None, G, I, None, None, None] H / \ B C / \ \ F E D / \ A G / I """ # time: O(n) def contruct_tree(pre_order, index=0): index += 1 if index >= len(pre_order): raise IndexError('wtf is wrong with you?') root = pre_order[index] if root is None: return (None, index) node = BST(root) node.left, index = construct(pre_order, index) node.right, index = construct(pre_order, index) return (node, index) # my solution without recursion # works? 
def contruct_tree(pre_order): tree = BST(pre_order[0]) curr = tree stack = [] i = 0 while i < len(pre_order)-1: if curr is not None: curr.left = L[i+1] stack.append(curr) cur = curr.left else: curr = stack.pop() curr.right = L[i+1] cur = curr.right return tree
flexible
{ "blob_id": "3aee336956ac6f962c34f51a27dc4abebf2cc7c8", "index": 8474, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\ndef contruct_tree(pre_order, index=0):\n index += 1\n if index >= len(pre_order):\n raise IndexError('wtf is wrong with you?')\n root = pre_order[index]\n if root is None:\n return None, index\n node = BST(root)\n node.left, index = construct(pre_order, index)\n node.right, index = construct(pre_order, index)\n return node, index\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\ndef contruct_tree(pre_order, index=0):\n index += 1\n if index >= len(pre_order):\n raise IndexError('wtf is wrong with you?')\n root = pre_order[index]\n if root is None:\n return None, index\n node = BST(root)\n node.left, index = construct(pre_order, index)\n node.right, index = construct(pre_order, index)\n return node, index\n\n\ndef contruct_tree(pre_order):\n tree = BST(pre_order[0])\n curr = tree\n stack = []\n i = 0\n while i < len(pre_order) - 1:\n if curr is not None:\n curr.left = L[i + 1]\n stack.append(curr)\n cur = curr.left\n else:\n curr = stack.pop()\n curr.right = L[i + 1]\n cur = curr.right\n return tree\n", "step-4": "\"\"\"\nYou are given pre-order traversal with a slight modification. \nIt includes null pointers when a particular node has nil left/right child. \nReconstruct the binary tree with this information.\n\nEx. 
[H, B, F, None, None, E, A, None, None, None, C, None, D, None, G, I, None, None, None]\n\n H\n / \\\n B C\n / \\ \\\nF E D\n / \\\n A G\n /\n I\n\"\"\"\n\n# time: O(n)\ndef contruct_tree(pre_order, index=0):\n index += 1\n if index >= len(pre_order):\n raise IndexError('wtf is wrong with you?')\n\n root = pre_order[index]\n if root is None:\n return (None, index)\n\n\n node = BST(root)\n node.left, index = construct(pre_order, index)\n node.right, index = construct(pre_order, index)\n\n return (node, index)\n\n\n# my solution without recursion\n# works?\n\ndef contruct_tree(pre_order):\n tree = BST(pre_order[0])\n curr = tree\n stack = []\n i = 0\n while i < len(pre_order)-1:\n if curr is not None:\n curr.left = L[i+1]\n stack.append(curr)\n cur = curr.left\n else:\n curr = stack.pop()\n curr.right = L[i+1]\n cur = curr.right\n\n return tree\n\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
# V0 class Codec: def encode(self, strs): s = "" for i in strs: s += str(len(i)) + "#" + i return s def decode(self, s): i, str = 0, [] while i < len(s): sharp = s.find("#", i) l = int(s[i:sharp]) str.append(s[sharp + 1:sharp + l + 1]) i = sharp + l + 1 return str # V1 # http://www.voidcn.com/article/p-hpbzcdjd-zo.html class Codec: def encode(self, strs): """Encodes a list of strings to a single string. :type strs: List[str] :rtype: str """ s = "" for i in strs: s += str(len(i)) + "#" + i return s def decode(self, s): """Decodes a single string to a list of strings. :type s: str :rtype: List[str] """ i, str = 0, [] while i < len(s): sharp = s.find("#", i) l = int(s[i:sharp]) str.append(s[sharp + 1:sharp + l + 1]) i = sharp + l + 1 return str # Your Codec object will be instantiated and called as such: # codec = Codec() # codec.decode(codec.encode(strs)) ### Test case : dev # V1' # https://medium.com/leetcode-%E6%BC%94%E7%AE%97%E6%B3%95%E6%95%99%E5%AD%B8/024-leetcode-271-%E6%BC%94%E7%AE%97%E6%B3%95-encode-and-decode-strings-%E5%AD%97%E4%B8%B2%E5%8A%A0%E8%A7%A3%E5%AF%86-722cafd6238 # IDEA : # ABC -> 3/ABC # ABCD -> 4/ABCD # A B C D ->1/A1/B1/C1/D # # JAVA # // Encodes a list of strings to a single string. # public String encode(List<String> strs) { # StringBuilder sb = new StringBuilder(); # for(String s : strs) { # sb.append(s.length()).append('/').append(s); # } # return sb.toString(); # } # # // Decodes a single string to a list of strings. # public List<String> decode(String s) { # List<String> ret = new ArrayList<String>(); # int i = 0; # while(i < s.length()) { # int slash = s.indexOf('/', i);// return the 1st '/' index from i # int size = Integer.valueOf(s.substring(i, slash)); // the length of encode # ret.add(s.substring(slash + 1, slash + size + 1)); // cut it off # i = slash + size + 1;// redefine the i index # } # return ret; # } # V2 # Time: O(n) # Space: O(1) class Codec(object): def encode(self, strs): """Encodes a list of strings to a single string. 
:type strs: List[str] :rtype: str """ encoded_str = "" for s in strs: encoded_str += "%0*x" % (8, len(s)) + s return encoded_str def decode(self, s): """Decodes a single string to a list of strings. :type s: str :rtype: List[str] """ i = 0 strs = [] while i < len(s): l = int(s[i:i+8], 16) strs.append(s[i+8:i+8+l]) i += 8+l return strs
normal
{ "blob_id": "b94392c9c6547415326d80ff0923cb8ba9251783", "index": 5724, "step-1": "<mask token>\n\n\nclass Codec:\n <mask token>\n\n def decode(self, s):\n \"\"\"Decodes a single string to a list of strings.\n \n :type s: str\n :rtype: List[str]\n \"\"\"\n i, str = 0, []\n while i < len(s):\n sharp = s.find('#', i)\n l = int(s[i:sharp])\n str.append(s[sharp + 1:sharp + l + 1])\n i = sharp + l + 1\n return str\n\n\nclass Codec(object):\n\n def encode(self, strs):\n \"\"\"Encodes a list of strings to a single string.\n :type strs: List[str]\n :rtype: str\n \"\"\"\n encoded_str = ''\n for s in strs:\n encoded_str += '%0*x' % (8, len(s)) + s\n return encoded_str\n\n def decode(self, s):\n \"\"\"Decodes a single string to a list of strings.\n :type s: str\n :rtype: List[str]\n \"\"\"\n i = 0\n strs = []\n while i < len(s):\n l = int(s[i:i + 8], 16)\n strs.append(s[i + 8:i + 8 + l])\n i += 8 + l\n return strs\n", "step-2": "<mask token>\n\n\nclass Codec:\n\n def encode(self, strs):\n \"\"\"Encodes a list of strings to a single string.\n \n :type strs: List[str]\n :rtype: str\n \"\"\"\n s = ''\n for i in strs:\n s += str(len(i)) + '#' + i\n return s\n\n def decode(self, s):\n \"\"\"Decodes a single string to a list of strings.\n \n :type s: str\n :rtype: List[str]\n \"\"\"\n i, str = 0, []\n while i < len(s):\n sharp = s.find('#', i)\n l = int(s[i:sharp])\n str.append(s[sharp + 1:sharp + l + 1])\n i = sharp + l + 1\n return str\n\n\nclass Codec(object):\n\n def encode(self, strs):\n \"\"\"Encodes a list of strings to a single string.\n :type strs: List[str]\n :rtype: str\n \"\"\"\n encoded_str = ''\n for s in strs:\n encoded_str += '%0*x' % (8, len(s)) + s\n return encoded_str\n\n def decode(self, s):\n \"\"\"Decodes a single string to a list of strings.\n :type s: str\n :rtype: List[str]\n \"\"\"\n i = 0\n strs = []\n while i < len(s):\n l = int(s[i:i + 8], 16)\n strs.append(s[i + 8:i + 8 + l])\n i += 8 + l\n return strs\n", "step-3": "class Codec:\n <mask token>\n <mask 
token>\n\n\nclass Codec:\n\n def encode(self, strs):\n \"\"\"Encodes a list of strings to a single string.\n \n :type strs: List[str]\n :rtype: str\n \"\"\"\n s = ''\n for i in strs:\n s += str(len(i)) + '#' + i\n return s\n\n def decode(self, s):\n \"\"\"Decodes a single string to a list of strings.\n \n :type s: str\n :rtype: List[str]\n \"\"\"\n i, str = 0, []\n while i < len(s):\n sharp = s.find('#', i)\n l = int(s[i:sharp])\n str.append(s[sharp + 1:sharp + l + 1])\n i = sharp + l + 1\n return str\n\n\nclass Codec(object):\n\n def encode(self, strs):\n \"\"\"Encodes a list of strings to a single string.\n :type strs: List[str]\n :rtype: str\n \"\"\"\n encoded_str = ''\n for s in strs:\n encoded_str += '%0*x' % (8, len(s)) + s\n return encoded_str\n\n def decode(self, s):\n \"\"\"Decodes a single string to a list of strings.\n :type s: str\n :rtype: List[str]\n \"\"\"\n i = 0\n strs = []\n while i < len(s):\n l = int(s[i:i + 8], 16)\n strs.append(s[i + 8:i + 8 + l])\n i += 8 + l\n return strs\n", "step-4": "class Codec:\n\n def encode(self, strs):\n s = ''\n for i in strs:\n s += str(len(i)) + '#' + i\n return s\n <mask token>\n\n\nclass Codec:\n\n def encode(self, strs):\n \"\"\"Encodes a list of strings to a single string.\n \n :type strs: List[str]\n :rtype: str\n \"\"\"\n s = ''\n for i in strs:\n s += str(len(i)) + '#' + i\n return s\n\n def decode(self, s):\n \"\"\"Decodes a single string to a list of strings.\n \n :type s: str\n :rtype: List[str]\n \"\"\"\n i, str = 0, []\n while i < len(s):\n sharp = s.find('#', i)\n l = int(s[i:sharp])\n str.append(s[sharp + 1:sharp + l + 1])\n i = sharp + l + 1\n return str\n\n\nclass Codec(object):\n\n def encode(self, strs):\n \"\"\"Encodes a list of strings to a single string.\n :type strs: List[str]\n :rtype: str\n \"\"\"\n encoded_str = ''\n for s in strs:\n encoded_str += '%0*x' % (8, len(s)) + s\n return encoded_str\n\n def decode(self, s):\n \"\"\"Decodes a single string to a list of strings.\n :type s: str\n 
:rtype: List[str]\n \"\"\"\n i = 0\n strs = []\n while i < len(s):\n l = int(s[i:i + 8], 16)\n strs.append(s[i + 8:i + 8 + l])\n i += 8 + l\n return strs\n", "step-5": "# V0 \nclass Codec:\n def encode(self, strs):\n s = \"\"\n for i in strs:\n s += str(len(i)) + \"#\" + i\n return s\n\n def decode(self, s):\n i, str = 0, []\n while i < len(s):\n sharp = s.find(\"#\", i)\n l = int(s[i:sharp])\n str.append(s[sharp + 1:sharp + l + 1])\n i = sharp + l + 1\n return str\n\n# V1 \n# http://www.voidcn.com/article/p-hpbzcdjd-zo.html\nclass Codec:\n def encode(self, strs):\n \"\"\"Encodes a list of strings to a single string.\n \n :type strs: List[str]\n :rtype: str\n \"\"\"\n s = \"\"\n for i in strs:\n s += str(len(i)) + \"#\" + i\n return s\n\n def decode(self, s):\n \"\"\"Decodes a single string to a list of strings.\n \n :type s: str\n :rtype: List[str]\n \"\"\"\n i, str = 0, []\n while i < len(s):\n sharp = s.find(\"#\", i)\n l = int(s[i:sharp])\n str.append(s[sharp + 1:sharp + l + 1])\n i = sharp + l + 1\n return str\n# Your Codec object will be instantiated and called as such:\n# codec = Codec()\n# codec.decode(codec.encode(strs))\n\n### Test case : dev \n\n# V1'\n# https://medium.com/leetcode-%E6%BC%94%E7%AE%97%E6%B3%95%E6%95%99%E5%AD%B8/024-leetcode-271-%E6%BC%94%E7%AE%97%E6%B3%95-encode-and-decode-strings-%E5%AD%97%E4%B8%B2%E5%8A%A0%E8%A7%A3%E5%AF%86-722cafd6238\n# IDEA :\n# ABC -> 3/ABC \n# ABCD -> 4/ABCD\n# A B C D ->1/A1/B1/C1/D\n#\n# JAVA\n# // Encodes a list of strings to a single string.\n# public String encode(List<String> strs) {\n# StringBuilder sb = new StringBuilder();\n# for(String s : strs) {\n# sb.append(s.length()).append('/').append(s);\n# }\n# return sb.toString();\n# }\n#\n# // Decodes a single string to a list of strings.\n# public List<String> decode(String s) {\n# List<String> ret = new ArrayList<String>();\n# int i = 0;\n# while(i < s.length()) {\n# int slash = s.indexOf('/', i);// return the 1st '/' index from i\n# int size = 
Integer.valueOf(s.substring(i, slash)); // the length of encode\n# ret.add(s.substring(slash + 1, slash + size + 1)); // cut it off\n# i = slash + size + 1;// redefine the i index \n# }\n# return ret;\n# }\n\n# V2 \n# Time: O(n)\n# Space: O(1)\nclass Codec(object):\n\n def encode(self, strs):\n \"\"\"Encodes a list of strings to a single string.\n :type strs: List[str]\n :rtype: str\n \"\"\"\n encoded_str = \"\"\n for s in strs:\n encoded_str += \"%0*x\" % (8, len(s)) + s\n return encoded_str\n\n\n def decode(self, s):\n \"\"\"Decodes a single string to a list of strings.\n :type s: str\n :rtype: List[str]\n \"\"\"\n i = 0\n strs = []\n while i < len(s):\n l = int(s[i:i+8], 16)\n strs.append(s[i+8:i+8+l])\n i += 8+l\n return strs", "step-ids": [ 5, 6, 7, 8, 10 ] }
[ 5, 6, 7, 8, 10 ]
import uuid
from datetime import date
import os

import humanize


class Context:
    """Fake AWS Lambda context object for invoking handlers locally.

    Mirrors the attributes of the real ``context`` argument that AWS Lambda
    passes to a handler: function name/version/ARN, request id, log group and
    log stream names, and the memory limit of the container.
    """

    def __init__(self, function_name, function_version):
        self.function_name = function_name
        self.function_version = function_version
        # NOTE(review): account id and the log-stream suffix are hard-coded
        # placeholders, and the region is fixed to eu-north-1.
        self.invoked_function_arn = (
            "arn:aws:lambda:eu-north-1:000000000000:function:{}".format(
                self.function_name))
        self.aws_request_id = uuid.uuid1()
        self.log_group_name = "/aws/lambda/{}".format(self.function_name)
        today = date.today()
        self.log_stream_name = "{}/[{}]4459c970fa6d4c77aca62c95850fce54".format(
            today.strftime("%Y/%m/%d"), self.function_version)
        self.memory_limit_in_mb = self.memory()

    def memory(self):
        """Return the container memory limit as a human readable string.

        Reads the cgroup (v1) memory limit of the current container.  On
        hosts where that cgroup file does not exist (e.g. cgroup v2), the
        limit cannot be determined and ``None`` is stored/returned instead of
        crashing.
        """
        limit_path = "/sys/fs/cgroup/memory/memory.limit_in_bytes"
        try:
            # Read the file directly instead of shelling out to `cat` via
            # os.popen(): the popen call leaked the pipe and, when the file
            # was missing, `cat` printed to stderr and int('') raised
            # ValueError.
            with open(limit_path) as f:
                mem = int(f.read())
        except (OSError, ValueError):
            self.memory_limit_in_mb = None
        else:
            self.memory_limit_in_mb = humanize.naturalsize(mem, gnu=True)
        return self.memory_limit_in_mb
normal
{ "blob_id": "1c685514f53a320226402a4e4d8f3b3187fad615", "index": 7814, "step-1": "<mask token>\n\n\nclass Context:\n <mask token>\n <mask token>\n", "step-2": "<mask token>\n\n\nclass Context:\n\n def __init__(self, function_name, function_version):\n self.function_name = function_name\n self.function_version = function_version\n self.invoked_function_arn = (\n 'arn:aws:lambda:eu-north-1:000000000000:function:{}'.format(\n self.function_name))\n self.aws_request_id = uuid.uuid1()\n self.log_group_name = '/aws/lambda/{}'.format(self.function_name)\n today = date.today()\n self.log_stream_name = ('{}/[{}]4459c970fa6d4c77aca62c95850fce54'.\n format(today.strftime('%Y/%m/%d'), self.function_version))\n self.memory_limit_in_mb = Context.memory(self)\n pass\n <mask token>\n", "step-3": "<mask token>\n\n\nclass Context:\n\n def __init__(self, function_name, function_version):\n self.function_name = function_name\n self.function_version = function_version\n self.invoked_function_arn = (\n 'arn:aws:lambda:eu-north-1:000000000000:function:{}'.format(\n self.function_name))\n self.aws_request_id = uuid.uuid1()\n self.log_group_name = '/aws/lambda/{}'.format(self.function_name)\n today = date.today()\n self.log_stream_name = ('{}/[{}]4459c970fa6d4c77aca62c95850fce54'.\n format(today.strftime('%Y/%m/%d'), self.function_version))\n self.memory_limit_in_mb = Context.memory(self)\n pass\n\n def memory(self):\n mem = int(os.popen(\n 'cat /sys/fs/cgroup/memory/memory.limit_in_bytes').read())\n self.memory_limit_in_mb = humanize.naturalsize(mem, gnu=True)\n return self.memory_limit_in_mb\n pass\n", "step-4": "import uuid\nfrom datetime import date\nimport os\nimport humanize\n\n\nclass Context:\n\n def __init__(self, function_name, function_version):\n self.function_name = function_name\n self.function_version = function_version\n self.invoked_function_arn = (\n 'arn:aws:lambda:eu-north-1:000000000000:function:{}'.format(\n self.function_name))\n self.aws_request_id = uuid.uuid1()\n 
self.log_group_name = '/aws/lambda/{}'.format(self.function_name)\n today = date.today()\n self.log_stream_name = ('{}/[{}]4459c970fa6d4c77aca62c95850fce54'.\n format(today.strftime('%Y/%m/%d'), self.function_version))\n self.memory_limit_in_mb = Context.memory(self)\n pass\n\n def memory(self):\n mem = int(os.popen(\n 'cat /sys/fs/cgroup/memory/memory.limit_in_bytes').read())\n self.memory_limit_in_mb = humanize.naturalsize(mem, gnu=True)\n return self.memory_limit_in_mb\n pass\n", "step-5": "import uuid\nfrom datetime import date\nimport os\nimport humanize\n\n\nclass Context:\n def __init__(self, function_name, function_version):\n self.function_name = function_name\n self.function_version = function_version\n self.invoked_function_arn = \"arn:aws:lambda:eu-north-1:000000000000:function:{}\".format(self.function_name)\n self.aws_request_id = uuid.uuid1()\n self.log_group_name = \"/aws/lambda/{}\".format(self.function_name)\n today = date.today()\n self.log_stream_name = \"{}/[{}]4459c970fa6d4c77aca62c95850fce54\".format(today.strftime(\"%Y/%m/%d\"), self.function_version)\n self.memory_limit_in_mb = Context.memory(self)\n pass\n\n def memory(self):\n mem = int(os.popen(\"cat /sys/fs/cgroup/memory/memory.limit_in_bytes\").read())\n self.memory_limit_in_mb = humanize.naturalsize(mem, gnu=True)\n return (self.memory_limit_in_mb)\n pass\n", "step-ids": [ 1, 2, 3, 4, 5 ] }
[ 1, 2, 3, 4, 5 ]
<|reserved_special_token_0|> class MeasurementsSerializer(serializers.ModelSerializer): class Meta: model = Measurements fields = '__all__' <|reserved_special_token_0|> class CountSerializer(serializers.Serializer): key = serializers.CharField(max_length=20) value = serializers.IntegerField() class OperatingSystemSerializer(serializers.ModelSerializer): value = serializers.CharField(max_length=30) key = serializers.CharField(source='versionname', max_length=30) class Meta: model = Measurements fields = 'key', 'value' class VendorsSerializer(serializers.ModelSerializer): value = serializers.CharField(max_length=30) key = serializers.CharField(source='devicemanufacturer', max_length=30) class Meta: model = Measurements fields = 'key', 'value' class GlobalSerializer(serializers.Serializer): key = serializers.CharField(max_length=20) avg = serializers.IntegerField() min = serializers.IntegerField() max = serializers.IntegerField() <|reserved_special_token_1|> <|reserved_special_token_0|> class MeasurementsSerializer(serializers.ModelSerializer): class Meta: model = Measurements fields = '__all__' def __init__(self, *args, **kwargs): super(MeasurementsSerializer, self).__init__(*args, **kwargs) request = self.context.get('request') if request and request.query_params.get('fields'): fields = request.query_params.get('fields') if fields: fields = fields.split(',') allowed = set(fields) existing = set(self.fields.keys()) for field_name in (existing - allowed): self.fields.pop(field_name) class CountSerializer(serializers.Serializer): key = serializers.CharField(max_length=20) value = serializers.IntegerField() class OperatingSystemSerializer(serializers.ModelSerializer): value = serializers.CharField(max_length=30) key = serializers.CharField(source='versionname', max_length=30) class Meta: model = Measurements fields = 'key', 'value' class VendorsSerializer(serializers.ModelSerializer): value = serializers.CharField(max_length=30) key = 
serializers.CharField(source='devicemanufacturer', max_length=30) class Meta: model = Measurements fields = 'key', 'value' class GlobalSerializer(serializers.Serializer): key = serializers.CharField(max_length=20) avg = serializers.IntegerField() min = serializers.IntegerField() max = serializers.IntegerField() <|reserved_special_token_1|> <|reserved_special_token_0|> class V2OfUsersSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = V2OfUsers fields = 'firstname', 'lastname', 'username', 'email', 'password' extra_kwargs = {'password': {'write_only': True}} <|reserved_special_token_0|> class MeasurementsSerializer(serializers.ModelSerializer): class Meta: model = Measurements fields = '__all__' def __init__(self, *args, **kwargs): super(MeasurementsSerializer, self).__init__(*args, **kwargs) request = self.context.get('request') if request and request.query_params.get('fields'): fields = request.query_params.get('fields') if fields: fields = fields.split(',') allowed = set(fields) existing = set(self.fields.keys()) for field_name in (existing - allowed): self.fields.pop(field_name) class CountSerializer(serializers.Serializer): key = serializers.CharField(max_length=20) value = serializers.IntegerField() class OperatingSystemSerializer(serializers.ModelSerializer): value = serializers.CharField(max_length=30) key = serializers.CharField(source='versionname', max_length=30) class Meta: model = Measurements fields = 'key', 'value' class VendorsSerializer(serializers.ModelSerializer): value = serializers.CharField(max_length=30) key = serializers.CharField(source='devicemanufacturer', max_length=30) class Meta: model = Measurements fields = 'key', 'value' class GlobalSerializer(serializers.Serializer): key = serializers.CharField(max_length=20) avg = serializers.IntegerField() min = serializers.IntegerField() max = serializers.IntegerField() <|reserved_special_token_1|> <|reserved_special_token_0|> class 
V2OfUsersSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = V2OfUsers fields = 'firstname', 'lastname', 'username', 'email', 'password' extra_kwargs = {'password': {'write_only': True}} def create(self, validated_data): user = User(email=validated_data['email'], username=validated_data[ 'username']) user.set_password(validated_data['password']) user.save() Token.objects.create(user=user) return user class MeasurementsSerializer(serializers.ModelSerializer): class Meta: model = Measurements fields = '__all__' def __init__(self, *args, **kwargs): super(MeasurementsSerializer, self).__init__(*args, **kwargs) request = self.context.get('request') if request and request.query_params.get('fields'): fields = request.query_params.get('fields') if fields: fields = fields.split(',') allowed = set(fields) existing = set(self.fields.keys()) for field_name in (existing - allowed): self.fields.pop(field_name) class CountSerializer(serializers.Serializer): key = serializers.CharField(max_length=20) value = serializers.IntegerField() class OperatingSystemSerializer(serializers.ModelSerializer): value = serializers.CharField(max_length=30) key = serializers.CharField(source='versionname', max_length=30) class Meta: model = Measurements fields = 'key', 'value' class VendorsSerializer(serializers.ModelSerializer): value = serializers.CharField(max_length=30) key = serializers.CharField(source='devicemanufacturer', max_length=30) class Meta: model = Measurements fields = 'key', 'value' class GlobalSerializer(serializers.Serializer): key = serializers.CharField(max_length=20) avg = serializers.IntegerField() min = serializers.IntegerField() max = serializers.IntegerField() <|reserved_special_token_1|> # myapp/serializers.py from rest_framework import serializers from rest_framework.authtoken.models import Token from .models import * # Serializers define the API representation. 
class GeneralSerializer(serializers.ModelSerializer):
    """Model serializer whose target model is injected at runtime."""

    class Meta:
        model = None
        fields = '__all__'


class V2OfUsersSerializer(serializers.HyperlinkedModelSerializer):
    """Serializer used for registering API users.

    The password is write-only so it is accepted on input but never echoed
    back in responses.
    """

    class Meta:
        model = V2OfUsers
        fields = ('firstname', 'lastname', 'username', 'email', 'password')
        extra_kwargs = {'password': {'write_only': True}}

    def create(self, validated_data):
        # NOTE(review): this builds a ``User`` (presumably pulled in via
        # ``from .models import *``) rather than a ``V2OfUsers`` row --
        # confirm that is intentional.
        new_user = User(
            email=validated_data['email'],
            username=validated_data['username'],
        )
        new_user.set_password(validated_data['password'])
        new_user.save()
        # Issue an auth token for the freshly created account.
        Token.objects.create(user=new_user)
        return new_user


class MeasurementsSerializer(serializers.ModelSerializer):
    """Serializer for ``Measurements`` with optional field filtering.

    When the request carries a ``fields`` query parameter (a comma separated
    list of field names), only those fields are kept in the serialized
    output; every other declared field is dropped.
    """

    class Meta:
        model = Measurements
        fields = '__all__'

    def __init__(self, *args, **kwargs):
        super(MeasurementsSerializer, self).__init__(*args, **kwargs)
        request = self.context.get('request')
        requested = request.query_params.get('fields') if request else None
        if requested:
            wanted = set(requested.split(','))
            # Drop every declared field the caller did not ask for.
            for name in set(self.fields) - wanted:
                self.fields.pop(name)


# Serializer for counting providers and network type, e.g. 2G, 3G, 4G.
class CountSerializer(serializers.Serializer):
    key = serializers.CharField(max_length=20)
    value = serializers.IntegerField()


# Serializer for mobile operating system counts.
class OperatingSystemSerializer(serializers.ModelSerializer):
    value = serializers.CharField(max_length=30)
    key = serializers.CharField(source='versionname', max_length=30)

    class Meta:
        model = Measurements
        fields = ('key', 'value')


# Serializer for device vendor counts.
class VendorsSerializer(serializers.ModelSerializer):
    value = serializers.CharField(max_length=30)
    key = serializers.CharField(source='devicemanufacturer', max_length=30)

    class Meta:
        model = Measurements
        fields = ('key', 'value')


# General serializer for downlink/uplink stats over all providers and network
# types, used with date-range parameters.
class GlobalSerializer(serializers.Serializer):
    key = serializers.CharField(max_length=20)
    avg = serializers.IntegerField()
    min = serializers.IntegerField()
    max = serializers.IntegerField()
flexible
{ "blob_id": "44cbe1face91d3ac7edcd93d0b470bce90c8b674", "index": 2916, "step-1": "<mask token>\n\n\nclass MeasurementsSerializer(serializers.ModelSerializer):\n\n\n class Meta:\n model = Measurements\n fields = '__all__'\n <mask token>\n\n\nclass CountSerializer(serializers.Serializer):\n key = serializers.CharField(max_length=20)\n value = serializers.IntegerField()\n\n\nclass OperatingSystemSerializer(serializers.ModelSerializer):\n value = serializers.CharField(max_length=30)\n key = serializers.CharField(source='versionname', max_length=30)\n\n\n class Meta:\n model = Measurements\n fields = 'key', 'value'\n\n\nclass VendorsSerializer(serializers.ModelSerializer):\n value = serializers.CharField(max_length=30)\n key = serializers.CharField(source='devicemanufacturer', max_length=30)\n\n\n class Meta:\n model = Measurements\n fields = 'key', 'value'\n\n\nclass GlobalSerializer(serializers.Serializer):\n key = serializers.CharField(max_length=20)\n avg = serializers.IntegerField()\n min = serializers.IntegerField()\n max = serializers.IntegerField()\n", "step-2": "<mask token>\n\n\nclass MeasurementsSerializer(serializers.ModelSerializer):\n\n\n class Meta:\n model = Measurements\n fields = '__all__'\n\n def __init__(self, *args, **kwargs):\n super(MeasurementsSerializer, self).__init__(*args, **kwargs)\n request = self.context.get('request')\n if request and request.query_params.get('fields'):\n fields = request.query_params.get('fields')\n if fields:\n fields = fields.split(',')\n allowed = set(fields)\n existing = set(self.fields.keys())\n for field_name in (existing - allowed):\n self.fields.pop(field_name)\n\n\nclass CountSerializer(serializers.Serializer):\n key = serializers.CharField(max_length=20)\n value = serializers.IntegerField()\n\n\nclass OperatingSystemSerializer(serializers.ModelSerializer):\n value = serializers.CharField(max_length=30)\n key = serializers.CharField(source='versionname', max_length=30)\n\n\n class Meta:\n model = Measurements\n 
fields = 'key', 'value'\n\n\nclass VendorsSerializer(serializers.ModelSerializer):\n value = serializers.CharField(max_length=30)\n key = serializers.CharField(source='devicemanufacturer', max_length=30)\n\n\n class Meta:\n model = Measurements\n fields = 'key', 'value'\n\n\nclass GlobalSerializer(serializers.Serializer):\n key = serializers.CharField(max_length=20)\n avg = serializers.IntegerField()\n min = serializers.IntegerField()\n max = serializers.IntegerField()\n", "step-3": "<mask token>\n\n\nclass V2OfUsersSerializer(serializers.HyperlinkedModelSerializer):\n\n\n class Meta:\n model = V2OfUsers\n fields = 'firstname', 'lastname', 'username', 'email', 'password'\n extra_kwargs = {'password': {'write_only': True}}\n <mask token>\n\n\nclass MeasurementsSerializer(serializers.ModelSerializer):\n\n\n class Meta:\n model = Measurements\n fields = '__all__'\n\n def __init__(self, *args, **kwargs):\n super(MeasurementsSerializer, self).__init__(*args, **kwargs)\n request = self.context.get('request')\n if request and request.query_params.get('fields'):\n fields = request.query_params.get('fields')\n if fields:\n fields = fields.split(',')\n allowed = set(fields)\n existing = set(self.fields.keys())\n for field_name in (existing - allowed):\n self.fields.pop(field_name)\n\n\nclass CountSerializer(serializers.Serializer):\n key = serializers.CharField(max_length=20)\n value = serializers.IntegerField()\n\n\nclass OperatingSystemSerializer(serializers.ModelSerializer):\n value = serializers.CharField(max_length=30)\n key = serializers.CharField(source='versionname', max_length=30)\n\n\n class Meta:\n model = Measurements\n fields = 'key', 'value'\n\n\nclass VendorsSerializer(serializers.ModelSerializer):\n value = serializers.CharField(max_length=30)\n key = serializers.CharField(source='devicemanufacturer', max_length=30)\n\n\n class Meta:\n model = Measurements\n fields = 'key', 'value'\n\n\nclass GlobalSerializer(serializers.Serializer):\n key = 
serializers.CharField(max_length=20)\n avg = serializers.IntegerField()\n min = serializers.IntegerField()\n max = serializers.IntegerField()\n", "step-4": "<mask token>\n\n\nclass V2OfUsersSerializer(serializers.HyperlinkedModelSerializer):\n\n\n class Meta:\n model = V2OfUsers\n fields = 'firstname', 'lastname', 'username', 'email', 'password'\n extra_kwargs = {'password': {'write_only': True}}\n\n def create(self, validated_data):\n user = User(email=validated_data['email'], username=validated_data[\n 'username'])\n user.set_password(validated_data['password'])\n user.save()\n Token.objects.create(user=user)\n return user\n\n\nclass MeasurementsSerializer(serializers.ModelSerializer):\n\n\n class Meta:\n model = Measurements\n fields = '__all__'\n\n def __init__(self, *args, **kwargs):\n super(MeasurementsSerializer, self).__init__(*args, **kwargs)\n request = self.context.get('request')\n if request and request.query_params.get('fields'):\n fields = request.query_params.get('fields')\n if fields:\n fields = fields.split(',')\n allowed = set(fields)\n existing = set(self.fields.keys())\n for field_name in (existing - allowed):\n self.fields.pop(field_name)\n\n\nclass CountSerializer(serializers.Serializer):\n key = serializers.CharField(max_length=20)\n value = serializers.IntegerField()\n\n\nclass OperatingSystemSerializer(serializers.ModelSerializer):\n value = serializers.CharField(max_length=30)\n key = serializers.CharField(source='versionname', max_length=30)\n\n\n class Meta:\n model = Measurements\n fields = 'key', 'value'\n\n\nclass VendorsSerializer(serializers.ModelSerializer):\n value = serializers.CharField(max_length=30)\n key = serializers.CharField(source='devicemanufacturer', max_length=30)\n\n\n class Meta:\n model = Measurements\n fields = 'key', 'value'\n\n\nclass GlobalSerializer(serializers.Serializer):\n key = serializers.CharField(max_length=20)\n avg = serializers.IntegerField()\n min = serializers.IntegerField()\n max = 
serializers.IntegerField()\n", "step-5": "# myapp/serializers.py\nfrom rest_framework import serializers\nfrom rest_framework.authtoken.models import Token\nfrom .models import *\n\n\n# Serializers define the API representation.\nclass GeneralSerializer(serializers.ModelSerializer):\n class Meta:\n model = None\n fields = '__all__'\n\n\nclass V2OfUsersSerializer(serializers.HyperlinkedModelSerializer):\n class Meta:\n model = V2OfUsers\n fields = ('firstname', 'lastname', 'username', 'email', 'password')\n extra_kwargs = {'password': {'write_only': True}}\n\n def create(self, validated_data):\n user = User(\n email=validated_data['email'],\n username=validated_data['username']\n )\n user.set_password(validated_data['password'])\n user.save()\n Token.objects.create(user=user)\n return user\n\n\nclass MeasurementsSerializer(serializers.ModelSerializer):\n class Meta:\n model = Measurements\n fields = '__all__'\n\n def __init__(self, *args, **kwargs):\n super(MeasurementsSerializer, self).__init__(*args, **kwargs)\n request = self.context.get(\"request\")\n if request and request.query_params.get('fields'):\n fields = request.query_params.get('fields')\n if fields:\n fields = fields.split(',')\n allowed = set(fields)\n existing = set(self.fields.keys())\n for field_name in existing - allowed:\n self.fields.pop(field_name)\n\n\n# Serializer for Counting Providers\n# and Network Type e.g 2G, 3G, 4G\n\n\nclass CountSerializer(serializers.Serializer):\n key = serializers.CharField(max_length=20)\n value = serializers.IntegerField()\n\n# Serializer for Mobile Operating System\n\n\nclass OperatingSystemSerializer(serializers.ModelSerializer):\n value = serializers.CharField(max_length=30)\n key = serializers.CharField(source='versionname', max_length=30)\n\n class Meta:\n model = Measurements\n fields = ('key', 'value')\n\n\n# Serializer for Vendors\n\n\nclass VendorsSerializer(serializers.ModelSerializer):\n value = serializers.CharField(max_length=30)\n key = 
serializers.CharField(source='devicemanufacturer', max_length=30)\n\n class Meta:\n model = Measurements\n fields = ('key', 'value')\n\n\n# General Serializer for DownLink and UpLink for all\n# Providers and Network Types with date range parameters\n\nclass GlobalSerializer(serializers.Serializer):\n key = serializers.CharField(max_length=20)\n avg = serializers.IntegerField()\n min = serializers.IntegerField()\n max = serializers.IntegerField()\n", "step-ids": [ 9, 10, 11, 12, 15 ] }
[ 9, 10, 11, 12, 15 ]
#!/usr/bin/env python3

import argparse
import json
import os
import random
import timeit
from glob import glob

import numpy as np


def parse_args():
    """Parse command line arguments.

    Returns:
        argparse.Namespace: parsed arguments.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--train_dir',
                        help='directory containing spacenet7 train dataset',
                        default='/data/spacenet7/spacenet7/train/')
    parser.add_argument('--mask_dir',
                        help='directory containing building mask image files',
                        default='/data/spacenet7/building_masks/')
    parser.add_argument('--out_dir',
                        help='output root directory',
                        default='/data/spacenet7/split/')
    parser.add_argument('--split_num',
                        help='number of split',
                        type=int,
                        default=5)
    return parser.parse_args()


def dump_file_paths(aois, output_path, train_dir, mask_dir):
    """Dump image/mask path records for the given AOIs to a JSON file.

    For every masked image of every AOI, one record is written containing the
    image path, the matching building-mask path, and the paths of the
    previous and next frames in the time series (clamped at the first/last
    frame).

    Args:
        aois (list of str): AOI directory names under ``train_dir``.
        output_path (str): path of the JSON file to write.
        train_dir (str): root directory of the spacenet7 train dataset.
        mask_dir (str): root directory of the building mask images.
    """
    results = []

    for aoi in aois:
        image_paths = sorted(
            glob(os.path.join(train_dir, aoi, 'images_masked', '*.tif')))

        n_frames = len(image_paths)
        for i, image_path in enumerate(image_paths):
            # The mask file shares its basename with the image.
            filename = os.path.basename(image_path)
            mask_path = os.path.join(mask_dir, aoi, filename)
            assert os.path.exists(mask_path)

            # Previous/next frames, clamped at the sequence boundaries.
            image_prev_path = image_paths[max(i - 1, 0)]
            image_next_path = image_paths[min(i + 1, n_frames - 1)]

            results.append({
                'image_masked': image_path,
                'building_mask': mask_path,
                'image_masked_prev': image_prev_path,
                'image_masked_next': image_next_path,
            })

    with open(output_path, 'w') as f:
        json.dump(results,
                  f,
                  ensure_ascii=False,
                  indent=4,
                  sort_keys=False,
                  separators=(',', ': '))


if __name__ == '__main__':
    t0 = timeit.default_timer()

    args = parse_args()

    os.makedirs(args.out_dir)

    aois = sorted(d for d in os.listdir(args.train_dir)
                  if os.path.isdir(os.path.join(args.train_dir, d)))

    random.seed(777)
    random.shuffle(aois)

    # Split AOIs into n folds. Plain Python lists are used instead of
    # np.array([...]) because the folds have unequal lengths whenever
    # len(aois) % n != 0, and building a ragged ndarray raises an error on
    # modern numpy (NEP 34).
    n = args.split_num
    aois_divided = [aois[i::n] for i in range(n)]

    for val_idx in range(n):
        # Dump file paths for the val split.
        dump_file_paths(aois_divided[val_idx],
                        os.path.join(args.out_dir, f'val_{val_idx}.json'),
                        args.train_dir, args.mask_dir)

        # Dump file paths for the train split: every fold except the val
        # fold, concatenated in fold order (same order as the original
        # np.concatenate over the boolean-masked array).
        train_aois = [
            aoi for idx, fold in enumerate(aois_divided) if idx != val_idx
            for aoi in fold
        ]
        dump_file_paths(train_aois,
                        os.path.join(args.out_dir, f'train_{val_idx}.json'),
                        args.train_dir, args.mask_dir)

    elapsed = timeit.default_timer() - t0
    print('Time: {:.3f} min'.format(elapsed / 60.0))
normal
{ "blob_id": "71eadf5073b5ed13c7d4a58b2aeb52f550a32238", "index": 3104, "step-1": "<mask token>\n\n\ndef parse_args():\n \"\"\"[summary]\n\n Returns:\n [type]: [description]\n \"\"\"\n parser = argparse.ArgumentParser()\n parser.add_argument('--train_dir', help=\n 'directory containing spacenet7 train dataset', default=\n '/data/spacenet7/spacenet7/train/')\n parser.add_argument('--mask_dir', help=\n 'directory containing building mask image files', default=\n '/data/spacenet7/building_masks/')\n parser.add_argument('--out_dir', help='output root directory', default=\n '/data/spacenet7/split/')\n parser.add_argument('--split_num', help='number of split', type=int,\n default=5)\n return parser.parse_args()\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\ndef parse_args():\n \"\"\"[summary]\n\n Returns:\n [type]: [description]\n \"\"\"\n parser = argparse.ArgumentParser()\n parser.add_argument('--train_dir', help=\n 'directory containing spacenet7 train dataset', default=\n '/data/spacenet7/spacenet7/train/')\n parser.add_argument('--mask_dir', help=\n 'directory containing building mask image files', default=\n '/data/spacenet7/building_masks/')\n parser.add_argument('--out_dir', help='output root directory', default=\n '/data/spacenet7/split/')\n parser.add_argument('--split_num', help='number of split', type=int,\n default=5)\n return parser.parse_args()\n\n\ndef dump_file_paths(aois, output_path, train_dir, mask_dir):\n \"\"\"[summary]\n\n Args:\n aois ([type]): [description]\n output_path ([type]): [description]\n train_dir ([type]): [description]\n mask_dir ([type]): [description]\n \"\"\"\n results = []\n for aoi in aois:\n image_paths = glob(os.path.join(train_dir, aoi, 'images_masked',\n '*.tif'))\n image_paths.sort()\n N = len(image_paths)\n for i in range(N):\n image_path = image_paths[i]\n filename = os.path.basename(image_path)\n mask_path = os.path.join(mask_dir, aoi, filename)\n assert os.path.exists(mask_path)\n image_prev_path = image_paths[0] if i 
== 0 else image_paths[i - 1]\n image_next_path = image_paths[N - 1\n ] if i == N - 1 else image_paths[i + 1]\n result = {}\n result['image_masked'] = image_path\n result['building_mask'] = mask_path\n result['image_masked_prev'] = image_prev_path\n result['image_masked_next'] = image_next_path\n results.append(result)\n with open(output_path, 'w') as f:\n json.dump(results, f, ensure_ascii=False, indent=4, sort_keys=False,\n separators=(',', ': '))\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\ndef parse_args():\n \"\"\"[summary]\n\n Returns:\n [type]: [description]\n \"\"\"\n parser = argparse.ArgumentParser()\n parser.add_argument('--train_dir', help=\n 'directory containing spacenet7 train dataset', default=\n '/data/spacenet7/spacenet7/train/')\n parser.add_argument('--mask_dir', help=\n 'directory containing building mask image files', default=\n '/data/spacenet7/building_masks/')\n parser.add_argument('--out_dir', help='output root directory', default=\n '/data/spacenet7/split/')\n parser.add_argument('--split_num', help='number of split', type=int,\n default=5)\n return parser.parse_args()\n\n\ndef dump_file_paths(aois, output_path, train_dir, mask_dir):\n \"\"\"[summary]\n\n Args:\n aois ([type]): [description]\n output_path ([type]): [description]\n train_dir ([type]): [description]\n mask_dir ([type]): [description]\n \"\"\"\n results = []\n for aoi in aois:\n image_paths = glob(os.path.join(train_dir, aoi, 'images_masked',\n '*.tif'))\n image_paths.sort()\n N = len(image_paths)\n for i in range(N):\n image_path = image_paths[i]\n filename = os.path.basename(image_path)\n mask_path = os.path.join(mask_dir, aoi, filename)\n assert os.path.exists(mask_path)\n image_prev_path = image_paths[0] if i == 0 else image_paths[i - 1]\n image_next_path = image_paths[N - 1\n ] if i == N - 1 else image_paths[i + 1]\n result = {}\n result['image_masked'] = image_path\n result['building_mask'] = mask_path\n result['image_masked_prev'] = image_prev_path\n 
result['image_masked_next'] = image_next_path\n results.append(result)\n with open(output_path, 'w') as f:\n json.dump(results, f, ensure_ascii=False, indent=4, sort_keys=False,\n separators=(',', ': '))\n\n\nif __name__ == '__main__':\n t0 = timeit.default_timer()\n args = parse_args()\n os.makedirs(args.out_dir)\n aois = sorted([d for d in os.listdir(args.train_dir) if os.path.isdir(\n os.path.join(args.train_dir, d))])\n random.seed(777)\n random.shuffle(aois)\n n = args.split_num\n aois_divided = np.array([aois[i::n] for i in range(n)])\n for val_idx in range(n):\n val_aois = aois_divided[val_idx]\n dump_file_paths(val_aois, os.path.join(args.out_dir,\n f'val_{val_idx}.json'), args.train_dir, args.mask_dir)\n train_mask = np.ones(n, dtype=bool)\n train_mask[val_idx] = False\n train_aois = aois_divided[train_mask]\n train_aois = np.concatenate(train_aois, axis=0).tolist()\n dump_file_paths(train_aois, os.path.join(args.out_dir,\n f'train_{val_idx}.json'), args.train_dir, args.mask_dir)\n elapsed = timeit.default_timer() - t0\n print('Time: {:.3f} min'.format(elapsed / 60.0))\n", "step-4": "import argparse\nimport json\nimport os\nimport random\nimport timeit\nfrom glob import glob\nimport numpy as np\n\n\ndef parse_args():\n \"\"\"[summary]\n\n Returns:\n [type]: [description]\n \"\"\"\n parser = argparse.ArgumentParser()\n parser.add_argument('--train_dir', help=\n 'directory containing spacenet7 train dataset', default=\n '/data/spacenet7/spacenet7/train/')\n parser.add_argument('--mask_dir', help=\n 'directory containing building mask image files', default=\n '/data/spacenet7/building_masks/')\n parser.add_argument('--out_dir', help='output root directory', default=\n '/data/spacenet7/split/')\n parser.add_argument('--split_num', help='number of split', type=int,\n default=5)\n return parser.parse_args()\n\n\ndef dump_file_paths(aois, output_path, train_dir, mask_dir):\n \"\"\"[summary]\n\n Args:\n aois ([type]): [description]\n output_path ([type]): 
[description]\n train_dir ([type]): [description]\n mask_dir ([type]): [description]\n \"\"\"\n results = []\n for aoi in aois:\n image_paths = glob(os.path.join(train_dir, aoi, 'images_masked',\n '*.tif'))\n image_paths.sort()\n N = len(image_paths)\n for i in range(N):\n image_path = image_paths[i]\n filename = os.path.basename(image_path)\n mask_path = os.path.join(mask_dir, aoi, filename)\n assert os.path.exists(mask_path)\n image_prev_path = image_paths[0] if i == 0 else image_paths[i - 1]\n image_next_path = image_paths[N - 1\n ] if i == N - 1 else image_paths[i + 1]\n result = {}\n result['image_masked'] = image_path\n result['building_mask'] = mask_path\n result['image_masked_prev'] = image_prev_path\n result['image_masked_next'] = image_next_path\n results.append(result)\n with open(output_path, 'w') as f:\n json.dump(results, f, ensure_ascii=False, indent=4, sort_keys=False,\n separators=(',', ': '))\n\n\nif __name__ == '__main__':\n t0 = timeit.default_timer()\n args = parse_args()\n os.makedirs(args.out_dir)\n aois = sorted([d for d in os.listdir(args.train_dir) if os.path.isdir(\n os.path.join(args.train_dir, d))])\n random.seed(777)\n random.shuffle(aois)\n n = args.split_num\n aois_divided = np.array([aois[i::n] for i in range(n)])\n for val_idx in range(n):\n val_aois = aois_divided[val_idx]\n dump_file_paths(val_aois, os.path.join(args.out_dir,\n f'val_{val_idx}.json'), args.train_dir, args.mask_dir)\n train_mask = np.ones(n, dtype=bool)\n train_mask[val_idx] = False\n train_aois = aois_divided[train_mask]\n train_aois = np.concatenate(train_aois, axis=0).tolist()\n dump_file_paths(train_aois, os.path.join(args.out_dir,\n f'train_{val_idx}.json'), args.train_dir, args.mask_dir)\n elapsed = timeit.default_timer() - t0\n print('Time: {:.3f} min'.format(elapsed / 60.0))\n", "step-5": "#!/usr/bin/env python3\n\nimport argparse\nimport json\nimport os\nimport random\nimport timeit\nfrom glob import glob\n\nimport numpy as np\n\n\ndef parse_args():\n 
\"\"\"[summary]\n\n Returns:\n [type]: [description]\n \"\"\"\n parser = argparse.ArgumentParser()\n parser.add_argument('--train_dir',\n help='directory containing spacenet7 train dataset',\n default='/data/spacenet7/spacenet7/train/')\n parser.add_argument('--mask_dir',\n help='directory containing building mask image files',\n default='/data/spacenet7/building_masks/')\n parser.add_argument('--out_dir',\n help='output root directory',\n default='/data/spacenet7/split/')\n parser.add_argument('--split_num',\n help='number of split',\n type=int,\n default=5)\n return parser.parse_args()\n\n\ndef dump_file_paths(aois, output_path, train_dir, mask_dir):\n \"\"\"[summary]\n\n Args:\n aois ([type]): [description]\n output_path ([type]): [description]\n train_dir ([type]): [description]\n mask_dir ([type]): [description]\n \"\"\"\n\n results = []\n\n for aoi in aois:\n image_paths = glob(\n os.path.join(train_dir, aoi, 'images_masked', '*.tif'))\n image_paths.sort()\n\n N = len(image_paths)\n for i in range(N):\n # get path to mask\n image_path = image_paths[i]\n filename = os.path.basename(image_path)\n mask_path = os.path.join(mask_dir, aoi, filename)\n assert os.path.exists(mask_path)\n\n # previous frame\n image_prev_path = image_paths[0] if i == 0 \\\n else image_paths[i - 1]\n\n # next frame\n image_next_path = image_paths[N - 1] if i == N - 1 \\\n else image_paths[i + 1]\n\n result = {}\n result['image_masked'] = image_path\n result['building_mask'] = mask_path\n result['image_masked_prev'] = image_prev_path\n result['image_masked_next'] = image_next_path\n results.append(result)\n\n with open(output_path, 'w') as f:\n json.dump(results,\n f,\n ensure_ascii=False,\n indent=4,\n sort_keys=False,\n separators=(',', ': '))\n\n\nif __name__ == '__main__':\n t0 = timeit.default_timer()\n\n args = parse_args()\n\n os.makedirs(args.out_dir)\n\n aois = sorted([\n d for d in os.listdir(args.train_dir)\n if os.path.isdir(os.path.join(args.train_dir, d))\n ])\n\n 
random.seed(777)\n random.shuffle(aois)\n\n # split aois into train and val\n n = args.split_num\n aois_divided = np.array([aois[i::n] for i in range(n)])\n\n for val_idx in range(n):\n # dump file paths for val split\n val_aois = aois_divided[val_idx]\n\n dump_file_paths(val_aois,\n os.path.join(args.out_dir, f'val_{val_idx}.json'),\n args.train_dir, args.mask_dir)\n\n # dump file paths for train split\n train_mask = np.ones(n, dtype=bool)\n train_mask[val_idx] = False\n train_aois = aois_divided[train_mask]\n train_aois = np.concatenate(train_aois, axis=0).tolist()\n\n dump_file_paths(train_aois,\n os.path.join(args.out_dir, f'train_{val_idx}.json'),\n args.train_dir, args.mask_dir)\n\n elapsed = timeit.default_timer() - t0\n print('Time: {:.3f} min'.format(elapsed / 60.0))\n", "step-ids": [ 1, 2, 3, 4, 5 ] }
[ 1, 2, 3, 4, 5 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> def lm_res(snps, gene, cov): res = pd.DataFrame(np.zeros([snps.shape[0], 2], dtype=np.float32)) res.index = snps.index res.columns = ['beta', 'pval'] for i in range(snps.shape[0]): X = pd.concat([snps.iloc[i,].T, cov.T], axis=1) X = X.apply(pd.to_numeric) X = sm.add_constant(X) est = sm.OLS(pd.to_numeric(gene.T.iloc[:, 0]), X).fit() res.iloc[i, 0] = est.params[1] res.iloc[i, 1] = est.pvalues[1] return res <|reserved_special_token_1|> <|reserved_special_token_0|> def process_input(gene_file, vcf_file, cov_file, gene, start_col, gene_col, chr_col, gene_start_col): all_gene = pd.read_csv(gene_file, sep='[\t,]', header=0) gene = all_gene.loc[all_gene[gene_col] == gene,] gene_start = int(gene.loc[:, gene_start_col]) chrom = int(gene.loc[:, chr_col]) gene = gene.iloc[:, start_col:gene.shape[1]] start = int(gene_start - 1000000.0) if start < 0: start = 0 end = int(start + 1000000.0) cmd = 'tabix ' + vcf_file + ' ' + str(chrom) + ':' + str(start ) + '-' + str(end) s = subprocess.check_output(cmd, shell=True) s = s.decode().strip() s = s.split('\n') gt = [] for i in s: gt.append(i.split('\t')) s1 = pd.DataFrame(gt) info = s1.iloc[:, 0:9] s1 = s1.drop([0, 1, 2, 3, 4, 5, 6, 7, 8], axis=1) s1.index = info.iloc[:, 2] s2 = pd.DataFrame() for i in s1.columns: s2[i] = s1[i].apply(lambda x: x.split(':')[1]) sample_ids = subprocess.check_output('/usr/local/bin/bcftools query -l {}' .format(vcf_file), shell=True).decode().strip().split() s2.columns = sample_ids s3 = s2[gene.columns] cov = pd.read_csv(cov_file, sep='\t', index_col=0, header=0) cov = cov[gene.columns] return gene, s3, cov <|reserved_special_token_0|> def lm_res(snps, gene, cov): res = pd.DataFrame(np.zeros([snps.shape[0], 2], dtype=np.float32)) res.index = snps.index res.columns = ['beta', 'pval'] for i in range(snps.shape[0]): X = pd.concat([snps.iloc[i,].T, cov.T], axis=1) X = X.apply(pd.to_numeric) X = sm.add_constant(X) est = 
sm.OLS(pd.to_numeric(gene.T.iloc[:, 0]), X).fit() res.iloc[i, 0] = est.params[1] res.iloc[i, 1] = est.pvalues[1] return res <|reserved_special_token_1|> import pandas as pd import subprocess import statsmodels.api as sm import numpy as np import math <|reserved_special_token_0|> def process_input(gene_file, vcf_file, cov_file, gene, start_col, gene_col, chr_col, gene_start_col): all_gene = pd.read_csv(gene_file, sep='[\t,]', header=0) gene = all_gene.loc[all_gene[gene_col] == gene,] gene_start = int(gene.loc[:, gene_start_col]) chrom = int(gene.loc[:, chr_col]) gene = gene.iloc[:, start_col:gene.shape[1]] start = int(gene_start - 1000000.0) if start < 0: start = 0 end = int(start + 1000000.0) cmd = 'tabix ' + vcf_file + ' ' + str(chrom) + ':' + str(start ) + '-' + str(end) s = subprocess.check_output(cmd, shell=True) s = s.decode().strip() s = s.split('\n') gt = [] for i in s: gt.append(i.split('\t')) s1 = pd.DataFrame(gt) info = s1.iloc[:, 0:9] s1 = s1.drop([0, 1, 2, 3, 4, 5, 6, 7, 8], axis=1) s1.index = info.iloc[:, 2] s2 = pd.DataFrame() for i in s1.columns: s2[i] = s1[i].apply(lambda x: x.split(':')[1]) sample_ids = subprocess.check_output('/usr/local/bin/bcftools query -l {}' .format(vcf_file), shell=True).decode().strip().split() s2.columns = sample_ids s3 = s2[gene.columns] cov = pd.read_csv(cov_file, sep='\t', index_col=0, header=0) cov = cov[gene.columns] return gene, s3, cov <|reserved_special_token_0|> def lm_res(snps, gene, cov): res = pd.DataFrame(np.zeros([snps.shape[0], 2], dtype=np.float32)) res.index = snps.index res.columns = ['beta', 'pval'] for i in range(snps.shape[0]): X = pd.concat([snps.iloc[i,].T, cov.T], axis=1) X = X.apply(pd.to_numeric) X = sm.add_constant(X) est = sm.OLS(pd.to_numeric(gene.T.iloc[:, 0]), X).fit() res.iloc[i, 0] = est.params[1] res.iloc[i, 1] = est.pvalues[1] return res <|reserved_special_token_1|> import pandas as pd import subprocess import statsmodels.api as sm import numpy as np import math ''' This function prcesses 
the gene file Output is a one-row file for a gene Each individual is in a column Input file must have rowname gene: gene ENSG ID of interest start_col: column number which the gene exp value starts gene_col: column name for the gene column gene_start_col: column name for the gene start position chr_col: column name for the gene chromosome ''' def process_input(gene_file, vcf_file, cov_file, gene, start_col, gene_col, chr_col, gene_start_col): all_gene = pd.read_csv(gene_file, sep='[\t,]', header=0) #sep='[\t,]' allows read in both , and tab delimited files''' gene=all_gene.loc[all_gene[gene_col]==gene,] gene_start=int(gene.loc[:,gene_start_col]) chrom=int(gene.loc[:,chr_col]) gene=gene.iloc[:,start_col:gene.shape[1]] start=int(gene_start-1e6) if start < 0:start = 0 end=int(start+1e6) cmd='tabix '+ vcf_file + ' ' + str(chrom) + ':' + str(start) + '-' + str(end) s = subprocess.check_output(cmd, shell=True) s = s.decode().strip() s = s.split('\n') gt=[] for i in s: gt.append(i.split('\t')) s1=pd.DataFrame(gt) info=s1.iloc[:,0:9] s1=s1.drop([0,1,2,3,4,5,6,7,8],axis=1) s1.index=info.iloc[:,2] s2= pd.DataFrame() for i in s1.columns: s2[i] = s1[i].apply(lambda x: x.split(':')[1]) sample_ids = subprocess.check_output('/usr/local/bin/bcftools query -l {}'.format(vcf_file), shell=True).decode().strip().split() s2.columns=sample_ids s3=s2[gene.columns] cov = pd.read_csv(cov_file, sep='\t', index_col=0, header=0) cov=cov[gene.columns] return gene, s3, cov '''This function takes the input from the previous function Fit linear model Return beta and pvalues for the SNPs ''' def lm_res(snps,gene,cov): res = pd.DataFrame(np.zeros([snps.shape[0],2], dtype=np.float32)) res.index=snps.index res.columns=['beta','pval'] for i in range(snps.shape[0]): X=pd.concat([snps.iloc[i,].T, cov.T], axis=1) X = X.apply(pd.to_numeric) X = sm.add_constant(X) est = sm.OLS(pd.to_numeric(gene.T.iloc[:,0]), X).fit() res.iloc[i,0]=est.params[1] res.iloc[i,1]=est.pvalues[1] return res
flexible
{ "blob_id": "2f64aac7032ac099870269659a84b8c7c38b2bf0", "index": 8385, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\ndef lm_res(snps, gene, cov):\n res = pd.DataFrame(np.zeros([snps.shape[0], 2], dtype=np.float32))\n res.index = snps.index\n res.columns = ['beta', 'pval']\n for i in range(snps.shape[0]):\n X = pd.concat([snps.iloc[i,].T, cov.T], axis=1)\n X = X.apply(pd.to_numeric)\n X = sm.add_constant(X)\n est = sm.OLS(pd.to_numeric(gene.T.iloc[:, 0]), X).fit()\n res.iloc[i, 0] = est.params[1]\n res.iloc[i, 1] = est.pvalues[1]\n return res\n", "step-3": "<mask token>\n\n\ndef process_input(gene_file, vcf_file, cov_file, gene, start_col, gene_col,\n chr_col, gene_start_col):\n all_gene = pd.read_csv(gene_file, sep='[\\t,]', header=0)\n gene = all_gene.loc[all_gene[gene_col] == gene,]\n gene_start = int(gene.loc[:, gene_start_col])\n chrom = int(gene.loc[:, chr_col])\n gene = gene.iloc[:, start_col:gene.shape[1]]\n start = int(gene_start - 1000000.0)\n if start < 0:\n start = 0\n end = int(start + 1000000.0)\n cmd = 'tabix ' + vcf_file + ' ' + str(chrom) + ':' + str(start\n ) + '-' + str(end)\n s = subprocess.check_output(cmd, shell=True)\n s = s.decode().strip()\n s = s.split('\\n')\n gt = []\n for i in s:\n gt.append(i.split('\\t'))\n s1 = pd.DataFrame(gt)\n info = s1.iloc[:, 0:9]\n s1 = s1.drop([0, 1, 2, 3, 4, 5, 6, 7, 8], axis=1)\n s1.index = info.iloc[:, 2]\n s2 = pd.DataFrame()\n for i in s1.columns:\n s2[i] = s1[i].apply(lambda x: x.split(':')[1])\n sample_ids = subprocess.check_output('/usr/local/bin/bcftools query -l {}'\n .format(vcf_file), shell=True).decode().strip().split()\n s2.columns = sample_ids\n s3 = s2[gene.columns]\n cov = pd.read_csv(cov_file, sep='\\t', index_col=0, header=0)\n cov = cov[gene.columns]\n return gene, s3, cov\n\n\n<mask token>\n\n\ndef lm_res(snps, gene, cov):\n res = pd.DataFrame(np.zeros([snps.shape[0], 2], dtype=np.float32))\n res.index = snps.index\n res.columns = ['beta', 'pval']\n for i in 
range(snps.shape[0]):\n X = pd.concat([snps.iloc[i,].T, cov.T], axis=1)\n X = X.apply(pd.to_numeric)\n X = sm.add_constant(X)\n est = sm.OLS(pd.to_numeric(gene.T.iloc[:, 0]), X).fit()\n res.iloc[i, 0] = est.params[1]\n res.iloc[i, 1] = est.pvalues[1]\n return res\n", "step-4": "import pandas as pd\nimport subprocess\nimport statsmodels.api as sm\nimport numpy as np\nimport math\n<mask token>\n\n\ndef process_input(gene_file, vcf_file, cov_file, gene, start_col, gene_col,\n chr_col, gene_start_col):\n all_gene = pd.read_csv(gene_file, sep='[\\t,]', header=0)\n gene = all_gene.loc[all_gene[gene_col] == gene,]\n gene_start = int(gene.loc[:, gene_start_col])\n chrom = int(gene.loc[:, chr_col])\n gene = gene.iloc[:, start_col:gene.shape[1]]\n start = int(gene_start - 1000000.0)\n if start < 0:\n start = 0\n end = int(start + 1000000.0)\n cmd = 'tabix ' + vcf_file + ' ' + str(chrom) + ':' + str(start\n ) + '-' + str(end)\n s = subprocess.check_output(cmd, shell=True)\n s = s.decode().strip()\n s = s.split('\\n')\n gt = []\n for i in s:\n gt.append(i.split('\\t'))\n s1 = pd.DataFrame(gt)\n info = s1.iloc[:, 0:9]\n s1 = s1.drop([0, 1, 2, 3, 4, 5, 6, 7, 8], axis=1)\n s1.index = info.iloc[:, 2]\n s2 = pd.DataFrame()\n for i in s1.columns:\n s2[i] = s1[i].apply(lambda x: x.split(':')[1])\n sample_ids = subprocess.check_output('/usr/local/bin/bcftools query -l {}'\n .format(vcf_file), shell=True).decode().strip().split()\n s2.columns = sample_ids\n s3 = s2[gene.columns]\n cov = pd.read_csv(cov_file, sep='\\t', index_col=0, header=0)\n cov = cov[gene.columns]\n return gene, s3, cov\n\n\n<mask token>\n\n\ndef lm_res(snps, gene, cov):\n res = pd.DataFrame(np.zeros([snps.shape[0], 2], dtype=np.float32))\n res.index = snps.index\n res.columns = ['beta', 'pval']\n for i in range(snps.shape[0]):\n X = pd.concat([snps.iloc[i,].T, cov.T], axis=1)\n X = X.apply(pd.to_numeric)\n X = sm.add_constant(X)\n est = sm.OLS(pd.to_numeric(gene.T.iloc[:, 0]), X).fit()\n res.iloc[i, 0] = 
est.params[1]\n res.iloc[i, 1] = est.pvalues[1]\n return res\n", "step-5": "import pandas as pd\nimport subprocess\nimport statsmodels.api as sm\nimport numpy as np\nimport math\n\n'''\nThis function prcesses the gene file\nOutput is a one-row file for a gene\nEach individual is in a column\n\nInput file must have rowname\ngene: gene ENSG ID of interest\nstart_col: column number which the gene exp value starts\ngene_col: column name for the gene column\ngene_start_col: column name for the gene start position\nchr_col: column name for the gene chromosome\n'''\ndef process_input(gene_file, vcf_file, cov_file, gene, start_col, gene_col, chr_col, gene_start_col):\n all_gene = pd.read_csv(gene_file, sep='[\\t,]', header=0) #sep='[\\t,]' allows read in both , and tab delimited files'''\n gene=all_gene.loc[all_gene[gene_col]==gene,]\n \n gene_start=int(gene.loc[:,gene_start_col])\n chrom=int(gene.loc[:,chr_col])\n gene=gene.iloc[:,start_col:gene.shape[1]]\n \n start=int(gene_start-1e6)\n if start < 0:start = 0\n end=int(start+1e6)\n\n cmd='tabix '+ vcf_file + ' ' + str(chrom) + ':' + str(start) + '-' + str(end)\n s = subprocess.check_output(cmd, shell=True)\n s = s.decode().strip()\n s = s.split('\\n')\n gt=[]\n for i in s:\n gt.append(i.split('\\t')) \n s1=pd.DataFrame(gt)\n info=s1.iloc[:,0:9]\n s1=s1.drop([0,1,2,3,4,5,6,7,8],axis=1)\n s1.index=info.iloc[:,2]\n\n s2= pd.DataFrame()\n for i in s1.columns:\n s2[i] = s1[i].apply(lambda x: x.split(':')[1])\n\n sample_ids = subprocess.check_output('/usr/local/bin/bcftools query -l {}'.format(vcf_file), shell=True).decode().strip().split()\n s2.columns=sample_ids\n s3=s2[gene.columns]\n\n cov = pd.read_csv(cov_file, sep='\\t', index_col=0, header=0) \n cov=cov[gene.columns]\n\n return gene, s3, cov\n\n'''This function takes the input from the previous function\n Fit linear model \n Return beta and pvalues for the SNPs\n'''\ndef lm_res(snps,gene,cov):\n res = pd.DataFrame(np.zeros([snps.shape[0],2], dtype=np.float32))\n 
res.index=snps.index\n res.columns=['beta','pval'] \n\n for i in range(snps.shape[0]):\n X=pd.concat([snps.iloc[i,].T, cov.T], axis=1)\n X = X.apply(pd.to_numeric)\n X = sm.add_constant(X)\n est = sm.OLS(pd.to_numeric(gene.T.iloc[:,0]), X).fit()\n res.iloc[i,0]=est.params[1]\n res.iloc[i,1]=est.pvalues[1] \n return res\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
from collections import deque def solution(play_time, adv_time, logs): ''' Strategy : adv_start_time을 log start time 부터 < 995959 - adv time sliding window Step 1. String time -> integer time Step 2. pseudo code : Two pointer algorithm max time = 0 return max time ''' ## Step 1. MAX = str2int(play_time) max_view = 0 ans_time = 0 adv_time = str2int(adv_time) logs = [[str2int(log.split("-")[0]),str2int(log.split("-")[1])] for log in logs] view_list = [0] * (MAX+1) ## Step 2. ## 도함수 for start_time,end_time in logs: view_list[start_time] += 1 view_list[end_time] -= 1 ## 함수 for i in range(1,MAX+1): view_list[i] = view_list[i]+view_list[i-1] ## 누적 합 for i in range(1,MAX+1): view_list[i] = view_list[i]+view_list[i-1] for start_time in range(MAX-adv_time+1): ## start time 0,1,2,... MAX-adv_time ## end time adv_time, ... MAX end_time = start_time + adv_time temp_view = view_list[end_time] - view_list[start_time] if temp_view > max_view: max_view = temp_view ans_time = start_time if ans_time != 0: ans_time += 1 return int2str(ans_time) def str2int(strtime:str): hh,mm,ss = strtime.split(":") return 3600*int(hh)+60*int(mm)+int(ss) def int2str(inttime:int): hh = inttime//3600 mm = (inttime%3600)//60 ss = inttime%60 return str(hh).zfill(2)+":"+str(mm).zfill(2)+":"+str(ss).zfill(2) if __name__ == "__main__": play_time = "02:03:55" adv_time = "00:14:15" logs = ["01:20:15-01:45:14", "00:25:50-00:48:29", "00:40:31-01:00:00", "01:37:44-02:02:30", "01:30:59-01:53:29"] result = "01:30:59" print(solution(play_time, adv_time, logs)) print(result) play_time = "99:59:59" adv_time = "25:00:00" logs = ["69:59:59-89:59:59", "01:00:00-21:00:00", "79:59:59-99:59:59", "11:00:00-31:00:00"] result = "01:00:00" print(solution(play_time, adv_time, logs)) print(result) play_time = "50:00:00" adv_time = "50:00:00" logs = ["15:36:51-38:21:49", "10:14:18-15:36:51", "38:21:49-42:51:45"] result = "00:00:00" print(solution(play_time, adv_time, logs)) print(result)
normal
{ "blob_id": "cb50a5352b0ad7b04dee9393c50da54fdf507376", "index": 2018, "step-1": "<mask token>\n\n\ndef str2int(strtime: str):\n hh, mm, ss = strtime.split(':')\n return 3600 * int(hh) + 60 * int(mm) + int(ss)\n\n\ndef int2str(inttime: int):\n hh = inttime // 3600\n mm = inttime % 3600 // 60\n ss = inttime % 60\n return str(hh).zfill(2) + ':' + str(mm).zfill(2) + ':' + str(ss).zfill(2)\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\ndef solution(play_time, adv_time, logs):\n \"\"\"\n Strategy : \n adv_start_time을 log start time 부터 < 995959 - adv time\n sliding window \n\n Step 1. \n String time -> integer time\n\n Step 2. pseudo code : Two pointer algorithm\n max time = 0\n \n return max time\n \"\"\"\n MAX = str2int(play_time)\n max_view = 0\n ans_time = 0\n adv_time = str2int(adv_time)\n logs = [[str2int(log.split('-')[0]), str2int(log.split('-')[1])] for\n log in logs]\n view_list = [0] * (MAX + 1)\n for start_time, end_time in logs:\n view_list[start_time] += 1\n view_list[end_time] -= 1\n for i in range(1, MAX + 1):\n view_list[i] = view_list[i] + view_list[i - 1]\n for i in range(1, MAX + 1):\n view_list[i] = view_list[i] + view_list[i - 1]\n for start_time in range(MAX - adv_time + 1):\n end_time = start_time + adv_time\n temp_view = view_list[end_time] - view_list[start_time]\n if temp_view > max_view:\n max_view = temp_view\n ans_time = start_time\n if ans_time != 0:\n ans_time += 1\n return int2str(ans_time)\n\n\ndef str2int(strtime: str):\n hh, mm, ss = strtime.split(':')\n return 3600 * int(hh) + 60 * int(mm) + int(ss)\n\n\ndef int2str(inttime: int):\n hh = inttime // 3600\n mm = inttime % 3600 // 60\n ss = inttime % 60\n return str(hh).zfill(2) + ':' + str(mm).zfill(2) + ':' + str(ss).zfill(2)\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\ndef solution(play_time, adv_time, logs):\n \"\"\"\n Strategy : \n adv_start_time을 log start time 부터 < 995959 - adv time\n sliding window \n\n Step 1. \n String time -> integer time\n\n Step 2. 
pseudo code : Two pointer algorithm\n max time = 0\n \n return max time\n \"\"\"\n MAX = str2int(play_time)\n max_view = 0\n ans_time = 0\n adv_time = str2int(adv_time)\n logs = [[str2int(log.split('-')[0]), str2int(log.split('-')[1])] for\n log in logs]\n view_list = [0] * (MAX + 1)\n for start_time, end_time in logs:\n view_list[start_time] += 1\n view_list[end_time] -= 1\n for i in range(1, MAX + 1):\n view_list[i] = view_list[i] + view_list[i - 1]\n for i in range(1, MAX + 1):\n view_list[i] = view_list[i] + view_list[i - 1]\n for start_time in range(MAX - adv_time + 1):\n end_time = start_time + adv_time\n temp_view = view_list[end_time] - view_list[start_time]\n if temp_view > max_view:\n max_view = temp_view\n ans_time = start_time\n if ans_time != 0:\n ans_time += 1\n return int2str(ans_time)\n\n\ndef str2int(strtime: str):\n hh, mm, ss = strtime.split(':')\n return 3600 * int(hh) + 60 * int(mm) + int(ss)\n\n\ndef int2str(inttime: int):\n hh = inttime // 3600\n mm = inttime % 3600 // 60\n ss = inttime % 60\n return str(hh).zfill(2) + ':' + str(mm).zfill(2) + ':' + str(ss).zfill(2)\n\n\nif __name__ == '__main__':\n play_time = '02:03:55'\n adv_time = '00:14:15'\n logs = ['01:20:15-01:45:14', '00:25:50-00:48:29', '00:40:31-01:00:00',\n '01:37:44-02:02:30', '01:30:59-01:53:29']\n result = '01:30:59'\n print(solution(play_time, adv_time, logs))\n print(result)\n play_time = '99:59:59'\n adv_time = '25:00:00'\n logs = ['69:59:59-89:59:59', '01:00:00-21:00:00', '79:59:59-99:59:59',\n '11:00:00-31:00:00']\n result = '01:00:00'\n print(solution(play_time, adv_time, logs))\n print(result)\n play_time = '50:00:00'\n adv_time = '50:00:00'\n logs = ['15:36:51-38:21:49', '10:14:18-15:36:51', '38:21:49-42:51:45']\n result = '00:00:00'\n print(solution(play_time, adv_time, logs))\n print(result)\n", "step-4": "from collections import deque\n\n\ndef solution(play_time, adv_time, logs):\n \"\"\"\n Strategy : \n adv_start_time을 log start time 부터 < 995959 - adv time\n sliding 
window \n\n Step 1. \n String time -> integer time\n\n Step 2. pseudo code : Two pointer algorithm\n max time = 0\n \n return max time\n \"\"\"\n MAX = str2int(play_time)\n max_view = 0\n ans_time = 0\n adv_time = str2int(adv_time)\n logs = [[str2int(log.split('-')[0]), str2int(log.split('-')[1])] for\n log in logs]\n view_list = [0] * (MAX + 1)\n for start_time, end_time in logs:\n view_list[start_time] += 1\n view_list[end_time] -= 1\n for i in range(1, MAX + 1):\n view_list[i] = view_list[i] + view_list[i - 1]\n for i in range(1, MAX + 1):\n view_list[i] = view_list[i] + view_list[i - 1]\n for start_time in range(MAX - adv_time + 1):\n end_time = start_time + adv_time\n temp_view = view_list[end_time] - view_list[start_time]\n if temp_view > max_view:\n max_view = temp_view\n ans_time = start_time\n if ans_time != 0:\n ans_time += 1\n return int2str(ans_time)\n\n\ndef str2int(strtime: str):\n hh, mm, ss = strtime.split(':')\n return 3600 * int(hh) + 60 * int(mm) + int(ss)\n\n\ndef int2str(inttime: int):\n hh = inttime // 3600\n mm = inttime % 3600 // 60\n ss = inttime % 60\n return str(hh).zfill(2) + ':' + str(mm).zfill(2) + ':' + str(ss).zfill(2)\n\n\nif __name__ == '__main__':\n play_time = '02:03:55'\n adv_time = '00:14:15'\n logs = ['01:20:15-01:45:14', '00:25:50-00:48:29', '00:40:31-01:00:00',\n '01:37:44-02:02:30', '01:30:59-01:53:29']\n result = '01:30:59'\n print(solution(play_time, adv_time, logs))\n print(result)\n play_time = '99:59:59'\n adv_time = '25:00:00'\n logs = ['69:59:59-89:59:59', '01:00:00-21:00:00', '79:59:59-99:59:59',\n '11:00:00-31:00:00']\n result = '01:00:00'\n print(solution(play_time, adv_time, logs))\n print(result)\n play_time = '50:00:00'\n adv_time = '50:00:00'\n logs = ['15:36:51-38:21:49', '10:14:18-15:36:51', '38:21:49-42:51:45']\n result = '00:00:00'\n print(solution(play_time, adv_time, logs))\n print(result)\n", "step-5": "from collections import deque\ndef solution(play_time, adv_time, logs):\n\n '''\n Strategy : \n 
adv_start_time을 log start time 부터 < 995959 - adv time\n sliding window \n\n Step 1. \n String time -> integer time\n\n Step 2. pseudo code : Two pointer algorithm\n max time = 0\n \n return max time\n '''\n ## Step 1.\n MAX = str2int(play_time)\n max_view = 0\n ans_time = 0\n adv_time = str2int(adv_time)\n logs = [[str2int(log.split(\"-\")[0]),str2int(log.split(\"-\")[1])] for log in logs]\n view_list = [0] * (MAX+1)\n ## Step 2.\n ## 도함수\n for start_time,end_time in logs:\n view_list[start_time] += 1\n view_list[end_time] -= 1\n\n ## 함수\n for i in range(1,MAX+1):\n view_list[i] = view_list[i]+view_list[i-1]\n\n ## 누적 합\n for i in range(1,MAX+1):\n view_list[i] = view_list[i]+view_list[i-1]\n \n\n for start_time in range(MAX-adv_time+1):\n ## start time 0,1,2,... MAX-adv_time\n ## end time adv_time, ... MAX\n end_time = start_time + adv_time\n temp_view = view_list[end_time] - view_list[start_time]\n if temp_view > max_view:\n max_view = temp_view\n ans_time = start_time\n if ans_time != 0:\n ans_time += 1\n return int2str(ans_time)\n\ndef str2int(strtime:str):\n hh,mm,ss = strtime.split(\":\")\n return 3600*int(hh)+60*int(mm)+int(ss)\n\ndef int2str(inttime:int):\n hh = inttime//3600\n mm = (inttime%3600)//60\n ss = inttime%60\n return str(hh).zfill(2)+\":\"+str(mm).zfill(2)+\":\"+str(ss).zfill(2)\n\n\nif __name__ == \"__main__\":\n play_time = \"02:03:55\"\n adv_time = \"00:14:15\"\n logs = [\"01:20:15-01:45:14\", \"00:25:50-00:48:29\", \"00:40:31-01:00:00\", \"01:37:44-02:02:30\", \"01:30:59-01:53:29\"]\n result = \"01:30:59\"\n print(solution(play_time, adv_time, logs))\n print(result)\n play_time = \"99:59:59\"\n adv_time = \"25:00:00\"\n logs = [\"69:59:59-89:59:59\", \"01:00:00-21:00:00\", \"79:59:59-99:59:59\", \"11:00:00-31:00:00\"]\n result = \"01:00:00\"\n print(solution(play_time, adv_time, logs))\n print(result)\n play_time = \"50:00:00\"\n adv_time = \"50:00:00\"\n logs = [\"15:36:51-38:21:49\", \"10:14:18-15:36:51\", \"38:21:49-42:51:45\"]\n result = 
\"00:00:00\"\n print(solution(play_time, adv_time, logs))\n print(result)", "step-ids": [ 2, 3, 4, 5, 6 ] }
[ 2, 3, 4, 5, 6 ]
from http import HTTPStatus from ninja import Router mock_post_router = Router() @mock_post_router.get( "/mock_posts", url_name="mock_post_list", summary="전체 mock post의 list를 반환한다", response={200: None}, ) def retrieve_all_mock_posts(request): return HTTPStatus.OK
normal
{ "blob_id": "dcb57ecf2c72b8ac816bb06986d80544ff97c669", "index": 5915, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\n@mock_post_router.get('/mock_posts', url_name='mock_post_list', summary=\n '전체 mock post의 list를 반환한다', response={(200): None})\ndef retrieve_all_mock_posts(request):\n return HTTPStatus.OK\n", "step-3": "<mask token>\nmock_post_router = Router()\n\n\n@mock_post_router.get('/mock_posts', url_name='mock_post_list', summary=\n '전체 mock post의 list를 반환한다', response={(200): None})\ndef retrieve_all_mock_posts(request):\n return HTTPStatus.OK\n", "step-4": "from http import HTTPStatus\nfrom ninja import Router\nmock_post_router = Router()\n\n\n@mock_post_router.get('/mock_posts', url_name='mock_post_list', summary=\n '전체 mock post의 list를 반환한다', response={(200): None})\ndef retrieve_all_mock_posts(request):\n return HTTPStatus.OK\n", "step-5": "from http import HTTPStatus\n\nfrom ninja import Router\n\nmock_post_router = Router()\n\n\n@mock_post_router.get(\n \"/mock_posts\",\n url_name=\"mock_post_list\",\n summary=\"전체 mock post의 list를 반환한다\",\n response={200: None},\n)\ndef retrieve_all_mock_posts(request):\n return HTTPStatus.OK\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
<|reserved_special_token_0|> def select(arr, k): n = len(arr) if not 0 <= k < n: raise ValueError('not valid index in array') if n <= 1: return arr[0] pivot = random.choice(arr) L, E, G = [], [], [] for data in arr: if data < pivot: L.append(data) elif data == pivot: E.append(pivot) else: G.append(data) if k < len(L): return select(L, k) elif k < len(L) + len(E): return pivot else: return select(G, k - (len(L) + len(E))) <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> def select(arr, k): n = len(arr) if not 0 <= k < n: raise ValueError('not valid index in array') if n <= 1: return arr[0] pivot = random.choice(arr) L, E, G = [], [], [] for data in arr: if data < pivot: L.append(data) elif data == pivot: E.append(pivot) else: G.append(data) if k < len(L): return select(L, k) elif k < len(L) + len(E): return pivot else: return select(G, k - (len(L) + len(E))) <|reserved_special_token_0|> print(select(x, 3)) <|reserved_special_token_1|> <|reserved_special_token_0|> def select(arr, k): n = len(arr) if not 0 <= k < n: raise ValueError('not valid index in array') if n <= 1: return arr[0] pivot = random.choice(arr) L, E, G = [], [], [] for data in arr: if data < pivot: L.append(data) elif data == pivot: E.append(pivot) else: G.append(data) if k < len(L): return select(L, k) elif k < len(L) + len(E): return pivot else: return select(G, k - (len(L) + len(E))) x = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] print(select(x, 3)) <|reserved_special_token_1|> <|reserved_special_token_0|> import random def select(arr, k): n = len(arr) if not 0 <= k < n: raise ValueError('not valid index in array') if n <= 1: return arr[0] pivot = random.choice(arr) L, E, G = [], [], [] for data in arr: if data < pivot: L.append(data) elif data == pivot: E.append(pivot) else: G.append(data) if k < len(L): return select(L, k) elif k < len(L) + len(E): return pivot else: return select(G, k - (len(L) + len(E))) x = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] print(select(x, 3)) 
<|reserved_special_token_1|> ''' selection review very similar to quicksort in terms of set up. no need to sort to find kth element in a list but instead can be done in o(n) quick sort can be o(nlogn) if we choose median instead of pivot tips: raise value error for bad index not in between 0 <= k < n basecase of n <=1 --> return arr[0] use L, E, G if k < len(L): select(L, k) if k < select(len(L) + len(E)): return pivot else: select(G, k - len(l)-len(E)) O(n) runtime n + n / 2 + n / 4 + n / 8 + n / 16 + ... = n (1 + 1/2 + 1/4 + 1/8 + ...) = 2n on average worst case is 0(n^2) like quick sort if you pick the worst each time ''' import random def select(arr, k): n = len(arr) if not 0 <= k < n: raise ValueError('not valid index in array') if n <= 1: return arr[0] pivot = random.choice(arr) L, E, G = [],[],[] for data in arr: if data < pivot: L.append(data) elif data == pivot: E.append(pivot) else: G.append(data) if k < len(L): return select(L, k) elif k < (len(L) + len(E)): return pivot else: return select(G, k - (len(L) + len(E))) x = [1,2,3,4,5,6,7,8,9,10] print(select(x,3))
flexible
{ "blob_id": "69d3a39dc024929eaf6fb77e38a7a818d2886cf7", "index": 8512, "step-1": "<mask token>\n\n\ndef select(arr, k):\n n = len(arr)\n if not 0 <= k < n:\n raise ValueError('not valid index in array')\n if n <= 1:\n return arr[0]\n pivot = random.choice(arr)\n L, E, G = [], [], []\n for data in arr:\n if data < pivot:\n L.append(data)\n elif data == pivot:\n E.append(pivot)\n else:\n G.append(data)\n if k < len(L):\n return select(L, k)\n elif k < len(L) + len(E):\n return pivot\n else:\n return select(G, k - (len(L) + len(E)))\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\ndef select(arr, k):\n n = len(arr)\n if not 0 <= k < n:\n raise ValueError('not valid index in array')\n if n <= 1:\n return arr[0]\n pivot = random.choice(arr)\n L, E, G = [], [], []\n for data in arr:\n if data < pivot:\n L.append(data)\n elif data == pivot:\n E.append(pivot)\n else:\n G.append(data)\n if k < len(L):\n return select(L, k)\n elif k < len(L) + len(E):\n return pivot\n else:\n return select(G, k - (len(L) + len(E)))\n\n\n<mask token>\nprint(select(x, 3))\n", "step-3": "<mask token>\n\n\ndef select(arr, k):\n n = len(arr)\n if not 0 <= k < n:\n raise ValueError('not valid index in array')\n if n <= 1:\n return arr[0]\n pivot = random.choice(arr)\n L, E, G = [], [], []\n for data in arr:\n if data < pivot:\n L.append(data)\n elif data == pivot:\n E.append(pivot)\n else:\n G.append(data)\n if k < len(L):\n return select(L, k)\n elif k < len(L) + len(E):\n return pivot\n else:\n return select(G, k - (len(L) + len(E)))\n\n\nx = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]\nprint(select(x, 3))\n", "step-4": "<mask token>\nimport random\n\n\ndef select(arr, k):\n n = len(arr)\n if not 0 <= k < n:\n raise ValueError('not valid index in array')\n if n <= 1:\n return arr[0]\n pivot = random.choice(arr)\n L, E, G = [], [], []\n for data in arr:\n if data < pivot:\n L.append(data)\n elif data == pivot:\n E.append(pivot)\n else:\n G.append(data)\n if k < len(L):\n return select(L, k)\n elif k < 
len(L) + len(E):\n return pivot\n else:\n return select(G, k - (len(L) + len(E)))\n\n\nx = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]\nprint(select(x, 3))\n", "step-5": "'''\nselection review\nvery similar to quicksort in terms of set up.\nno need to sort to find kth element in a list\nbut instead can be done in o(n)\nquick sort can be o(nlogn) if we choose median\ninstead of pivot\n\ntips:\nraise value error for bad index not in between 0 <= k < n\nbasecase of n <=1 --> return arr[0]\nuse L, E, G\nif k < len(L):\n\tselect(L, k)\nif k < select(len(L) + len(E)):\n\treturn pivot\nelse:\n\tselect(G, k - len(l)-len(E))\n\nO(n) runtime\n\nn + n / 2 + n / 4 + n / 8 + n / 16 + ... = n (1 + 1/2 + 1/4 + 1/8 + ...)\n= 2n on average\nworst case is 0(n^2) like quick sort if you pick the worst each\ntime\n'''\nimport random\n\ndef select(arr, k):\n\tn = len(arr)\n\tif not 0 <= k < n:\n\t\traise ValueError('not valid index in array')\n\tif n <= 1:\n\t\treturn arr[0]\n\tpivot = random.choice(arr)\n\tL, E, G = [],[],[]\n\tfor data in arr:\n\t\tif data < pivot:\n\t\t\tL.append(data)\n\t\telif data == pivot:\n\t\t\tE.append(pivot)\n\t\telse:\n\t\t\tG.append(data)\n\tif k < len(L):\n\t\treturn select(L, k)\n\telif k < (len(L) + len(E)):\n\t\treturn pivot\n\telse:\n\t\treturn select(G, k - (len(L) + len(E)))\n\nx = [1,2,3,4,5,6,7,8,9,10]\nprint(select(x,3))\n", "step-ids": [ 1, 2, 3, 4, 5 ] }
[ 1, 2, 3, 4, 5 ]
""" Question: You are given a string s consisting only of digits 0-9, commas ,, and dots . Your task is to complete the regex_pattern defined below, which will be used to re.split() all of the , and . symbols in s. It’s guaranteed that every comma and every dot in s is preceded and followed by a digit. Sample Input: 100,000,000.000 Sample Output: 100 000 000 000 """ # Solution: import re regex_pattern = r"[,.]" print("\n".join(re.split(regex_pattern, input())))
normal
{ "blob_id": "020691fe2c7e7092d45415b72ce1804618421a2a", "index": 9519, "step-1": "<mask token>\n", "step-2": "<mask token>\nprint('\\n'.join(re.split(regex_pattern, input())))\n", "step-3": "<mask token>\nregex_pattern = '[,.]'\nprint('\\n'.join(re.split(regex_pattern, input())))\n", "step-4": "<mask token>\nimport re\nregex_pattern = '[,.]'\nprint('\\n'.join(re.split(regex_pattern, input())))\n", "step-5": "\"\"\"\nQuestion:\n\nYou are given a string s consisting only of digits 0-9, commas ,, and dots .\n\nYour task is to complete the regex_pattern defined below, which will be used to\nre.split() all of the , and . symbols in s.\n\nIt’s guaranteed that every comma and every dot in s is preceded and followed\nby a digit.\n\nSample Input:\n 100,000,000.000\n\nSample Output:\n 100\n 000\n 000\n 000\n\"\"\"\n\n# Solution:\n\n\nimport re\n\nregex_pattern = r\"[,.]\"\n\nprint(\"\\n\".join(re.split(regex_pattern, input())))\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
import requests import json import io import sys names = ['abc-news', 'abc-news-au', 'aftenposten','al-jazeera-english','ars-technica','associated-press','australian-financial-review','axios', 'bbc-news', 'bbc-sport','bleacher-report', 'bloomberg','breitbart-news','business-insider', 'business-insider-uk','buzzfeed','cbc-news', 'cbs-news','cnbc','cnn','crypto-coins-news','daily-mail','engadget','entertainment-weekly','espn','engadget','espn-cric-info','financial-post','financial-times','football-italia','fortune','fox-sports','fox-news','four-four-two','google-news','google-news-ca','google-news-uk','google-news-in''google-news-au','hacker-new','ign','independent','mashable','metro','mirror','mtv-news','medical-news-today','mtv-news-uk','national-geographic','msnbc','nbc-news','news24','new-scientist','newsweek','news-com-au','new-york-magazine','next-big-future','nfl-news','nhl-news','politico','polygon','recode','reuters','reddit-r-all','rte','techradar','the-economist','the-globe-and-mail','the-guardian-au','the-guardian-uk','techcrunch','the-hill','talksport','the-hindu','the-irish-times','the-lad-bible','the-huffington-post','the-new-york-times','the-times-of-india','the-telegraph','the-verge','the-wall-street-journal','the-washington-post','time','usa-today','vice-news','wired','xinhua-net','der-tagesspiegel'] sys.stdout=open("/sources/output20180401.json","a+") print("[") sys.stdout.close() for name in names: url = ('https://newsapi.org/v2/everything?sources='+name+'&pageSize=100&language=en&from=2018-04-01&to=2018-04-01&apiKey=c0456841cb6a4dc794e3ec64e86b7e6e') count = 0 response = requests.get(url) sys.stdout=open("/sources/output20180401.json","a+") print(json.dumps(response.json())) print(",") sys.stdout.close() sys.stdout=open("/sources/output20180401.json","a+") print("]") sys.stdout.close() #&from=2018-03-28
normal
{ "blob_id": "590baf17d9fdad9f52869fa354112d3aa5f7d5f0", "index": 8943, "step-1": "<mask token>\n", "step-2": "<mask token>\nprint('[')\nsys.stdout.close()\nfor name in names:\n url = ('https://newsapi.org/v2/everything?sources=' + name +\n '&pageSize=100&language=en&from=2018-04-01&to=2018-04-01&apiKey=c0456841cb6a4dc794e3ec64e86b7e6e'\n )\n count = 0\n response = requests.get(url)\n sys.stdout = open('/sources/output20180401.json', 'a+')\n print(json.dumps(response.json()))\n print(',')\n sys.stdout.close()\n<mask token>\nprint(']')\nsys.stdout.close()\n", "step-3": "<mask token>\nnames = ['abc-news', 'abc-news-au', 'aftenposten', 'al-jazeera-english',\n 'ars-technica', 'associated-press', 'australian-financial-review',\n 'axios', 'bbc-news', 'bbc-sport', 'bleacher-report', 'bloomberg',\n 'breitbart-news', 'business-insider', 'business-insider-uk', 'buzzfeed',\n 'cbc-news', 'cbs-news', 'cnbc', 'cnn', 'crypto-coins-news',\n 'daily-mail', 'engadget', 'entertainment-weekly', 'espn', 'engadget',\n 'espn-cric-info', 'financial-post', 'financial-times',\n 'football-italia', 'fortune', 'fox-sports', 'fox-news', 'four-four-two',\n 'google-news', 'google-news-ca', 'google-news-uk',\n 'google-news-ingoogle-news-au', 'hacker-new', 'ign', 'independent',\n 'mashable', 'metro', 'mirror', 'mtv-news', 'medical-news-today',\n 'mtv-news-uk', 'national-geographic', 'msnbc', 'nbc-news', 'news24',\n 'new-scientist', 'newsweek', 'news-com-au', 'new-york-magazine',\n 'next-big-future', 'nfl-news', 'nhl-news', 'politico', 'polygon',\n 'recode', 'reuters', 'reddit-r-all', 'rte', 'techradar',\n 'the-economist', 'the-globe-and-mail', 'the-guardian-au',\n 'the-guardian-uk', 'techcrunch', 'the-hill', 'talksport', 'the-hindu',\n 'the-irish-times', 'the-lad-bible', 'the-huffington-post',\n 'the-new-york-times', 'the-times-of-india', 'the-telegraph',\n 'the-verge', 'the-wall-street-journal', 'the-washington-post', 'time',\n 'usa-today', 'vice-news', 'wired', 'xinhua-net', 
'der-tagesspiegel']\nsys.stdout = open('/sources/output20180401.json', 'a+')\nprint('[')\nsys.stdout.close()\nfor name in names:\n url = ('https://newsapi.org/v2/everything?sources=' + name +\n '&pageSize=100&language=en&from=2018-04-01&to=2018-04-01&apiKey=c0456841cb6a4dc794e3ec64e86b7e6e'\n )\n count = 0\n response = requests.get(url)\n sys.stdout = open('/sources/output20180401.json', 'a+')\n print(json.dumps(response.json()))\n print(',')\n sys.stdout.close()\nsys.stdout = open('/sources/output20180401.json', 'a+')\nprint(']')\nsys.stdout.close()\n", "step-4": "import requests\nimport json\nimport io\nimport sys\nnames = ['abc-news', 'abc-news-au', 'aftenposten', 'al-jazeera-english',\n 'ars-technica', 'associated-press', 'australian-financial-review',\n 'axios', 'bbc-news', 'bbc-sport', 'bleacher-report', 'bloomberg',\n 'breitbart-news', 'business-insider', 'business-insider-uk', 'buzzfeed',\n 'cbc-news', 'cbs-news', 'cnbc', 'cnn', 'crypto-coins-news',\n 'daily-mail', 'engadget', 'entertainment-weekly', 'espn', 'engadget',\n 'espn-cric-info', 'financial-post', 'financial-times',\n 'football-italia', 'fortune', 'fox-sports', 'fox-news', 'four-four-two',\n 'google-news', 'google-news-ca', 'google-news-uk',\n 'google-news-ingoogle-news-au', 'hacker-new', 'ign', 'independent',\n 'mashable', 'metro', 'mirror', 'mtv-news', 'medical-news-today',\n 'mtv-news-uk', 'national-geographic', 'msnbc', 'nbc-news', 'news24',\n 'new-scientist', 'newsweek', 'news-com-au', 'new-york-magazine',\n 'next-big-future', 'nfl-news', 'nhl-news', 'politico', 'polygon',\n 'recode', 'reuters', 'reddit-r-all', 'rte', 'techradar',\n 'the-economist', 'the-globe-and-mail', 'the-guardian-au',\n 'the-guardian-uk', 'techcrunch', 'the-hill', 'talksport', 'the-hindu',\n 'the-irish-times', 'the-lad-bible', 'the-huffington-post',\n 'the-new-york-times', 'the-times-of-india', 'the-telegraph',\n 'the-verge', 'the-wall-street-journal', 'the-washington-post', 'time',\n 'usa-today', 'vice-news', 'wired', 
'xinhua-net', 'der-tagesspiegel']\nsys.stdout = open('/sources/output20180401.json', 'a+')\nprint('[')\nsys.stdout.close()\nfor name in names:\n url = ('https://newsapi.org/v2/everything?sources=' + name +\n '&pageSize=100&language=en&from=2018-04-01&to=2018-04-01&apiKey=c0456841cb6a4dc794e3ec64e86b7e6e'\n )\n count = 0\n response = requests.get(url)\n sys.stdout = open('/sources/output20180401.json', 'a+')\n print(json.dumps(response.json()))\n print(',')\n sys.stdout.close()\nsys.stdout = open('/sources/output20180401.json', 'a+')\nprint(']')\nsys.stdout.close()\n", "step-5": "import requests\nimport json\nimport io\nimport sys\n\nnames = ['abc-news', 'abc-news-au', 'aftenposten','al-jazeera-english','ars-technica','associated-press','australian-financial-review','axios', 'bbc-news', 'bbc-sport','bleacher-report', 'bloomberg','breitbart-news','business-insider', 'business-insider-uk','buzzfeed','cbc-news', 'cbs-news','cnbc','cnn','crypto-coins-news','daily-mail','engadget','entertainment-weekly','espn','engadget','espn-cric-info','financial-post','financial-times','football-italia','fortune','fox-sports','fox-news','four-four-two','google-news','google-news-ca','google-news-uk','google-news-in''google-news-au','hacker-new','ign','independent','mashable','metro','mirror','mtv-news','medical-news-today','mtv-news-uk','national-geographic','msnbc','nbc-news','news24','new-scientist','newsweek','news-com-au','new-york-magazine','next-big-future','nfl-news','nhl-news','politico','polygon','recode','reuters','reddit-r-all','rte','techradar','the-economist','the-globe-and-mail','the-guardian-au','the-guardian-uk','techcrunch','the-hill','talksport','the-hindu','the-irish-times','the-lad-bible','the-huffington-post','the-new-york-times','the-times-of-india','the-telegraph','the-verge','the-wall-street-journal','the-washington-post','time','usa-today','vice-news','wired','xinhua-net','der-tagesspiegel']\nsys.stdout=open(\"/sources/output20180401.json\",\"a+\")\nprint(\"[\"
)\nsys.stdout.close()\nfor name in names:\n\turl = ('https://newsapi.org/v2/everything?sources='+name+'&pageSize=100&language=en&from=2018-04-01&to=2018-04-01&apiKey=c0456841cb6a4dc794e3ec64e86b7e6e')\n\tcount = 0\n\tresponse = requests.get(url) \n\tsys.stdout=open(\"/sources/output20180401.json\",\"a+\")\n\tprint(json.dumps(response.json()))\n\tprint(\",\")\n\tsys.stdout.close()\n\nsys.stdout=open(\"/sources/output20180401.json\",\"a+\")\nprint(\"]\")\nsys.stdout.close()\n#&from=2018-03-28\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
<|reserved_special_token_0|> def get_jwst_coords(outDir): log.info('Creating and saving aperture') jwst_pup = poppy.MultiHexagonAperture(rings=2, flattoflat=FLAT_TO_FLAT) jwst_pup.display(colorbar=False) plt.title('JWST telescope pupil') for i in range(NB_SEG + 1): ycen, xcen = jwst_pup._hex_center(i) plt.annotate(str(i), size='x-large', xy=(xcen - 0.1, ycen - 0.1)) plt.savefig(os.path.join(outDir, 'JWST_aperture.pdf')) plt.clf() jwst_pup.display(colorbar=False) plt.title('JWST telescope exit pupil') for i in range(NB_SEG + 1): ycen, xcen = jwst_pup._hex_center(i) ycen *= -1 plt.annotate(str(i), size='x-large', xy=(xcen - 0.1, ycen - 0.1)) plt.savefig(os.path.join(outDir, 'JWST_exit_pupil.pdf')) pupil_dir = jwst_pup.sample(wavelength=WVLN, npix=IM_SIZE_PUPIL, grid_size=FLAT_DIAM, return_scale=True) util.write_fits(pupil_dir[0], os.path.join(outDir, 'pupil.fits')) seg_position = np.zeros((NB_SEG, 2)) for i in range(NB_SEG + 1): if i == 0: continue else: seg_position[i - 1, 1], seg_position[i - 1, 0 ] = jwst_pup._hex_center(i) seg_position[i - 1, 1] *= -1 return seg_position def nircam_coro(filter, fpm, ppm, Aber_WSS): """-- Deprecated function still used in analytical PASTIS and some notebooks. -- Create NIRCam image with specified filter and coronagraph, and aberration input. 
Parameters ---------- filter : string Filter name fpm : string Name of focal-plane mask ppm : string Name of Lyot stop (ppm = "pupil-plane mask") Aber_WSS : list or array list of Zernike coefficients ordered in WSS convention and in METERS Returns ------- psf_webbpsf : ndarray PSF image """ nc = webbpsf.NIRCam() nc.filter = filter nc.image_mask = fpm nc.pupil_mask = ppm nc, ote = webbpsf.enable_adjustable_ote(nc) nc.include_si_wfe = False ote.reset() ote.zero() for i in range(NB_SEG): seg = WSS_SEGS[i].split('-')[0] ote._apply_hexikes_to_seg(seg, Aber_WSS[i, :]) psf_nc = nc.calc_psf(oversample=1, fov_pixels=int(IM_SIZE_E2E), nlambda=1) psf_webbpsf = psf_nc[1].data return psf_webbpsf def nircam_nocoro(filter, Aber_WSS): """-- Deprecated function still used in analytical PASTIS and some notebooks. -- Parameters ---------- filter : string Filter name Aber_WSS : list or array list of Zernike coefficients ordered in WSS convention and in METERS Returns ------- psf_webbpsf : ndarray PSF image """ nc = webbpsf.NIRCam() nc.filter = filter nc, ote = webbpsf.enable_adjustable_ote(nc) nc.include_si_wfe = False ote.reset() ote.zero() for i in range(NB_SEG): seg = WSS_SEGS[i].split('-')[0] ote._apply_hexikes_to_seg(seg, Aber_WSS[i, :]) psf_nc = nc.calc_psf(oversample=1, fov_pixels=int(IM_SIZE_E2E), nlambda=1) psf_webbpsf = psf_nc[1].data return psf_webbpsf def set_up_nircam(): """Return a configured instance of the NIRCam simulator on JWST. Sets up the Lyot stop and filter from the configfile, turns of science instrument (SI) internal WFE and zeros the OTE. Returns ------- Tuple of NIRCam instance, and its OTE """ nircam = webbpsf.NIRCam() nircam.include_si_wfe = False nircam.filter = CONFIG_PASTIS.get('JWST', 'filter_name') nircam.pupil_mask = CONFIG_PASTIS.get('JWST', 'pupil_plane_stop') nircam, ote = webbpsf.enable_adjustable_ote(nircam) ote.zero(zero_original=True) return nircam, ote def set_up_cgi(): """Return a configured instance of the CGI simulator on RST. 
Sets up the Lyot stop and filter from the configfile, turns off science instrument (SI) internal WFE, and reads the FPM setting from the configfile. Returns ------- CGI instrument instance """ webbpsf.setup_logging('ERROR') mode_in = CONFIG_PASTIS.get('RST', 'mode') nbactuator = int(CONFIG_PASTIS.get('RST', 'nb_subapertures')) nbactuator_in = int(np.sqrt(nbactuator)) if nbactuator_in ** 2 != nbactuator: error_msg = ( f'The number of subapertures from config_pastis.ini is {nbactuator}, which is not the square of the actuators per row (={nbactuator_in})!' ) log.error(error_msg) raise ValueError(error_msg) cgi = webbpsf.roman.CGI(mode=mode_in, nbactuator=int(nbactuator_in)) cgi.include_si_wfe = False cgi.apodizer = CONFIG_PASTIS.get('RST', 'apodizer') cgi.fpm = CONFIG_PASTIS.get('RST', 'fpm') cgi.lyotstop = CONFIG_PASTIS.get('RST', 'lyotstop') cgi.camera = CONFIG_PASTIS.get('RST', 'camera') cgi.filter = CONFIG_PASTIS.get('RST', 'filter_name') return cgi def display_ote_and_psf(inst, ote, opd_vmax=500, psf_vmax=0.1, title= 'OPD and PSF', **kwargs): """Display OTE and PSF of a JWST instrument next to each other. 
Adapted from: https://github.com/spacetelescope/webbpsf/blob/develop/notebooks/Simulated%20OTE%20Mirror%20Move%20Demo.ipynb Parameters ---------- inst : WebbPSF instrument instance for example: webbpsf.NIRCam() ote : OTE of inst, usually obtained with: instrument, ote = webbpsf.enable_adjustable_ote(instrument) opd_vmax : float max display value for the OPD psf_vmax : float max display valued for PSF title : string plot title kwargs """ psf = inst.calc_psf(nlambda=1) plt.figure(figsize=(12, 8)) ax1 = plt.subplot(121) ote.display_opd(ax=ax1, vmax=opd_vmax, colorbar_orientation= 'horizontal', title='OPD with aberrated segments') plt.subplot(122) webbpsf.display_psf(psf, ext=2, vmax=psf_vmax, vmin=psf_vmax / 10000.0, colorbar_orientation='horizontal', title='PSF simulation') plt.suptitle(title, fontsize=16) <|reserved_special_token_1|> <|reserved_special_token_0|> try: import webbpsf os.environ['WEBBPSF_PATH'] = CONFIG_PASTIS.get('local', 'webbpsf_data_path' ) WSS_SEGS = webbpsf.constants.SEGNAMES_WSS_ORDER except ImportError: log.info('WebbPSF was not imported.') <|reserved_special_token_0|> def get_jwst_coords(outDir): log.info('Creating and saving aperture') jwst_pup = poppy.MultiHexagonAperture(rings=2, flattoflat=FLAT_TO_FLAT) jwst_pup.display(colorbar=False) plt.title('JWST telescope pupil') for i in range(NB_SEG + 1): ycen, xcen = jwst_pup._hex_center(i) plt.annotate(str(i), size='x-large', xy=(xcen - 0.1, ycen - 0.1)) plt.savefig(os.path.join(outDir, 'JWST_aperture.pdf')) plt.clf() jwst_pup.display(colorbar=False) plt.title('JWST telescope exit pupil') for i in range(NB_SEG + 1): ycen, xcen = jwst_pup._hex_center(i) ycen *= -1 plt.annotate(str(i), size='x-large', xy=(xcen - 0.1, ycen - 0.1)) plt.savefig(os.path.join(outDir, 'JWST_exit_pupil.pdf')) pupil_dir = jwst_pup.sample(wavelength=WVLN, npix=IM_SIZE_PUPIL, grid_size=FLAT_DIAM, return_scale=True) util.write_fits(pupil_dir[0], os.path.join(outDir, 'pupil.fits')) seg_position = np.zeros((NB_SEG, 2)) for i in 
range(NB_SEG + 1): if i == 0: continue else: seg_position[i - 1, 1], seg_position[i - 1, 0 ] = jwst_pup._hex_center(i) seg_position[i - 1, 1] *= -1 return seg_position def nircam_coro(filter, fpm, ppm, Aber_WSS): """-- Deprecated function still used in analytical PASTIS and some notebooks. -- Create NIRCam image with specified filter and coronagraph, and aberration input. Parameters ---------- filter : string Filter name fpm : string Name of focal-plane mask ppm : string Name of Lyot stop (ppm = "pupil-plane mask") Aber_WSS : list or array list of Zernike coefficients ordered in WSS convention and in METERS Returns ------- psf_webbpsf : ndarray PSF image """ nc = webbpsf.NIRCam() nc.filter = filter nc.image_mask = fpm nc.pupil_mask = ppm nc, ote = webbpsf.enable_adjustable_ote(nc) nc.include_si_wfe = False ote.reset() ote.zero() for i in range(NB_SEG): seg = WSS_SEGS[i].split('-')[0] ote._apply_hexikes_to_seg(seg, Aber_WSS[i, :]) psf_nc = nc.calc_psf(oversample=1, fov_pixels=int(IM_SIZE_E2E), nlambda=1) psf_webbpsf = psf_nc[1].data return psf_webbpsf def nircam_nocoro(filter, Aber_WSS): """-- Deprecated function still used in analytical PASTIS and some notebooks. -- Parameters ---------- filter : string Filter name Aber_WSS : list or array list of Zernike coefficients ordered in WSS convention and in METERS Returns ------- psf_webbpsf : ndarray PSF image """ nc = webbpsf.NIRCam() nc.filter = filter nc, ote = webbpsf.enable_adjustable_ote(nc) nc.include_si_wfe = False ote.reset() ote.zero() for i in range(NB_SEG): seg = WSS_SEGS[i].split('-')[0] ote._apply_hexikes_to_seg(seg, Aber_WSS[i, :]) psf_nc = nc.calc_psf(oversample=1, fov_pixels=int(IM_SIZE_E2E), nlambda=1) psf_webbpsf = psf_nc[1].data return psf_webbpsf def set_up_nircam(): """Return a configured instance of the NIRCam simulator on JWST. Sets up the Lyot stop and filter from the configfile, turns of science instrument (SI) internal WFE and zeros the OTE. 
Returns ------- Tuple of NIRCam instance, and its OTE """ nircam = webbpsf.NIRCam() nircam.include_si_wfe = False nircam.filter = CONFIG_PASTIS.get('JWST', 'filter_name') nircam.pupil_mask = CONFIG_PASTIS.get('JWST', 'pupil_plane_stop') nircam, ote = webbpsf.enable_adjustable_ote(nircam) ote.zero(zero_original=True) return nircam, ote def set_up_cgi(): """Return a configured instance of the CGI simulator on RST. Sets up the Lyot stop and filter from the configfile, turns off science instrument (SI) internal WFE, and reads the FPM setting from the configfile. Returns ------- CGI instrument instance """ webbpsf.setup_logging('ERROR') mode_in = CONFIG_PASTIS.get('RST', 'mode') nbactuator = int(CONFIG_PASTIS.get('RST', 'nb_subapertures')) nbactuator_in = int(np.sqrt(nbactuator)) if nbactuator_in ** 2 != nbactuator: error_msg = ( f'The number of subapertures from config_pastis.ini is {nbactuator}, which is not the square of the actuators per row (={nbactuator_in})!' ) log.error(error_msg) raise ValueError(error_msg) cgi = webbpsf.roman.CGI(mode=mode_in, nbactuator=int(nbactuator_in)) cgi.include_si_wfe = False cgi.apodizer = CONFIG_PASTIS.get('RST', 'apodizer') cgi.fpm = CONFIG_PASTIS.get('RST', 'fpm') cgi.lyotstop = CONFIG_PASTIS.get('RST', 'lyotstop') cgi.camera = CONFIG_PASTIS.get('RST', 'camera') cgi.filter = CONFIG_PASTIS.get('RST', 'filter_name') return cgi def display_ote_and_psf(inst, ote, opd_vmax=500, psf_vmax=0.1, title= 'OPD and PSF', **kwargs): """Display OTE and PSF of a JWST instrument next to each other. 
Adapted from: https://github.com/spacetelescope/webbpsf/blob/develop/notebooks/Simulated%20OTE%20Mirror%20Move%20Demo.ipynb Parameters ---------- inst : WebbPSF instrument instance for example: webbpsf.NIRCam() ote : OTE of inst, usually obtained with: instrument, ote = webbpsf.enable_adjustable_ote(instrument) opd_vmax : float max display value for the OPD psf_vmax : float max display valued for PSF title : string plot title kwargs """ psf = inst.calc_psf(nlambda=1) plt.figure(figsize=(12, 8)) ax1 = plt.subplot(121) ote.display_opd(ax=ax1, vmax=opd_vmax, colorbar_orientation= 'horizontal', title='OPD with aberrated segments') plt.subplot(122) webbpsf.display_psf(psf, ext=2, vmax=psf_vmax, vmin=psf_vmax / 10000.0, colorbar_orientation='horizontal', title='PSF simulation') plt.suptitle(title, fontsize=16) <|reserved_special_token_1|> <|reserved_special_token_0|> log = logging.getLogger() try: import webbpsf os.environ['WEBBPSF_PATH'] = CONFIG_PASTIS.get('local', 'webbpsf_data_path' ) WSS_SEGS = webbpsf.constants.SEGNAMES_WSS_ORDER except ImportError: log.info('WebbPSF was not imported.') NB_SEG = CONFIG_PASTIS.getint('JWST', 'nb_subapertures') FLAT_TO_FLAT = CONFIG_PASTIS.getfloat('JWST', 'flat_to_flat') WVLN = CONFIG_PASTIS.getfloat('JWST', 'lambda') * u.nm IM_SIZE_PUPIL = CONFIG_PASTIS.getint('numerical', 'tel_size_px') FLAT_DIAM = CONFIG_PASTIS.getfloat('JWST', 'flat_diameter') * u.m IM_SIZE_E2E = CONFIG_PASTIS.getint('numerical', 'im_size_px_webbpsf') def get_jwst_coords(outDir): log.info('Creating and saving aperture') jwst_pup = poppy.MultiHexagonAperture(rings=2, flattoflat=FLAT_TO_FLAT) jwst_pup.display(colorbar=False) plt.title('JWST telescope pupil') for i in range(NB_SEG + 1): ycen, xcen = jwst_pup._hex_center(i) plt.annotate(str(i), size='x-large', xy=(xcen - 0.1, ycen - 0.1)) plt.savefig(os.path.join(outDir, 'JWST_aperture.pdf')) plt.clf() jwst_pup.display(colorbar=False) plt.title('JWST telescope exit pupil') for i in range(NB_SEG + 1): ycen, xcen = 
jwst_pup._hex_center(i) ycen *= -1 plt.annotate(str(i), size='x-large', xy=(xcen - 0.1, ycen - 0.1)) plt.savefig(os.path.join(outDir, 'JWST_exit_pupil.pdf')) pupil_dir = jwst_pup.sample(wavelength=WVLN, npix=IM_SIZE_PUPIL, grid_size=FLAT_DIAM, return_scale=True) util.write_fits(pupil_dir[0], os.path.join(outDir, 'pupil.fits')) seg_position = np.zeros((NB_SEG, 2)) for i in range(NB_SEG + 1): if i == 0: continue else: seg_position[i - 1, 1], seg_position[i - 1, 0 ] = jwst_pup._hex_center(i) seg_position[i - 1, 1] *= -1 return seg_position def nircam_coro(filter, fpm, ppm, Aber_WSS): """-- Deprecated function still used in analytical PASTIS and some notebooks. -- Create NIRCam image with specified filter and coronagraph, and aberration input. Parameters ---------- filter : string Filter name fpm : string Name of focal-plane mask ppm : string Name of Lyot stop (ppm = "pupil-plane mask") Aber_WSS : list or array list of Zernike coefficients ordered in WSS convention and in METERS Returns ------- psf_webbpsf : ndarray PSF image """ nc = webbpsf.NIRCam() nc.filter = filter nc.image_mask = fpm nc.pupil_mask = ppm nc, ote = webbpsf.enable_adjustable_ote(nc) nc.include_si_wfe = False ote.reset() ote.zero() for i in range(NB_SEG): seg = WSS_SEGS[i].split('-')[0] ote._apply_hexikes_to_seg(seg, Aber_WSS[i, :]) psf_nc = nc.calc_psf(oversample=1, fov_pixels=int(IM_SIZE_E2E), nlambda=1) psf_webbpsf = psf_nc[1].data return psf_webbpsf def nircam_nocoro(filter, Aber_WSS): """-- Deprecated function still used in analytical PASTIS and some notebooks. 
-- Parameters ---------- filter : string Filter name Aber_WSS : list or array list of Zernike coefficients ordered in WSS convention and in METERS Returns ------- psf_webbpsf : ndarray PSF image """ nc = webbpsf.NIRCam() nc.filter = filter nc, ote = webbpsf.enable_adjustable_ote(nc) nc.include_si_wfe = False ote.reset() ote.zero() for i in range(NB_SEG): seg = WSS_SEGS[i].split('-')[0] ote._apply_hexikes_to_seg(seg, Aber_WSS[i, :]) psf_nc = nc.calc_psf(oversample=1, fov_pixels=int(IM_SIZE_E2E), nlambda=1) psf_webbpsf = psf_nc[1].data return psf_webbpsf def set_up_nircam(): """Return a configured instance of the NIRCam simulator on JWST. Sets up the Lyot stop and filter from the configfile, turns of science instrument (SI) internal WFE and zeros the OTE. Returns ------- Tuple of NIRCam instance, and its OTE """ nircam = webbpsf.NIRCam() nircam.include_si_wfe = False nircam.filter = CONFIG_PASTIS.get('JWST', 'filter_name') nircam.pupil_mask = CONFIG_PASTIS.get('JWST', 'pupil_plane_stop') nircam, ote = webbpsf.enable_adjustable_ote(nircam) ote.zero(zero_original=True) return nircam, ote def set_up_cgi(): """Return a configured instance of the CGI simulator on RST. Sets up the Lyot stop and filter from the configfile, turns off science instrument (SI) internal WFE, and reads the FPM setting from the configfile. Returns ------- CGI instrument instance """ webbpsf.setup_logging('ERROR') mode_in = CONFIG_PASTIS.get('RST', 'mode') nbactuator = int(CONFIG_PASTIS.get('RST', 'nb_subapertures')) nbactuator_in = int(np.sqrt(nbactuator)) if nbactuator_in ** 2 != nbactuator: error_msg = ( f'The number of subapertures from config_pastis.ini is {nbactuator}, which is not the square of the actuators per row (={nbactuator_in})!' 
) log.error(error_msg) raise ValueError(error_msg) cgi = webbpsf.roman.CGI(mode=mode_in, nbactuator=int(nbactuator_in)) cgi.include_si_wfe = False cgi.apodizer = CONFIG_PASTIS.get('RST', 'apodizer') cgi.fpm = CONFIG_PASTIS.get('RST', 'fpm') cgi.lyotstop = CONFIG_PASTIS.get('RST', 'lyotstop') cgi.camera = CONFIG_PASTIS.get('RST', 'camera') cgi.filter = CONFIG_PASTIS.get('RST', 'filter_name') return cgi def display_ote_and_psf(inst, ote, opd_vmax=500, psf_vmax=0.1, title= 'OPD and PSF', **kwargs): """Display OTE and PSF of a JWST instrument next to each other. Adapted from: https://github.com/spacetelescope/webbpsf/blob/develop/notebooks/Simulated%20OTE%20Mirror%20Move%20Demo.ipynb Parameters ---------- inst : WebbPSF instrument instance for example: webbpsf.NIRCam() ote : OTE of inst, usually obtained with: instrument, ote = webbpsf.enable_adjustable_ote(instrument) opd_vmax : float max display value for the OPD psf_vmax : float max display valued for PSF title : string plot title kwargs """ psf = inst.calc_psf(nlambda=1) plt.figure(figsize=(12, 8)) ax1 = plt.subplot(121) ote.display_opd(ax=ax1, vmax=opd_vmax, colorbar_orientation= 'horizontal', title='OPD with aberrated segments') plt.subplot(122) webbpsf.display_psf(psf, ext=2, vmax=psf_vmax, vmin=psf_vmax / 10000.0, colorbar_orientation='horizontal', title='PSF simulation') plt.suptitle(title, fontsize=16) <|reserved_special_token_1|> <|reserved_special_token_0|> import os import numpy as np import matplotlib.pyplot as plt import astropy.units as u import logging import poppy from pastis.config import CONFIG_PASTIS import pastis.util as util log = logging.getLogger() try: import webbpsf os.environ['WEBBPSF_PATH'] = CONFIG_PASTIS.get('local', 'webbpsf_data_path' ) WSS_SEGS = webbpsf.constants.SEGNAMES_WSS_ORDER except ImportError: log.info('WebbPSF was not imported.') NB_SEG = CONFIG_PASTIS.getint('JWST', 'nb_subapertures') FLAT_TO_FLAT = CONFIG_PASTIS.getfloat('JWST', 'flat_to_flat') WVLN = 
CONFIG_PASTIS.getfloat('JWST', 'lambda') * u.nm IM_SIZE_PUPIL = CONFIG_PASTIS.getint('numerical', 'tel_size_px') FLAT_DIAM = CONFIG_PASTIS.getfloat('JWST', 'flat_diameter') * u.m IM_SIZE_E2E = CONFIG_PASTIS.getint('numerical', 'im_size_px_webbpsf') def get_jwst_coords(outDir): log.info('Creating and saving aperture') jwst_pup = poppy.MultiHexagonAperture(rings=2, flattoflat=FLAT_TO_FLAT) jwst_pup.display(colorbar=False) plt.title('JWST telescope pupil') for i in range(NB_SEG + 1): ycen, xcen = jwst_pup._hex_center(i) plt.annotate(str(i), size='x-large', xy=(xcen - 0.1, ycen - 0.1)) plt.savefig(os.path.join(outDir, 'JWST_aperture.pdf')) plt.clf() jwst_pup.display(colorbar=False) plt.title('JWST telescope exit pupil') for i in range(NB_SEG + 1): ycen, xcen = jwst_pup._hex_center(i) ycen *= -1 plt.annotate(str(i), size='x-large', xy=(xcen - 0.1, ycen - 0.1)) plt.savefig(os.path.join(outDir, 'JWST_exit_pupil.pdf')) pupil_dir = jwst_pup.sample(wavelength=WVLN, npix=IM_SIZE_PUPIL, grid_size=FLAT_DIAM, return_scale=True) util.write_fits(pupil_dir[0], os.path.join(outDir, 'pupil.fits')) seg_position = np.zeros((NB_SEG, 2)) for i in range(NB_SEG + 1): if i == 0: continue else: seg_position[i - 1, 1], seg_position[i - 1, 0 ] = jwst_pup._hex_center(i) seg_position[i - 1, 1] *= -1 return seg_position def nircam_coro(filter, fpm, ppm, Aber_WSS): """-- Deprecated function still used in analytical PASTIS and some notebooks. -- Create NIRCam image with specified filter and coronagraph, and aberration input. 
Parameters ---------- filter : string Filter name fpm : string Name of focal-plane mask ppm : string Name of Lyot stop (ppm = "pupil-plane mask") Aber_WSS : list or array list of Zernike coefficients ordered in WSS convention and in METERS Returns ------- psf_webbpsf : ndarray PSF image """ nc = webbpsf.NIRCam() nc.filter = filter nc.image_mask = fpm nc.pupil_mask = ppm nc, ote = webbpsf.enable_adjustable_ote(nc) nc.include_si_wfe = False ote.reset() ote.zero() for i in range(NB_SEG): seg = WSS_SEGS[i].split('-')[0] ote._apply_hexikes_to_seg(seg, Aber_WSS[i, :]) psf_nc = nc.calc_psf(oversample=1, fov_pixels=int(IM_SIZE_E2E), nlambda=1) psf_webbpsf = psf_nc[1].data return psf_webbpsf def nircam_nocoro(filter, Aber_WSS): """-- Deprecated function still used in analytical PASTIS and some notebooks. -- Parameters ---------- filter : string Filter name Aber_WSS : list or array list of Zernike coefficients ordered in WSS convention and in METERS Returns ------- psf_webbpsf : ndarray PSF image """ nc = webbpsf.NIRCam() nc.filter = filter nc, ote = webbpsf.enable_adjustable_ote(nc) nc.include_si_wfe = False ote.reset() ote.zero() for i in range(NB_SEG): seg = WSS_SEGS[i].split('-')[0] ote._apply_hexikes_to_seg(seg, Aber_WSS[i, :]) psf_nc = nc.calc_psf(oversample=1, fov_pixels=int(IM_SIZE_E2E), nlambda=1) psf_webbpsf = psf_nc[1].data return psf_webbpsf def set_up_nircam(): """Return a configured instance of the NIRCam simulator on JWST. Sets up the Lyot stop and filter from the configfile, turns of science instrument (SI) internal WFE and zeros the OTE. Returns ------- Tuple of NIRCam instance, and its OTE """ nircam = webbpsf.NIRCam() nircam.include_si_wfe = False nircam.filter = CONFIG_PASTIS.get('JWST', 'filter_name') nircam.pupil_mask = CONFIG_PASTIS.get('JWST', 'pupil_plane_stop') nircam, ote = webbpsf.enable_adjustable_ote(nircam) ote.zero(zero_original=True) return nircam, ote def set_up_cgi(): """Return a configured instance of the CGI simulator on RST. 
Sets up the Lyot stop and filter from the configfile, turns off science instrument (SI) internal WFE, and reads the FPM setting from the configfile. Returns ------- CGI instrument instance """ webbpsf.setup_logging('ERROR') mode_in = CONFIG_PASTIS.get('RST', 'mode') nbactuator = int(CONFIG_PASTIS.get('RST', 'nb_subapertures')) nbactuator_in = int(np.sqrt(nbactuator)) if nbactuator_in ** 2 != nbactuator: error_msg = ( f'The number of subapertures from config_pastis.ini is {nbactuator}, which is not the square of the actuators per row (={nbactuator_in})!' ) log.error(error_msg) raise ValueError(error_msg) cgi = webbpsf.roman.CGI(mode=mode_in, nbactuator=int(nbactuator_in)) cgi.include_si_wfe = False cgi.apodizer = CONFIG_PASTIS.get('RST', 'apodizer') cgi.fpm = CONFIG_PASTIS.get('RST', 'fpm') cgi.lyotstop = CONFIG_PASTIS.get('RST', 'lyotstop') cgi.camera = CONFIG_PASTIS.get('RST', 'camera') cgi.filter = CONFIG_PASTIS.get('RST', 'filter_name') return cgi def display_ote_and_psf(inst, ote, opd_vmax=500, psf_vmax=0.1, title= 'OPD and PSF', **kwargs): """Display OTE and PSF of a JWST instrument next to each other. 
Adapted from: https://github.com/spacetelescope/webbpsf/blob/develop/notebooks/Simulated%20OTE%20Mirror%20Move%20Demo.ipynb Parameters ---------- inst : WebbPSF instrument instance for example: webbpsf.NIRCam() ote : OTE of inst, usually obtained with: instrument, ote = webbpsf.enable_adjustable_ote(instrument) opd_vmax : float max display value for the OPD psf_vmax : float max display valued for PSF title : string plot title kwargs """ psf = inst.calc_psf(nlambda=1) plt.figure(figsize=(12, 8)) ax1 = plt.subplot(121) ote.display_opd(ax=ax1, vmax=opd_vmax, colorbar_orientation= 'horizontal', title='OPD with aberrated segments') plt.subplot(122) webbpsf.display_psf(psf, ext=2, vmax=psf_vmax, vmin=psf_vmax / 10000.0, colorbar_orientation='horizontal', title='PSF simulation') plt.suptitle(title, fontsize=16) <|reserved_special_token_1|> """ This is a module containing convenience functions to create the JWST aperture and coronagraphic images with WebbPSF. """ import os import numpy as np import matplotlib.pyplot as plt import astropy.units as u import logging import poppy from pastis.config import CONFIG_PASTIS import pastis.util as util log = logging.getLogger() try: import webbpsf # Setting to ensure that PyCharm finds the webbpsf-data folder. 
If you don't know where it is, find it with: # webbpsf.utils.get_webbpsf_data_path() # --> e.g.: >>source activate pastis >>ipython >>import webbpsf >>webbpsf.utils.get_webbpsf_data_path() os.environ['WEBBPSF_PATH'] = CONFIG_PASTIS.get('local', 'webbpsf_data_path') WSS_SEGS = webbpsf.constants.SEGNAMES_WSS_ORDER except ImportError: log.info('WebbPSF was not imported.') NB_SEG = CONFIG_PASTIS.getint('JWST', 'nb_subapertures') FLAT_TO_FLAT = CONFIG_PASTIS.getfloat('JWST', 'flat_to_flat') WVLN = CONFIG_PASTIS.getfloat('JWST', 'lambda') * u.nm IM_SIZE_PUPIL = CONFIG_PASTIS.getint('numerical', 'tel_size_px') FLAT_DIAM = CONFIG_PASTIS.getfloat('JWST', 'flat_diameter') * u.m IM_SIZE_E2E = CONFIG_PASTIS.getint('numerical', 'im_size_px_webbpsf') def get_jwst_coords(outDir): ### Generate the pupil with segments and spiders # Use poppy to create JWST aperture without spiders log.info('Creating and saving aperture') jwst_pup = poppy.MultiHexagonAperture(rings=2, flattoflat=FLAT_TO_FLAT) # Create JWST pupil without spiders jwst_pup.display(colorbar=False) # Show pupil (will be saved to file) plt.title('JWST telescope pupil') # Number the segments for i in range(NB_SEG + 1): ycen, xcen = jwst_pup._hex_center(i) plt.annotate(str(i), size='x-large', xy=(xcen - 0.1, ycen - 0.1)) # -0.1 is for shifting the numbers closer to the segment centers # Save a PDF version of the pupil plt.savefig(os.path.join(outDir, 'JWST_aperture.pdf')) # Since WebbPSF creates images by controlling the exit pupil, # let's also create the exit pupil instead of the entrance pupil. # I do this by flipping the y-coordinates of the segments. 
plt.clf() jwst_pup.display(colorbar=False) # Show pupil plt.title('JWST telescope exit pupil') # Number the segments for i in range(NB_SEG + 1): ycen, xcen = jwst_pup._hex_center(i) ycen *= -1 plt.annotate(str(i), size='x-large', xy=(xcen - 0.1, ycen - 0.1)) # -0.1 is for shifting the number labels closer to the segment centers # Save a PDF version of the exit pupil plt.savefig(os.path.join(outDir, 'JWST_exit_pupil.pdf')) # Get pupil as fits image pupil_dir = jwst_pup.sample(wavelength=WVLN, npix=IM_SIZE_PUPIL, grid_size=FLAT_DIAM, return_scale=True) # If the image size is equivalent to the total diameter of the telescope, we don't have to worry about sampling later # But for the JWST case with poppy it makes such a small difference that I am skipping it for now util.write_fits(pupil_dir[0], os.path.join(outDir, 'pupil.fits')) ### Get the coordinates of the central pixel of each segment seg_position = np.zeros((NB_SEG, 2)) # holds x and y position of each central pixel for i in range(NB_SEG + 1): # our pupil is still counting the central segment as seg 0, so we need to include it # in the loop, however, we will just discard the values for the center if i == 0: # Segment 0 is the central segment, which we want to skip and not put into seg_position continue # Continues with the next iteration of the loop else: seg_position[i - 1, 1], seg_position[i - 1, 0] = jwst_pup._hex_center(i) # y, x = center position seg_position[i - 1, 1] *= -1 # inverting the y-axis because we want to work with the EXIT PUPIL!!! # Units are meters!!! return seg_position def nircam_coro(filter, fpm, ppm, Aber_WSS): """-- Deprecated function still used in analytical PASTIS and some notebooks. -- Create NIRCam image with specified filter and coronagraph, and aberration input. 
Parameters ---------- filter : string Filter name fpm : string Name of focal-plane mask ppm : string Name of Lyot stop (ppm = "pupil-plane mask") Aber_WSS : list or array list of Zernike coefficients ordered in WSS convention and in METERS Returns ------- psf_webbpsf : ndarray PSF image """ # Set up NIRCam and coronagraph nc = webbpsf.NIRCam() nc.filter = filter nc.image_mask = fpm nc.pupil_mask = ppm # Adjust OTE with aberrations nc, ote = webbpsf.enable_adjustable_ote(nc) nc.include_si_wfe = False # set SI internal WFE to zero ote.reset() ote.zero() for i in range(NB_SEG): seg = WSS_SEGS[i].split('-')[0] ote._apply_hexikes_to_seg(seg, Aber_WSS[i, :]) # Calculate PSF psf_nc = nc.calc_psf(oversample=1, fov_pixels=int(IM_SIZE_E2E), nlambda=1) psf_webbpsf = psf_nc[1].data return psf_webbpsf def nircam_nocoro(filter, Aber_WSS): """-- Deprecated function still used in analytical PASTIS and some notebooks. -- Parameters ---------- filter : string Filter name Aber_WSS : list or array list of Zernike coefficients ordered in WSS convention and in METERS Returns ------- psf_webbpsf : ndarray PSF image """ # Create NIRCam object nc = webbpsf.NIRCam() # Set filter nc.filter = filter # Adjust OTE with aberrations nc, ote = webbpsf.enable_adjustable_ote(nc) nc.include_si_wfe = False # set SI internal WFE to zero ote.reset() ote.zero() for i in range(NB_SEG): seg = WSS_SEGS[i].split('-')[0] ote._apply_hexikes_to_seg(seg, Aber_WSS[i, :]) # Calculate PSF psf_nc = nc.calc_psf(oversample=1, fov_pixels=int(IM_SIZE_E2E), nlambda=1) psf_webbpsf = psf_nc[1].data return psf_webbpsf def set_up_nircam(): """Return a configured instance of the NIRCam simulator on JWST. Sets up the Lyot stop and filter from the configfile, turns of science instrument (SI) internal WFE and zeros the OTE. 
Returns ------- Tuple of NIRCam instance, and its OTE """ nircam = webbpsf.NIRCam() nircam.include_si_wfe = False nircam.filter = CONFIG_PASTIS.get('JWST', 'filter_name') nircam.pupil_mask = CONFIG_PASTIS.get('JWST', 'pupil_plane_stop') nircam, ote = webbpsf.enable_adjustable_ote(nircam) ote.zero(zero_original=True) # https://github.com/spacetelescope/webbpsf/blob/96537c459996f682ac6e9af808809ca13fb85e87/webbpsf/opds.py#L1125 return nircam, ote def set_up_cgi(): """Return a configured instance of the CGI simulator on RST. Sets up the Lyot stop and filter from the configfile, turns off science instrument (SI) internal WFE, and reads the FPM setting from the configfile. Returns ------- CGI instrument instance """ webbpsf.setup_logging('ERROR') # Set actuators numbesr mode_in = CONFIG_PASTIS.get('RST', 'mode') nbactuator = int(CONFIG_PASTIS.get('RST', 'nb_subapertures')) nbactuator_in = int(np.sqrt(nbactuator)) if nbactuator_in**2 != nbactuator: error_msg = f"The number of subapertures from config_pastis.ini is {nbactuator}, which is not the square of the actuators per row (={nbactuator_in})!" log.error(error_msg) raise ValueError(error_msg) cgi = webbpsf.roman.CGI(mode=mode_in, nbactuator=int(nbactuator_in)) cgi.include_si_wfe = False cgi.apodizer = CONFIG_PASTIS.get('RST', 'apodizer') cgi.fpm = CONFIG_PASTIS.get('RST', 'fpm') cgi.lyotstop = CONFIG_PASTIS.get('RST', 'lyotstop') cgi.camera = CONFIG_PASTIS.get('RST', 'camera') cgi.filter = CONFIG_PASTIS.get('RST', 'filter_name') return cgi def display_ote_and_psf(inst, ote, opd_vmax=500, psf_vmax=0.1, title="OPD and PSF", **kwargs): """Display OTE and PSF of a JWST instrument next to each other. 
Adapted from: https://github.com/spacetelescope/webbpsf/blob/develop/notebooks/Simulated%20OTE%20Mirror%20Move%20Demo.ipynb Parameters ---------- inst : WebbPSF instrument instance for example: webbpsf.NIRCam() ote : OTE of inst, usually obtained with: instrument, ote = webbpsf.enable_adjustable_ote(instrument) opd_vmax : float max display value for the OPD psf_vmax : float max display valued for PSF title : string plot title kwargs """ psf = inst.calc_psf(nlambda=1) plt.figure(figsize=(12, 8)) ax1 = plt.subplot(121) ote.display_opd(ax=ax1, vmax=opd_vmax, colorbar_orientation='horizontal', title='OPD with aberrated segments') plt.subplot(122) webbpsf.display_psf(psf, ext=2, vmax=psf_vmax, vmin=psf_vmax / 1e4, colorbar_orientation='horizontal', title="PSF simulation") plt.suptitle(title, fontsize=16)
flexible
{ "blob_id": "e59763991974f4bfcd126879dd9aabd44bd89419", "index": 1406, "step-1": "<mask token>\n\n\ndef get_jwst_coords(outDir):\n log.info('Creating and saving aperture')\n jwst_pup = poppy.MultiHexagonAperture(rings=2, flattoflat=FLAT_TO_FLAT)\n jwst_pup.display(colorbar=False)\n plt.title('JWST telescope pupil')\n for i in range(NB_SEG + 1):\n ycen, xcen = jwst_pup._hex_center(i)\n plt.annotate(str(i), size='x-large', xy=(xcen - 0.1, ycen - 0.1))\n plt.savefig(os.path.join(outDir, 'JWST_aperture.pdf'))\n plt.clf()\n jwst_pup.display(colorbar=False)\n plt.title('JWST telescope exit pupil')\n for i in range(NB_SEG + 1):\n ycen, xcen = jwst_pup._hex_center(i)\n ycen *= -1\n plt.annotate(str(i), size='x-large', xy=(xcen - 0.1, ycen - 0.1))\n plt.savefig(os.path.join(outDir, 'JWST_exit_pupil.pdf'))\n pupil_dir = jwst_pup.sample(wavelength=WVLN, npix=IM_SIZE_PUPIL,\n grid_size=FLAT_DIAM, return_scale=True)\n util.write_fits(pupil_dir[0], os.path.join(outDir, 'pupil.fits'))\n seg_position = np.zeros((NB_SEG, 2))\n for i in range(NB_SEG + 1):\n if i == 0:\n continue\n else:\n seg_position[i - 1, 1], seg_position[i - 1, 0\n ] = jwst_pup._hex_center(i)\n seg_position[i - 1, 1] *= -1\n return seg_position\n\n\ndef nircam_coro(filter, fpm, ppm, Aber_WSS):\n \"\"\"-- Deprecated function still used in analytical PASTIS and some notebooks. 
--\n\n Create NIRCam image with specified filter and coronagraph, and aberration input.\n\n Parameters\n ----------\n filter : string\n Filter name\n fpm : string\n Name of focal-plane mask\n ppm : string\n Name of Lyot stop (ppm = \"pupil-plane mask\")\n Aber_WSS : list or array\n list of Zernike coefficients ordered in WSS convention and in METERS\n\n Returns\n -------\n psf_webbpsf : ndarray\n PSF image\n \"\"\"\n nc = webbpsf.NIRCam()\n nc.filter = filter\n nc.image_mask = fpm\n nc.pupil_mask = ppm\n nc, ote = webbpsf.enable_adjustable_ote(nc)\n nc.include_si_wfe = False\n ote.reset()\n ote.zero()\n for i in range(NB_SEG):\n seg = WSS_SEGS[i].split('-')[0]\n ote._apply_hexikes_to_seg(seg, Aber_WSS[i, :])\n psf_nc = nc.calc_psf(oversample=1, fov_pixels=int(IM_SIZE_E2E), nlambda=1)\n psf_webbpsf = psf_nc[1].data\n return psf_webbpsf\n\n\ndef nircam_nocoro(filter, Aber_WSS):\n \"\"\"-- Deprecated function still used in analytical PASTIS and some notebooks. --\n\n Parameters\n ----------\n filter : string\n Filter name\n Aber_WSS : list or array\n list of Zernike coefficients ordered in WSS convention and in METERS\n\n Returns\n -------\n psf_webbpsf : ndarray\n PSF image\n \"\"\"\n nc = webbpsf.NIRCam()\n nc.filter = filter\n nc, ote = webbpsf.enable_adjustable_ote(nc)\n nc.include_si_wfe = False\n ote.reset()\n ote.zero()\n for i in range(NB_SEG):\n seg = WSS_SEGS[i].split('-')[0]\n ote._apply_hexikes_to_seg(seg, Aber_WSS[i, :])\n psf_nc = nc.calc_psf(oversample=1, fov_pixels=int(IM_SIZE_E2E), nlambda=1)\n psf_webbpsf = psf_nc[1].data\n return psf_webbpsf\n\n\ndef set_up_nircam():\n \"\"\"Return a configured instance of the NIRCam simulator on JWST.\n\n Sets up the Lyot stop and filter from the configfile, turns of science instrument (SI) internal WFE and zeros\n the OTE.\n\n Returns\n -------\n Tuple of NIRCam instance, and its OTE\n \"\"\"\n nircam = webbpsf.NIRCam()\n nircam.include_si_wfe = False\n nircam.filter = CONFIG_PASTIS.get('JWST', 'filter_name')\n 
nircam.pupil_mask = CONFIG_PASTIS.get('JWST', 'pupil_plane_stop')\n nircam, ote = webbpsf.enable_adjustable_ote(nircam)\n ote.zero(zero_original=True)\n return nircam, ote\n\n\ndef set_up_cgi():\n \"\"\"Return a configured instance of the CGI simulator on RST.\n\n Sets up the Lyot stop and filter from the configfile, turns off science instrument (SI) internal WFE, and reads\n the FPM setting from the configfile.\n\n Returns\n -------\n CGI instrument instance\n \"\"\"\n webbpsf.setup_logging('ERROR')\n mode_in = CONFIG_PASTIS.get('RST', 'mode')\n nbactuator = int(CONFIG_PASTIS.get('RST', 'nb_subapertures'))\n nbactuator_in = int(np.sqrt(nbactuator))\n if nbactuator_in ** 2 != nbactuator:\n error_msg = (\n f'The number of subapertures from config_pastis.ini is {nbactuator}, which is not the square of the actuators per row (={nbactuator_in})!'\n )\n log.error(error_msg)\n raise ValueError(error_msg)\n cgi = webbpsf.roman.CGI(mode=mode_in, nbactuator=int(nbactuator_in))\n cgi.include_si_wfe = False\n cgi.apodizer = CONFIG_PASTIS.get('RST', 'apodizer')\n cgi.fpm = CONFIG_PASTIS.get('RST', 'fpm')\n cgi.lyotstop = CONFIG_PASTIS.get('RST', 'lyotstop')\n cgi.camera = CONFIG_PASTIS.get('RST', 'camera')\n cgi.filter = CONFIG_PASTIS.get('RST', 'filter_name')\n return cgi\n\n\ndef display_ote_and_psf(inst, ote, opd_vmax=500, psf_vmax=0.1, title=\n 'OPD and PSF', **kwargs):\n \"\"\"Display OTE and PSF of a JWST instrument next to each other.\n\n Adapted from:\n https://github.com/spacetelescope/webbpsf/blob/develop/notebooks/Simulated%20OTE%20Mirror%20Move%20Demo.ipynb\n\n Parameters\n ----------\n inst : WebbPSF instrument instance\n for example: webbpsf.NIRCam()\n ote :\n OTE of inst, usually obtained with: instrument, ote = webbpsf.enable_adjustable_ote(instrument)\n opd_vmax : float\n max display value for the OPD\n psf_vmax : float\n max display valued for PSF\n title : string\n plot title\n kwargs\n \"\"\"\n psf = inst.calc_psf(nlambda=1)\n plt.figure(figsize=(12, 8))\n 
ax1 = plt.subplot(121)\n ote.display_opd(ax=ax1, vmax=opd_vmax, colorbar_orientation=\n 'horizontal', title='OPD with aberrated segments')\n plt.subplot(122)\n webbpsf.display_psf(psf, ext=2, vmax=psf_vmax, vmin=psf_vmax / 10000.0,\n colorbar_orientation='horizontal', title='PSF simulation')\n plt.suptitle(title, fontsize=16)\n", "step-2": "<mask token>\ntry:\n import webbpsf\n os.environ['WEBBPSF_PATH'] = CONFIG_PASTIS.get('local', 'webbpsf_data_path'\n )\n WSS_SEGS = webbpsf.constants.SEGNAMES_WSS_ORDER\nexcept ImportError:\n log.info('WebbPSF was not imported.')\n<mask token>\n\n\ndef get_jwst_coords(outDir):\n log.info('Creating and saving aperture')\n jwst_pup = poppy.MultiHexagonAperture(rings=2, flattoflat=FLAT_TO_FLAT)\n jwst_pup.display(colorbar=False)\n plt.title('JWST telescope pupil')\n for i in range(NB_SEG + 1):\n ycen, xcen = jwst_pup._hex_center(i)\n plt.annotate(str(i), size='x-large', xy=(xcen - 0.1, ycen - 0.1))\n plt.savefig(os.path.join(outDir, 'JWST_aperture.pdf'))\n plt.clf()\n jwst_pup.display(colorbar=False)\n plt.title('JWST telescope exit pupil')\n for i in range(NB_SEG + 1):\n ycen, xcen = jwst_pup._hex_center(i)\n ycen *= -1\n plt.annotate(str(i), size='x-large', xy=(xcen - 0.1, ycen - 0.1))\n plt.savefig(os.path.join(outDir, 'JWST_exit_pupil.pdf'))\n pupil_dir = jwst_pup.sample(wavelength=WVLN, npix=IM_SIZE_PUPIL,\n grid_size=FLAT_DIAM, return_scale=True)\n util.write_fits(pupil_dir[0], os.path.join(outDir, 'pupil.fits'))\n seg_position = np.zeros((NB_SEG, 2))\n for i in range(NB_SEG + 1):\n if i == 0:\n continue\n else:\n seg_position[i - 1, 1], seg_position[i - 1, 0\n ] = jwst_pup._hex_center(i)\n seg_position[i - 1, 1] *= -1\n return seg_position\n\n\ndef nircam_coro(filter, fpm, ppm, Aber_WSS):\n \"\"\"-- Deprecated function still used in analytical PASTIS and some notebooks. 
--\n\n Create NIRCam image with specified filter and coronagraph, and aberration input.\n\n Parameters\n ----------\n filter : string\n Filter name\n fpm : string\n Name of focal-plane mask\n ppm : string\n Name of Lyot stop (ppm = \"pupil-plane mask\")\n Aber_WSS : list or array\n list of Zernike coefficients ordered in WSS convention and in METERS\n\n Returns\n -------\n psf_webbpsf : ndarray\n PSF image\n \"\"\"\n nc = webbpsf.NIRCam()\n nc.filter = filter\n nc.image_mask = fpm\n nc.pupil_mask = ppm\n nc, ote = webbpsf.enable_adjustable_ote(nc)\n nc.include_si_wfe = False\n ote.reset()\n ote.zero()\n for i in range(NB_SEG):\n seg = WSS_SEGS[i].split('-')[0]\n ote._apply_hexikes_to_seg(seg, Aber_WSS[i, :])\n psf_nc = nc.calc_psf(oversample=1, fov_pixels=int(IM_SIZE_E2E), nlambda=1)\n psf_webbpsf = psf_nc[1].data\n return psf_webbpsf\n\n\ndef nircam_nocoro(filter, Aber_WSS):\n \"\"\"-- Deprecated function still used in analytical PASTIS and some notebooks. --\n\n Parameters\n ----------\n filter : string\n Filter name\n Aber_WSS : list or array\n list of Zernike coefficients ordered in WSS convention and in METERS\n\n Returns\n -------\n psf_webbpsf : ndarray\n PSF image\n \"\"\"\n nc = webbpsf.NIRCam()\n nc.filter = filter\n nc, ote = webbpsf.enable_adjustable_ote(nc)\n nc.include_si_wfe = False\n ote.reset()\n ote.zero()\n for i in range(NB_SEG):\n seg = WSS_SEGS[i].split('-')[0]\n ote._apply_hexikes_to_seg(seg, Aber_WSS[i, :])\n psf_nc = nc.calc_psf(oversample=1, fov_pixels=int(IM_SIZE_E2E), nlambda=1)\n psf_webbpsf = psf_nc[1].data\n return psf_webbpsf\n\n\ndef set_up_nircam():\n \"\"\"Return a configured instance of the NIRCam simulator on JWST.\n\n Sets up the Lyot stop and filter from the configfile, turns of science instrument (SI) internal WFE and zeros\n the OTE.\n\n Returns\n -------\n Tuple of NIRCam instance, and its OTE\n \"\"\"\n nircam = webbpsf.NIRCam()\n nircam.include_si_wfe = False\n nircam.filter = CONFIG_PASTIS.get('JWST', 'filter_name')\n 
nircam.pupil_mask = CONFIG_PASTIS.get('JWST', 'pupil_plane_stop')\n nircam, ote = webbpsf.enable_adjustable_ote(nircam)\n ote.zero(zero_original=True)\n return nircam, ote\n\n\ndef set_up_cgi():\n \"\"\"Return a configured instance of the CGI simulator on RST.\n\n Sets up the Lyot stop and filter from the configfile, turns off science instrument (SI) internal WFE, and reads\n the FPM setting from the configfile.\n\n Returns\n -------\n CGI instrument instance\n \"\"\"\n webbpsf.setup_logging('ERROR')\n mode_in = CONFIG_PASTIS.get('RST', 'mode')\n nbactuator = int(CONFIG_PASTIS.get('RST', 'nb_subapertures'))\n nbactuator_in = int(np.sqrt(nbactuator))\n if nbactuator_in ** 2 != nbactuator:\n error_msg = (\n f'The number of subapertures from config_pastis.ini is {nbactuator}, which is not the square of the actuators per row (={nbactuator_in})!'\n )\n log.error(error_msg)\n raise ValueError(error_msg)\n cgi = webbpsf.roman.CGI(mode=mode_in, nbactuator=int(nbactuator_in))\n cgi.include_si_wfe = False\n cgi.apodizer = CONFIG_PASTIS.get('RST', 'apodizer')\n cgi.fpm = CONFIG_PASTIS.get('RST', 'fpm')\n cgi.lyotstop = CONFIG_PASTIS.get('RST', 'lyotstop')\n cgi.camera = CONFIG_PASTIS.get('RST', 'camera')\n cgi.filter = CONFIG_PASTIS.get('RST', 'filter_name')\n return cgi\n\n\ndef display_ote_and_psf(inst, ote, opd_vmax=500, psf_vmax=0.1, title=\n 'OPD and PSF', **kwargs):\n \"\"\"Display OTE and PSF of a JWST instrument next to each other.\n\n Adapted from:\n https://github.com/spacetelescope/webbpsf/blob/develop/notebooks/Simulated%20OTE%20Mirror%20Move%20Demo.ipynb\n\n Parameters\n ----------\n inst : WebbPSF instrument instance\n for example: webbpsf.NIRCam()\n ote :\n OTE of inst, usually obtained with: instrument, ote = webbpsf.enable_adjustable_ote(instrument)\n opd_vmax : float\n max display value for the OPD\n psf_vmax : float\n max display valued for PSF\n title : string\n plot title\n kwargs\n \"\"\"\n psf = inst.calc_psf(nlambda=1)\n plt.figure(figsize=(12, 8))\n 
ax1 = plt.subplot(121)\n ote.display_opd(ax=ax1, vmax=opd_vmax, colorbar_orientation=\n 'horizontal', title='OPD with aberrated segments')\n plt.subplot(122)\n webbpsf.display_psf(psf, ext=2, vmax=psf_vmax, vmin=psf_vmax / 10000.0,\n colorbar_orientation='horizontal', title='PSF simulation')\n plt.suptitle(title, fontsize=16)\n", "step-3": "<mask token>\nlog = logging.getLogger()\ntry:\n import webbpsf\n os.environ['WEBBPSF_PATH'] = CONFIG_PASTIS.get('local', 'webbpsf_data_path'\n )\n WSS_SEGS = webbpsf.constants.SEGNAMES_WSS_ORDER\nexcept ImportError:\n log.info('WebbPSF was not imported.')\nNB_SEG = CONFIG_PASTIS.getint('JWST', 'nb_subapertures')\nFLAT_TO_FLAT = CONFIG_PASTIS.getfloat('JWST', 'flat_to_flat')\nWVLN = CONFIG_PASTIS.getfloat('JWST', 'lambda') * u.nm\nIM_SIZE_PUPIL = CONFIG_PASTIS.getint('numerical', 'tel_size_px')\nFLAT_DIAM = CONFIG_PASTIS.getfloat('JWST', 'flat_diameter') * u.m\nIM_SIZE_E2E = CONFIG_PASTIS.getint('numerical', 'im_size_px_webbpsf')\n\n\ndef get_jwst_coords(outDir):\n log.info('Creating and saving aperture')\n jwst_pup = poppy.MultiHexagonAperture(rings=2, flattoflat=FLAT_TO_FLAT)\n jwst_pup.display(colorbar=False)\n plt.title('JWST telescope pupil')\n for i in range(NB_SEG + 1):\n ycen, xcen = jwst_pup._hex_center(i)\n plt.annotate(str(i), size='x-large', xy=(xcen - 0.1, ycen - 0.1))\n plt.savefig(os.path.join(outDir, 'JWST_aperture.pdf'))\n plt.clf()\n jwst_pup.display(colorbar=False)\n plt.title('JWST telescope exit pupil')\n for i in range(NB_SEG + 1):\n ycen, xcen = jwst_pup._hex_center(i)\n ycen *= -1\n plt.annotate(str(i), size='x-large', xy=(xcen - 0.1, ycen - 0.1))\n plt.savefig(os.path.join(outDir, 'JWST_exit_pupil.pdf'))\n pupil_dir = jwst_pup.sample(wavelength=WVLN, npix=IM_SIZE_PUPIL,\n grid_size=FLAT_DIAM, return_scale=True)\n util.write_fits(pupil_dir[0], os.path.join(outDir, 'pupil.fits'))\n seg_position = np.zeros((NB_SEG, 2))\n for i in range(NB_SEG + 1):\n if i == 0:\n continue\n else:\n seg_position[i - 1, 1], 
seg_position[i - 1, 0\n ] = jwst_pup._hex_center(i)\n seg_position[i - 1, 1] *= -1\n return seg_position\n\n\ndef nircam_coro(filter, fpm, ppm, Aber_WSS):\n \"\"\"-- Deprecated function still used in analytical PASTIS and some notebooks. --\n\n Create NIRCam image with specified filter and coronagraph, and aberration input.\n\n Parameters\n ----------\n filter : string\n Filter name\n fpm : string\n Name of focal-plane mask\n ppm : string\n Name of Lyot stop (ppm = \"pupil-plane mask\")\n Aber_WSS : list or array\n list of Zernike coefficients ordered in WSS convention and in METERS\n\n Returns\n -------\n psf_webbpsf : ndarray\n PSF image\n \"\"\"\n nc = webbpsf.NIRCam()\n nc.filter = filter\n nc.image_mask = fpm\n nc.pupil_mask = ppm\n nc, ote = webbpsf.enable_adjustable_ote(nc)\n nc.include_si_wfe = False\n ote.reset()\n ote.zero()\n for i in range(NB_SEG):\n seg = WSS_SEGS[i].split('-')[0]\n ote._apply_hexikes_to_seg(seg, Aber_WSS[i, :])\n psf_nc = nc.calc_psf(oversample=1, fov_pixels=int(IM_SIZE_E2E), nlambda=1)\n psf_webbpsf = psf_nc[1].data\n return psf_webbpsf\n\n\ndef nircam_nocoro(filter, Aber_WSS):\n \"\"\"-- Deprecated function still used in analytical PASTIS and some notebooks. 
--\n\n Parameters\n ----------\n filter : string\n Filter name\n Aber_WSS : list or array\n list of Zernike coefficients ordered in WSS convention and in METERS\n\n Returns\n -------\n psf_webbpsf : ndarray\n PSF image\n \"\"\"\n nc = webbpsf.NIRCam()\n nc.filter = filter\n nc, ote = webbpsf.enable_adjustable_ote(nc)\n nc.include_si_wfe = False\n ote.reset()\n ote.zero()\n for i in range(NB_SEG):\n seg = WSS_SEGS[i].split('-')[0]\n ote._apply_hexikes_to_seg(seg, Aber_WSS[i, :])\n psf_nc = nc.calc_psf(oversample=1, fov_pixels=int(IM_SIZE_E2E), nlambda=1)\n psf_webbpsf = psf_nc[1].data\n return psf_webbpsf\n\n\ndef set_up_nircam():\n \"\"\"Return a configured instance of the NIRCam simulator on JWST.\n\n Sets up the Lyot stop and filter from the configfile, turns of science instrument (SI) internal WFE and zeros\n the OTE.\n\n Returns\n -------\n Tuple of NIRCam instance, and its OTE\n \"\"\"\n nircam = webbpsf.NIRCam()\n nircam.include_si_wfe = False\n nircam.filter = CONFIG_PASTIS.get('JWST', 'filter_name')\n nircam.pupil_mask = CONFIG_PASTIS.get('JWST', 'pupil_plane_stop')\n nircam, ote = webbpsf.enable_adjustable_ote(nircam)\n ote.zero(zero_original=True)\n return nircam, ote\n\n\ndef set_up_cgi():\n \"\"\"Return a configured instance of the CGI simulator on RST.\n\n Sets up the Lyot stop and filter from the configfile, turns off science instrument (SI) internal WFE, and reads\n the FPM setting from the configfile.\n\n Returns\n -------\n CGI instrument instance\n \"\"\"\n webbpsf.setup_logging('ERROR')\n mode_in = CONFIG_PASTIS.get('RST', 'mode')\n nbactuator = int(CONFIG_PASTIS.get('RST', 'nb_subapertures'))\n nbactuator_in = int(np.sqrt(nbactuator))\n if nbactuator_in ** 2 != nbactuator:\n error_msg = (\n f'The number of subapertures from config_pastis.ini is {nbactuator}, which is not the square of the actuators per row (={nbactuator_in})!'\n )\n log.error(error_msg)\n raise ValueError(error_msg)\n cgi = webbpsf.roman.CGI(mode=mode_in, 
nbactuator=int(nbactuator_in))\n cgi.include_si_wfe = False\n cgi.apodizer = CONFIG_PASTIS.get('RST', 'apodizer')\n cgi.fpm = CONFIG_PASTIS.get('RST', 'fpm')\n cgi.lyotstop = CONFIG_PASTIS.get('RST', 'lyotstop')\n cgi.camera = CONFIG_PASTIS.get('RST', 'camera')\n cgi.filter = CONFIG_PASTIS.get('RST', 'filter_name')\n return cgi\n\n\ndef display_ote_and_psf(inst, ote, opd_vmax=500, psf_vmax=0.1, title=\n 'OPD and PSF', **kwargs):\n \"\"\"Display OTE and PSF of a JWST instrument next to each other.\n\n Adapted from:\n https://github.com/spacetelescope/webbpsf/blob/develop/notebooks/Simulated%20OTE%20Mirror%20Move%20Demo.ipynb\n\n Parameters\n ----------\n inst : WebbPSF instrument instance\n for example: webbpsf.NIRCam()\n ote :\n OTE of inst, usually obtained with: instrument, ote = webbpsf.enable_adjustable_ote(instrument)\n opd_vmax : float\n max display value for the OPD\n psf_vmax : float\n max display valued for PSF\n title : string\n plot title\n kwargs\n \"\"\"\n psf = inst.calc_psf(nlambda=1)\n plt.figure(figsize=(12, 8))\n ax1 = plt.subplot(121)\n ote.display_opd(ax=ax1, vmax=opd_vmax, colorbar_orientation=\n 'horizontal', title='OPD with aberrated segments')\n plt.subplot(122)\n webbpsf.display_psf(psf, ext=2, vmax=psf_vmax, vmin=psf_vmax / 10000.0,\n colorbar_orientation='horizontal', title='PSF simulation')\n plt.suptitle(title, fontsize=16)\n", "step-4": "<mask token>\nimport os\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport astropy.units as u\nimport logging\nimport poppy\nfrom pastis.config import CONFIG_PASTIS\nimport pastis.util as util\nlog = logging.getLogger()\ntry:\n import webbpsf\n os.environ['WEBBPSF_PATH'] = CONFIG_PASTIS.get('local', 'webbpsf_data_path'\n )\n WSS_SEGS = webbpsf.constants.SEGNAMES_WSS_ORDER\nexcept ImportError:\n log.info('WebbPSF was not imported.')\nNB_SEG = CONFIG_PASTIS.getint('JWST', 'nb_subapertures')\nFLAT_TO_FLAT = CONFIG_PASTIS.getfloat('JWST', 'flat_to_flat')\nWVLN = CONFIG_PASTIS.getfloat('JWST', 
'lambda') * u.nm\nIM_SIZE_PUPIL = CONFIG_PASTIS.getint('numerical', 'tel_size_px')\nFLAT_DIAM = CONFIG_PASTIS.getfloat('JWST', 'flat_diameter') * u.m\nIM_SIZE_E2E = CONFIG_PASTIS.getint('numerical', 'im_size_px_webbpsf')\n\n\ndef get_jwst_coords(outDir):\n log.info('Creating and saving aperture')\n jwst_pup = poppy.MultiHexagonAperture(rings=2, flattoflat=FLAT_TO_FLAT)\n jwst_pup.display(colorbar=False)\n plt.title('JWST telescope pupil')\n for i in range(NB_SEG + 1):\n ycen, xcen = jwst_pup._hex_center(i)\n plt.annotate(str(i), size='x-large', xy=(xcen - 0.1, ycen - 0.1))\n plt.savefig(os.path.join(outDir, 'JWST_aperture.pdf'))\n plt.clf()\n jwst_pup.display(colorbar=False)\n plt.title('JWST telescope exit pupil')\n for i in range(NB_SEG + 1):\n ycen, xcen = jwst_pup._hex_center(i)\n ycen *= -1\n plt.annotate(str(i), size='x-large', xy=(xcen - 0.1, ycen - 0.1))\n plt.savefig(os.path.join(outDir, 'JWST_exit_pupil.pdf'))\n pupil_dir = jwst_pup.sample(wavelength=WVLN, npix=IM_SIZE_PUPIL,\n grid_size=FLAT_DIAM, return_scale=True)\n util.write_fits(pupil_dir[0], os.path.join(outDir, 'pupil.fits'))\n seg_position = np.zeros((NB_SEG, 2))\n for i in range(NB_SEG + 1):\n if i == 0:\n continue\n else:\n seg_position[i - 1, 1], seg_position[i - 1, 0\n ] = jwst_pup._hex_center(i)\n seg_position[i - 1, 1] *= -1\n return seg_position\n\n\ndef nircam_coro(filter, fpm, ppm, Aber_WSS):\n \"\"\"-- Deprecated function still used in analytical PASTIS and some notebooks. 
--\n\n Create NIRCam image with specified filter and coronagraph, and aberration input.\n\n Parameters\n ----------\n filter : string\n Filter name\n fpm : string\n Name of focal-plane mask\n ppm : string\n Name of Lyot stop (ppm = \"pupil-plane mask\")\n Aber_WSS : list or array\n list of Zernike coefficients ordered in WSS convention and in METERS\n\n Returns\n -------\n psf_webbpsf : ndarray\n PSF image\n \"\"\"\n nc = webbpsf.NIRCam()\n nc.filter = filter\n nc.image_mask = fpm\n nc.pupil_mask = ppm\n nc, ote = webbpsf.enable_adjustable_ote(nc)\n nc.include_si_wfe = False\n ote.reset()\n ote.zero()\n for i in range(NB_SEG):\n seg = WSS_SEGS[i].split('-')[0]\n ote._apply_hexikes_to_seg(seg, Aber_WSS[i, :])\n psf_nc = nc.calc_psf(oversample=1, fov_pixels=int(IM_SIZE_E2E), nlambda=1)\n psf_webbpsf = psf_nc[1].data\n return psf_webbpsf\n\n\ndef nircam_nocoro(filter, Aber_WSS):\n \"\"\"-- Deprecated function still used in analytical PASTIS and some notebooks. --\n\n Parameters\n ----------\n filter : string\n Filter name\n Aber_WSS : list or array\n list of Zernike coefficients ordered in WSS convention and in METERS\n\n Returns\n -------\n psf_webbpsf : ndarray\n PSF image\n \"\"\"\n nc = webbpsf.NIRCam()\n nc.filter = filter\n nc, ote = webbpsf.enable_adjustable_ote(nc)\n nc.include_si_wfe = False\n ote.reset()\n ote.zero()\n for i in range(NB_SEG):\n seg = WSS_SEGS[i].split('-')[0]\n ote._apply_hexikes_to_seg(seg, Aber_WSS[i, :])\n psf_nc = nc.calc_psf(oversample=1, fov_pixels=int(IM_SIZE_E2E), nlambda=1)\n psf_webbpsf = psf_nc[1].data\n return psf_webbpsf\n\n\ndef set_up_nircam():\n \"\"\"Return a configured instance of the NIRCam simulator on JWST.\n\n Sets up the Lyot stop and filter from the configfile, turns of science instrument (SI) internal WFE and zeros\n the OTE.\n\n Returns\n -------\n Tuple of NIRCam instance, and its OTE\n \"\"\"\n nircam = webbpsf.NIRCam()\n nircam.include_si_wfe = False\n nircam.filter = CONFIG_PASTIS.get('JWST', 'filter_name')\n 
nircam.pupil_mask = CONFIG_PASTIS.get('JWST', 'pupil_plane_stop')\n nircam, ote = webbpsf.enable_adjustable_ote(nircam)\n ote.zero(zero_original=True)\n return nircam, ote\n\n\ndef set_up_cgi():\n \"\"\"Return a configured instance of the CGI simulator on RST.\n\n Sets up the Lyot stop and filter from the configfile, turns off science instrument (SI) internal WFE, and reads\n the FPM setting from the configfile.\n\n Returns\n -------\n CGI instrument instance\n \"\"\"\n webbpsf.setup_logging('ERROR')\n mode_in = CONFIG_PASTIS.get('RST', 'mode')\n nbactuator = int(CONFIG_PASTIS.get('RST', 'nb_subapertures'))\n nbactuator_in = int(np.sqrt(nbactuator))\n if nbactuator_in ** 2 != nbactuator:\n error_msg = (\n f'The number of subapertures from config_pastis.ini is {nbactuator}, which is not the square of the actuators per row (={nbactuator_in})!'\n )\n log.error(error_msg)\n raise ValueError(error_msg)\n cgi = webbpsf.roman.CGI(mode=mode_in, nbactuator=int(nbactuator_in))\n cgi.include_si_wfe = False\n cgi.apodizer = CONFIG_PASTIS.get('RST', 'apodizer')\n cgi.fpm = CONFIG_PASTIS.get('RST', 'fpm')\n cgi.lyotstop = CONFIG_PASTIS.get('RST', 'lyotstop')\n cgi.camera = CONFIG_PASTIS.get('RST', 'camera')\n cgi.filter = CONFIG_PASTIS.get('RST', 'filter_name')\n return cgi\n\n\ndef display_ote_and_psf(inst, ote, opd_vmax=500, psf_vmax=0.1, title=\n 'OPD and PSF', **kwargs):\n \"\"\"Display OTE and PSF of a JWST instrument next to each other.\n\n Adapted from:\n https://github.com/spacetelescope/webbpsf/blob/develop/notebooks/Simulated%20OTE%20Mirror%20Move%20Demo.ipynb\n\n Parameters\n ----------\n inst : WebbPSF instrument instance\n for example: webbpsf.NIRCam()\n ote :\n OTE of inst, usually obtained with: instrument, ote = webbpsf.enable_adjustable_ote(instrument)\n opd_vmax : float\n max display value for the OPD\n psf_vmax : float\n max display valued for PSF\n title : string\n plot title\n kwargs\n \"\"\"\n psf = inst.calc_psf(nlambda=1)\n plt.figure(figsize=(12, 8))\n 
ax1 = plt.subplot(121)\n ote.display_opd(ax=ax1, vmax=opd_vmax, colorbar_orientation=\n 'horizontal', title='OPD with aberrated segments')\n plt.subplot(122)\n webbpsf.display_psf(psf, ext=2, vmax=psf_vmax, vmin=psf_vmax / 10000.0,\n colorbar_orientation='horizontal', title='PSF simulation')\n plt.suptitle(title, fontsize=16)\n", "step-5": "\"\"\"\nThis is a module containing convenience functions to create the JWST aperture and coronagraphic images with WebbPSF.\n\"\"\"\nimport os\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport astropy.units as u\nimport logging\nimport poppy\n\nfrom pastis.config import CONFIG_PASTIS\nimport pastis.util as util\n\nlog = logging.getLogger()\n\ntry:\n import webbpsf\n\n # Setting to ensure that PyCharm finds the webbpsf-data folder. If you don't know where it is, find it with:\n # webbpsf.utils.get_webbpsf_data_path()\n # --> e.g.: >>source activate pastis >>ipython >>import webbpsf >>webbpsf.utils.get_webbpsf_data_path()\n os.environ['WEBBPSF_PATH'] = CONFIG_PASTIS.get('local', 'webbpsf_data_path')\n WSS_SEGS = webbpsf.constants.SEGNAMES_WSS_ORDER\n\nexcept ImportError:\n log.info('WebbPSF was not imported.')\n\n\nNB_SEG = CONFIG_PASTIS.getint('JWST', 'nb_subapertures')\nFLAT_TO_FLAT = CONFIG_PASTIS.getfloat('JWST', 'flat_to_flat')\nWVLN = CONFIG_PASTIS.getfloat('JWST', 'lambda') * u.nm\nIM_SIZE_PUPIL = CONFIG_PASTIS.getint('numerical', 'tel_size_px')\nFLAT_DIAM = CONFIG_PASTIS.getfloat('JWST', 'flat_diameter') * u.m\nIM_SIZE_E2E = CONFIG_PASTIS.getint('numerical', 'im_size_px_webbpsf')\n\n\ndef get_jwst_coords(outDir):\n\n ### Generate the pupil with segments and spiders\n\n # Use poppy to create JWST aperture without spiders\n log.info('Creating and saving aperture')\n jwst_pup = poppy.MultiHexagonAperture(rings=2, flattoflat=FLAT_TO_FLAT) # Create JWST pupil without spiders\n jwst_pup.display(colorbar=False) # Show pupil (will be saved to file)\n plt.title('JWST telescope pupil')\n # Number the segments\n for i in 
range(NB_SEG + 1):\n ycen, xcen = jwst_pup._hex_center(i)\n plt.annotate(str(i), size='x-large', xy=(xcen - 0.1, ycen - 0.1)) # -0.1 is for shifting the numbers closer to the segment centers\n # Save a PDF version of the pupil\n plt.savefig(os.path.join(outDir, 'JWST_aperture.pdf'))\n\n # Since WebbPSF creates images by controlling the exit pupil,\n # let's also create the exit pupil instead of the entrance pupil.\n # I do this by flipping the y-coordinates of the segments.\n plt.clf()\n jwst_pup.display(colorbar=False) # Show pupil\n plt.title('JWST telescope exit pupil')\n # Number the segments\n for i in range(NB_SEG + 1):\n ycen, xcen = jwst_pup._hex_center(i)\n ycen *= -1\n plt.annotate(str(i), size='x-large', xy=(xcen - 0.1, ycen - 0.1)) # -0.1 is for shifting the number labels closer to the segment centers\n # Save a PDF version of the exit pupil\n plt.savefig(os.path.join(outDir, 'JWST_exit_pupil.pdf'))\n\n # Get pupil as fits image\n pupil_dir = jwst_pup.sample(wavelength=WVLN, npix=IM_SIZE_PUPIL, grid_size=FLAT_DIAM, return_scale=True)\n # If the image size is equivalent to the total diameter of the telescope, we don't have to worry about sampling later\n # But for the JWST case with poppy it makes such a small difference that I am skipping it for now\n util.write_fits(pupil_dir[0], os.path.join(outDir, 'pupil.fits'))\n\n ### Get the coordinates of the central pixel of each segment\n seg_position = np.zeros((NB_SEG, 2)) # holds x and y position of each central pixel\n for i in range(NB_SEG + 1): # our pupil is still counting the central segment as seg 0, so we need to include it\n # in the loop, however, we will just discard the values for the center\n if i == 0: # Segment 0 is the central segment, which we want to skip and not put into seg_position\n continue # Continues with the next iteration of the loop\n else:\n seg_position[i - 1, 1], seg_position[i - 1, 0] = jwst_pup._hex_center(i) # y, x = center position\n seg_position[i - 1, 1] *= -1 # inverting 
the y-axis because we want to work with the EXIT PUPIL!!!\n # Units are meters!!!\n\n return seg_position\n\n\ndef nircam_coro(filter, fpm, ppm, Aber_WSS):\n \"\"\"-- Deprecated function still used in analytical PASTIS and some notebooks. --\n\n Create NIRCam image with specified filter and coronagraph, and aberration input.\n\n Parameters\n ----------\n filter : string\n Filter name\n fpm : string\n Name of focal-plane mask\n ppm : string\n Name of Lyot stop (ppm = \"pupil-plane mask\")\n Aber_WSS : list or array\n list of Zernike coefficients ordered in WSS convention and in METERS\n\n Returns\n -------\n psf_webbpsf : ndarray\n PSF image\n \"\"\"\n\n # Set up NIRCam and coronagraph\n nc = webbpsf.NIRCam()\n nc.filter = filter\n nc.image_mask = fpm\n nc.pupil_mask = ppm\n\n # Adjust OTE with aberrations\n nc, ote = webbpsf.enable_adjustable_ote(nc)\n nc.include_si_wfe = False # set SI internal WFE to zero\n ote.reset()\n ote.zero()\n for i in range(NB_SEG):\n seg = WSS_SEGS[i].split('-')[0]\n ote._apply_hexikes_to_seg(seg, Aber_WSS[i, :])\n\n # Calculate PSF\n psf_nc = nc.calc_psf(oversample=1, fov_pixels=int(IM_SIZE_E2E), nlambda=1)\n psf_webbpsf = psf_nc[1].data\n\n return psf_webbpsf\n\n\ndef nircam_nocoro(filter, Aber_WSS):\n \"\"\"-- Deprecated function still used in analytical PASTIS and some notebooks. 
--\n\n Parameters\n ----------\n filter : string\n Filter name\n Aber_WSS : list or array\n list of Zernike coefficients ordered in WSS convention and in METERS\n\n Returns\n -------\n psf_webbpsf : ndarray\n PSF image\n \"\"\"\n\n # Create NIRCam object\n nc = webbpsf.NIRCam()\n # Set filter\n nc.filter = filter\n\n # Adjust OTE with aberrations\n nc, ote = webbpsf.enable_adjustable_ote(nc)\n nc.include_si_wfe = False # set SI internal WFE to zero\n ote.reset()\n ote.zero()\n for i in range(NB_SEG):\n seg = WSS_SEGS[i].split('-')[0]\n ote._apply_hexikes_to_seg(seg, Aber_WSS[i, :])\n\n # Calculate PSF\n psf_nc = nc.calc_psf(oversample=1, fov_pixels=int(IM_SIZE_E2E), nlambda=1)\n psf_webbpsf = psf_nc[1].data\n\n return psf_webbpsf\n\n\ndef set_up_nircam():\n \"\"\"Return a configured instance of the NIRCam simulator on JWST.\n\n Sets up the Lyot stop and filter from the configfile, turns of science instrument (SI) internal WFE and zeros\n the OTE.\n\n Returns\n -------\n Tuple of NIRCam instance, and its OTE\n \"\"\"\n\n nircam = webbpsf.NIRCam()\n nircam.include_si_wfe = False\n nircam.filter = CONFIG_PASTIS.get('JWST', 'filter_name')\n nircam.pupil_mask = CONFIG_PASTIS.get('JWST', 'pupil_plane_stop')\n\n nircam, ote = webbpsf.enable_adjustable_ote(nircam)\n ote.zero(zero_original=True) # https://github.com/spacetelescope/webbpsf/blob/96537c459996f682ac6e9af808809ca13fb85e87/webbpsf/opds.py#L1125\n\n return nircam, ote\n\n\ndef set_up_cgi():\n \"\"\"Return a configured instance of the CGI simulator on RST.\n\n Sets up the Lyot stop and filter from the configfile, turns off science instrument (SI) internal WFE, and reads\n the FPM setting from the configfile.\n\n Returns\n -------\n CGI instrument instance\n \"\"\"\n\n webbpsf.setup_logging('ERROR')\n\n # Set actuators numbesr\n mode_in = CONFIG_PASTIS.get('RST', 'mode')\n nbactuator = int(CONFIG_PASTIS.get('RST', 'nb_subapertures'))\n nbactuator_in = int(np.sqrt(nbactuator))\n if nbactuator_in**2 != nbactuator:\n 
error_msg = f\"The number of subapertures from config_pastis.ini is {nbactuator}, which is not the square of the actuators per row (={nbactuator_in})!\"\n log.error(error_msg)\n raise ValueError(error_msg)\n cgi = webbpsf.roman.CGI(mode=mode_in, nbactuator=int(nbactuator_in))\n\n cgi.include_si_wfe = False\n cgi.apodizer = CONFIG_PASTIS.get('RST', 'apodizer')\n cgi.fpm = CONFIG_PASTIS.get('RST', 'fpm')\n cgi.lyotstop = CONFIG_PASTIS.get('RST', 'lyotstop')\n cgi.camera = CONFIG_PASTIS.get('RST', 'camera')\n cgi.filter = CONFIG_PASTIS.get('RST', 'filter_name')\n\n return cgi\n\n\ndef display_ote_and_psf(inst, ote, opd_vmax=500, psf_vmax=0.1, title=\"OPD and PSF\", **kwargs):\n \"\"\"Display OTE and PSF of a JWST instrument next to each other.\n\n Adapted from:\n https://github.com/spacetelescope/webbpsf/blob/develop/notebooks/Simulated%20OTE%20Mirror%20Move%20Demo.ipynb\n\n Parameters\n ----------\n inst : WebbPSF instrument instance\n for example: webbpsf.NIRCam()\n ote :\n OTE of inst, usually obtained with: instrument, ote = webbpsf.enable_adjustable_ote(instrument)\n opd_vmax : float\n max display value for the OPD\n psf_vmax : float\n max display valued for PSF\n title : string\n plot title\n kwargs\n \"\"\"\n\n psf = inst.calc_psf(nlambda=1)\n plt.figure(figsize=(12, 8))\n ax1 = plt.subplot(121)\n ote.display_opd(ax=ax1, vmax=opd_vmax, colorbar_orientation='horizontal', title='OPD with aberrated segments')\n plt.subplot(122)\n webbpsf.display_psf(psf, ext=2, vmax=psf_vmax, vmin=psf_vmax / 1e4, colorbar_orientation='horizontal', title=\"PSF simulation\")\n plt.suptitle(title, fontsize=16)\n", "step-ids": [ 6, 7, 8, 9, 10 ] }
[ 6, 7, 8, 9, 10 ]
<|reserved_special_token_0|> def get_version(filename): import ast version = None with open(filename) as f: for line in f: if line.startswith('__version__'): version = ast.parse(line).body[0].value.s break else: raise ValueError('No version found in %r.' % filename) if version is None: raise ValueError(filename) return version <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> if not sys.version_info >= (3, 6, 0): msg = 'Unsupported version %s' % sys.version raise Exception(msg) def get_version(filename): import ast version = None with open(filename) as f: for line in f: if line.startswith('__version__'): version = ast.parse(line).body[0].value.s break else: raise ValueError('No version found in %r.' % filename) if version is None: raise ValueError(filename) return version <|reserved_special_token_0|> setup(name=f'zuper-nodes-{line}', version=version, keywords='', package_dir ={'': 'src'}, packages=['zuper_nodes', 'zuper_nodes_tests', 'zuper_nodes_wrapper', 'zuper_nodes_wrapper_tests'], install_requires=[ 'compmake', 'pyparsing', 'PyContracts', 'networkx<=2.2', 'termcolor', 'zuper-ipce-z5', 'cbor2', 'base58'], entry_points={'console_scripts': [ 'zuper-node-identify=zuper_nodes_wrapper.identify:identify_main']}) <|reserved_special_token_1|> <|reserved_special_token_0|> if not sys.version_info >= (3, 6, 0): msg = 'Unsupported version %s' % sys.version raise Exception(msg) def get_version(filename): import ast version = None with open(filename) as f: for line in f: if line.startswith('__version__'): version = ast.parse(line).body[0].value.s break else: raise ValueError('No version found in %r.' 
% filename) if version is None: raise ValueError(filename) return version version = get_version(filename='src/zuper_nodes/__init__.py') line = 'z5' setup(name=f'zuper-nodes-{line}', version=version, keywords='', package_dir ={'': 'src'}, packages=['zuper_nodes', 'zuper_nodes_tests', 'zuper_nodes_wrapper', 'zuper_nodes_wrapper_tests'], install_requires=[ 'compmake', 'pyparsing', 'PyContracts', 'networkx<=2.2', 'termcolor', 'zuper-ipce-z5', 'cbor2', 'base58'], entry_points={'console_scripts': [ 'zuper-node-identify=zuper_nodes_wrapper.identify:identify_main']}) <|reserved_special_token_1|> from setuptools import setup import sys if not sys.version_info >= (3, 6, 0): msg = 'Unsupported version %s' % sys.version raise Exception(msg) def get_version(filename): import ast version = None with open(filename) as f: for line in f: if line.startswith('__version__'): version = ast.parse(line).body[0].value.s break else: raise ValueError('No version found in %r.' % filename) if version is None: raise ValueError(filename) return version version = get_version(filename='src/zuper_nodes/__init__.py') line = 'z5' setup(name=f'zuper-nodes-{line}', version=version, keywords='', package_dir ={'': 'src'}, packages=['zuper_nodes', 'zuper_nodes_tests', 'zuper_nodes_wrapper', 'zuper_nodes_wrapper_tests'], install_requires=[ 'compmake', 'pyparsing', 'PyContracts', 'networkx<=2.2', 'termcolor', 'zuper-ipce-z5', 'cbor2', 'base58'], entry_points={'console_scripts': [ 'zuper-node-identify=zuper_nodes_wrapper.identify:identify_main']}) <|reserved_special_token_1|> from setuptools import setup import sys if not sys.version_info >= (3, 6, 0): msg = 'Unsupported version %s' % sys.version raise Exception(msg) def get_version(filename): import ast version = None with open(filename) as f: for line in f: if line.startswith('__version__'): version = ast.parse(line).body[0].value.s break else: raise ValueError('No version found in %r.' 
% filename) if version is None: raise ValueError(filename) return version version = get_version(filename='src/zuper_nodes/__init__.py') line = 'z5' setup( name=f'zuper-nodes-{line}', version=version, keywords='', package_dir={'': 'src'}, packages=[ 'zuper_nodes', 'zuper_nodes_tests', 'zuper_nodes_wrapper', 'zuper_nodes_wrapper_tests', ], install_requires=[ 'compmake', 'pyparsing', 'PyContracts', 'networkx<=2.2', 'termcolor', 'zuper-ipce-z5', 'cbor2', 'base58', ], entry_points={ 'console_scripts': [ 'zuper-node-identify=zuper_nodes_wrapper.identify:identify_main', ], }, )
flexible
{ "blob_id": "d3b55863c6e3a1b6cbdcec37db81ee42b769938d", "index": 9039, "step-1": "<mask token>\n\n\ndef get_version(filename):\n import ast\n version = None\n with open(filename) as f:\n for line in f:\n if line.startswith('__version__'):\n version = ast.parse(line).body[0].value.s\n break\n else:\n raise ValueError('No version found in %r.' % filename)\n if version is None:\n raise ValueError(filename)\n return version\n\n\n<mask token>\n", "step-2": "<mask token>\nif not sys.version_info >= (3, 6, 0):\n msg = 'Unsupported version %s' % sys.version\n raise Exception(msg)\n\n\ndef get_version(filename):\n import ast\n version = None\n with open(filename) as f:\n for line in f:\n if line.startswith('__version__'):\n version = ast.parse(line).body[0].value.s\n break\n else:\n raise ValueError('No version found in %r.' % filename)\n if version is None:\n raise ValueError(filename)\n return version\n\n\n<mask token>\nsetup(name=f'zuper-nodes-{line}', version=version, keywords='', package_dir\n ={'': 'src'}, packages=['zuper_nodes', 'zuper_nodes_tests',\n 'zuper_nodes_wrapper', 'zuper_nodes_wrapper_tests'], install_requires=[\n 'compmake', 'pyparsing', 'PyContracts', 'networkx<=2.2', 'termcolor',\n 'zuper-ipce-z5', 'cbor2', 'base58'], entry_points={'console_scripts': [\n 'zuper-node-identify=zuper_nodes_wrapper.identify:identify_main']})\n", "step-3": "<mask token>\nif not sys.version_info >= (3, 6, 0):\n msg = 'Unsupported version %s' % sys.version\n raise Exception(msg)\n\n\ndef get_version(filename):\n import ast\n version = None\n with open(filename) as f:\n for line in f:\n if line.startswith('__version__'):\n version = ast.parse(line).body[0].value.s\n break\n else:\n raise ValueError('No version found in %r.' 
% filename)\n if version is None:\n raise ValueError(filename)\n return version\n\n\nversion = get_version(filename='src/zuper_nodes/__init__.py')\nline = 'z5'\nsetup(name=f'zuper-nodes-{line}', version=version, keywords='', package_dir\n ={'': 'src'}, packages=['zuper_nodes', 'zuper_nodes_tests',\n 'zuper_nodes_wrapper', 'zuper_nodes_wrapper_tests'], install_requires=[\n 'compmake', 'pyparsing', 'PyContracts', 'networkx<=2.2', 'termcolor',\n 'zuper-ipce-z5', 'cbor2', 'base58'], entry_points={'console_scripts': [\n 'zuper-node-identify=zuper_nodes_wrapper.identify:identify_main']})\n", "step-4": "from setuptools import setup\nimport sys\nif not sys.version_info >= (3, 6, 0):\n msg = 'Unsupported version %s' % sys.version\n raise Exception(msg)\n\n\ndef get_version(filename):\n import ast\n version = None\n with open(filename) as f:\n for line in f:\n if line.startswith('__version__'):\n version = ast.parse(line).body[0].value.s\n break\n else:\n raise ValueError('No version found in %r.' 
% filename)\n if version is None:\n raise ValueError(filename)\n return version\n\n\nversion = get_version(filename='src/zuper_nodes/__init__.py')\nline = 'z5'\nsetup(name=f'zuper-nodes-{line}', version=version, keywords='', package_dir\n ={'': 'src'}, packages=['zuper_nodes', 'zuper_nodes_tests',\n 'zuper_nodes_wrapper', 'zuper_nodes_wrapper_tests'], install_requires=[\n 'compmake', 'pyparsing', 'PyContracts', 'networkx<=2.2', 'termcolor',\n 'zuper-ipce-z5', 'cbor2', 'base58'], entry_points={'console_scripts': [\n 'zuper-node-identify=zuper_nodes_wrapper.identify:identify_main']})\n", "step-5": "from setuptools import setup\n\nimport sys\n\nif not sys.version_info >= (3, 6, 0):\n msg = 'Unsupported version %s' % sys.version\n raise Exception(msg)\n\n\ndef get_version(filename):\n import ast\n version = None\n with open(filename) as f:\n for line in f:\n if line.startswith('__version__'):\n version = ast.parse(line).body[0].value.s\n break\n else:\n raise ValueError('No version found in %r.' % filename)\n if version is None:\n raise ValueError(filename)\n return version\n\n\nversion = get_version(filename='src/zuper_nodes/__init__.py')\nline = 'z5'\nsetup(\n name=f'zuper-nodes-{line}',\n version=version,\n keywords='',\n package_dir={'': 'src'},\n packages=[\n 'zuper_nodes',\n 'zuper_nodes_tests',\n 'zuper_nodes_wrapper',\n 'zuper_nodes_wrapper_tests',\n ],\n install_requires=[\n 'compmake',\n 'pyparsing',\n 'PyContracts',\n 'networkx<=2.2',\n 'termcolor',\n 'zuper-ipce-z5',\n 'cbor2',\n 'base58',\n ],\n entry_points={\n 'console_scripts': [\n 'zuper-node-identify=zuper_nodes_wrapper.identify:identify_main',\n ],\n },\n)\n", "step-ids": [ 1, 2, 3, 4, 5 ] }
[ 1, 2, 3, 4, 5 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class PrimaryuserConfig(AppConfig): <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class PrimaryuserConfig(AppConfig): name = 'PrimaryUser' <|reserved_special_token_1|> from django.apps import AppConfig class PrimaryuserConfig(AppConfig): name = 'PrimaryUser'
flexible
{ "blob_id": "82c10076ba73723b696e3e33280296c2a24f20b9", "index": 4187, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\nclass PrimaryuserConfig(AppConfig):\n <mask token>\n", "step-3": "<mask token>\n\n\nclass PrimaryuserConfig(AppConfig):\n name = 'PrimaryUser'\n", "step-4": "from django.apps import AppConfig\n\n\nclass PrimaryuserConfig(AppConfig):\n name = 'PrimaryUser'\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
# Uses python3 import numpy as np def fibonaci(n): if n <= 1: return n F = np.empty(shape=(n + 1)) F[0] = 0 F[1] = 1 for i in range(2, len(F)): F[i] = F[i - 1] + F[i - 2] return F[n] n = int(input()) print(int(fibonaci(n)))
normal
{ "blob_id": "67516551b595c02e70a0ba4005df8a97ba71b17e", "index": 1419, "step-1": "<mask token>\n\n\ndef fibonaci(n):\n if n <= 1:\n return n\n F = np.empty(shape=n + 1)\n F[0] = 0\n F[1] = 1\n for i in range(2, len(F)):\n F[i] = F[i - 1] + F[i - 2]\n return F[n]\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\ndef fibonaci(n):\n if n <= 1:\n return n\n F = np.empty(shape=n + 1)\n F[0] = 0\n F[1] = 1\n for i in range(2, len(F)):\n F[i] = F[i - 1] + F[i - 2]\n return F[n]\n\n\n<mask token>\nprint(int(fibonaci(n)))\n", "step-3": "<mask token>\n\n\ndef fibonaci(n):\n if n <= 1:\n return n\n F = np.empty(shape=n + 1)\n F[0] = 0\n F[1] = 1\n for i in range(2, len(F)):\n F[i] = F[i - 1] + F[i - 2]\n return F[n]\n\n\nn = int(input())\nprint(int(fibonaci(n)))\n", "step-4": "import numpy as np\n\n\ndef fibonaci(n):\n if n <= 1:\n return n\n F = np.empty(shape=n + 1)\n F[0] = 0\n F[1] = 1\n for i in range(2, len(F)):\n F[i] = F[i - 1] + F[i - 2]\n return F[n]\n\n\nn = int(input())\nprint(int(fibonaci(n)))\n", "step-5": "# Uses python3\nimport numpy as np\n\n\ndef fibonaci(n):\n if n <= 1:\n return n\n\n F = np.empty(shape=(n + 1))\n F[0] = 0\n F[1] = 1\n for i in range(2, len(F)):\n F[i] = F[i - 1] + F[i - 2]\n\n return F[n]\n\n\nn = int(input())\nprint(int(fibonaci(n)))\n", "step-ids": [ 1, 2, 3, 4, 5 ] }
[ 1, 2, 3, 4, 5 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> etc_dictionary = {'2 30대': '이삼십대', '20~30대': '이삼십대', '20, 30대': '이십대 삼십대', '1+1': '원플러스원', '3에서 6개월인': '3개월에서 육개월인'} english_dictionary = {'Devsisters': '데브시스터즈', 'track': '트랙', 'LA': '엘에이', 'LG': '엘지', 'KOREA': '코리아', 'JSA': '제이에스에이', 'PGA': '피지에이', 'GA': '지에이', 'idol': '아이돌', 'KTX': '케이티엑스', 'AC': '에이씨', 'DVD': '디비디', 'US': '유에스', 'CNN': '씨엔엔', 'LPGA': '엘피지에이', 'P': '피', 'L': '엘', 'T': '티', 'B': '비', 'C': '씨', 'BIFF': '비아이에프에프', 'GV': '지비', 'IT': '아이티', 'IQ': '아이큐', 'JTBC': '제이티비씨', 'trickle down effect': '트리클 다운 이펙트', 'trickle up effect': '트리클 업 이펙트', 'down': '다운', 'up': '업', 'FCK': '에프씨케이', 'AP': '에이피', 'WHERETHEWILDTHINGSARE': '', 'Rashomon Effect': '', 'O': '오', 'OO': '오오', 'B': '비', 'GDP': '지디피', 'CIPA': '씨아이피에이', 'YS': '와이에스', 'Y': '와이', 'S': '에스', 'JTBC': '제이티비씨', 'PC': '피씨', 'bill': '빌', 'Halmuny': '하모니', 'X': '엑스', 'SNS': '에스엔에스', 'ability': '어빌리티', 'shy': '', 'CCTV': '씨씨티비', 'IT': '아이티', 'the tenth man': '더 텐쓰 맨', 'L': '엘', 'PC': '피씨', 'YSDJJPMB': '', 'Content Attitude Timing': '컨텐트 애티튜드 타이밍', 'CAT': '캣', 'IS': '아이에스', 'SNS': '에스엔에스', 'K': '케이', 'Y': '와이', 'KDI': '케이디아이', 'DOC': '디오씨', 'CIA': '씨아이에이', 'PBS': '피비에스', 'D': '디', 'PPropertyPositionPowerPrisonPS': '에스', 'francisco': '프란시스코', 'I': '아이', 'III': '아이아이', 'No joke': '노 조크', 'BBK': '비비케이', 'LA': '엘에이', 'Don': '', 't worry be happy': ' 워리 비 해피', 'NO': '엔오', 'it was our sky': '잇 워즈 아워 스카이', 'it is our sky': '잇 이즈 아워 스카이', 'NEIS': '엔이아이에스', 'IMF': '아이엠에프', 'apology': '어폴로지', 'humble': '험블', 'M': '엠', 'Nowhere Man': '노웨어 맨', 'The Tenth Man': '더 텐쓰 맨', 'PBS': '피비에스', 'BBC': '비비씨', 'MRJ': '엠알제이', 'CCTV': '씨씨티비', 'Pick me up': '픽 미 업', 'DNA': '디엔에이', 'UN': '유엔', 'STOP': '스탑', 'PRESS': '프레스', 'not to be': '낫 투비', 'Denial': '디나이얼', 'G': '지', 'IMF': '아이엠에프', 'GDP': '지디피', 'JTBC': '제이티비씨', 'Time flies like an arrow': '타임 플라이즈 라이크 언 애로우', 'DDT': '디디티', 'AI': '에이아이', 'Z': '제트', 'OECD': '오이씨디', 'N': '앤', 'A': '에이', 'MB': '엠비', 'EH': '이에이치', 'IS': '아이에스', 
'TV': '티비', 'MIT': '엠아이티', 'KBO': '케이비오', 'I love America': '아이 러브 아메리카', 'SF': '에스에프', 'Q': '큐', 'KFX': '케이에프엑스', 'PM': '피엠', 'Prime Minister': '프라임 미니스터', 'Swordline': '스워드라인', 'TBS': '티비에스', 'DDT': '디디티', 'CS': '씨에스', 'Reflecting Absence': '리플렉팅 앱센스', 'PBS': '피비에스', 'Drum being beaten by everyone': '드럼 빙 비튼 바이 에브리원', 'negative pressure': '네거티브 프레셔', 'F': '에프', 'KIA': '기아', 'FTA': '에프티에이', 'Que sais-je': '', 'UFC': '유에프씨', 'P': '피', 'DJ': '디제이', 'Chaebol': '채벌', 'BBC': '비비씨', 'OECD': '오이씨디', 'BC': '삐씨', 'C': '씨', 'B': '씨', 'KY': '케이와이', 'K': '케이', 'CEO': '씨이오', 'YH': '와이에치', 'IS': '아이에스', 'who are you': '후 얼 유', 'Y': '와이', 'The Devils Advocate': '더 데빌즈 어드보카트', 'YS': '와이에스', 'so sorry': '쏘 쏘리', 'Santa': '산타', 'Big Endian': '빅 엔디안', 'Small Endian': '스몰 엔디안', 'Oh Captain My Captain': '오 캡틴 마이 캡틴', 'AIB': '에이아이비', 'K': '케이', 'PBS': '피비에스'} <|reserved_special_token_1|> # coding: utf-8 etc_dictionary = { '2 30대': '이삼십대', '20~30대': '이삼십대', '20, 30대': '이십대 삼십대', '1+1': '원플러스원', '3에서 6개월인': '3개월에서 육개월인', } english_dictionary = { 'Devsisters': '데브시스터즈', 'track': '트랙', # krbook 'LA': '엘에이', 'LG': '엘지', 'KOREA': '코리아', 'JSA': '제이에스에이', 'PGA': '피지에이', 'GA': '지에이', 'idol': '아이돌', 'KTX': '케이티엑스', 'AC': '에이씨', 'DVD': '디비디', 'US': '유에스', 'CNN': '씨엔엔', 'LPGA': '엘피지에이', 'P': '피', 'L': '엘', 'T': '티', 'B': '비', 'C': '씨', 'BIFF': '비아이에프에프', 'GV': '지비', # JTBC 'IT': '아이티', 'IQ': '아이큐', 'JTBC': '제이티비씨', 'trickle down effect': '트리클 다운 이펙트', 'trickle up effect': '트리클 업 이펙트', 'down': '다운', 'up': '업', 'FCK': '에프씨케이', 'AP': '에이피', 'WHERETHEWILDTHINGSARE': '', 'Rashomon Effect': '', 'O': '오', 'OO': '오오', 'B': '비', 'GDP': '지디피', 'CIPA': '씨아이피에이', 'YS': '와이에스', 'Y': '와이', 'S': '에스', 'JTBC': '제이티비씨', 'PC': '피씨', 'bill': '빌', 'Halmuny': '하모니', ##### 'X': '엑스', 'SNS': '에스엔에스', 'ability': '어빌리티', 'shy': '', 'CCTV': '씨씨티비', 'IT': '아이티', 'the tenth man': '더 텐쓰 맨', #### 'L': '엘', 'PC': '피씨', 'YSDJJPMB': '', ######## 'Content Attitude Timing': '컨텐트 애티튜드 타이밍', 'CAT': '캣', 'IS': '아이에스', 'SNS': '에스엔에스', 
'K': '케이', 'Y': '와이', 'KDI': '케이디아이', 'DOC': '디오씨', 'CIA': '씨아이에이', 'PBS': '피비에스', 'D': '디', 'PPropertyPositionPowerPrisonP' 'S': '에스', 'francisco': '프란시스코', 'I': '아이', 'III': '아이아이', ###### 'No joke': '노 조크', 'BBK': '비비케이', 'LA': '엘에이', 'Don': '', 't worry be happy': ' 워리 비 해피', 'NO': '엔오', ##### 'it was our sky': '잇 워즈 아워 스카이', 'it is our sky': '잇 이즈 아워 스카이', #### 'NEIS': '엔이아이에스', ##### 'IMF': '아이엠에프', 'apology': '어폴로지', 'humble': '험블', 'M': '엠', 'Nowhere Man': '노웨어 맨', 'The Tenth Man': '더 텐쓰 맨', 'PBS': '피비에스', 'BBC': '비비씨', 'MRJ': '엠알제이', 'CCTV': '씨씨티비', 'Pick me up': '픽 미 업', 'DNA': '디엔에이', 'UN': '유엔', 'STOP': '스탑', ##### 'PRESS': '프레스', ##### 'not to be': '낫 투비', 'Denial': '디나이얼', 'G': '지', 'IMF': '아이엠에프', 'GDP': '지디피', 'JTBC': '제이티비씨', 'Time flies like an arrow': '타임 플라이즈 라이크 언 애로우', 'DDT': '디디티', 'AI': '에이아이', 'Z': '제트', 'OECD': '오이씨디', 'N': '앤', 'A': '에이', 'MB': '엠비', 'EH': '이에이치', 'IS': '아이에스', 'TV': '티비', 'MIT': '엠아이티', 'KBO': '케이비오', 'I love America': '아이 러브 아메리카', 'SF': '에스에프', 'Q': '큐', 'KFX': '케이에프엑스', 'PM': '피엠', 'Prime Minister': '프라임 미니스터', 'Swordline': '스워드라인', 'TBS': '티비에스', 'DDT': '디디티', 'CS': '씨에스', 'Reflecting Absence': '리플렉팅 앱센스', 'PBS': '피비에스', 'Drum being beaten by everyone': '드럼 빙 비튼 바이 에브리원', 'negative pressure': '네거티브 프레셔', 'F': '에프', 'KIA': '기아', 'FTA': '에프티에이', 'Que sais-je': '', 'UFC': '유에프씨', 'P': '피', 'DJ': '디제이', 'Chaebol': '채벌', 'BBC': '비비씨', 'OECD': '오이씨디', 'BC': '삐씨', 'C': '씨', 'B': '씨', 'KY': '케이와이', 'K': '케이', 'CEO': '씨이오', 'YH': '와이에치', 'IS': '아이에스', 'who are you': '후 얼 유', 'Y': '와이', 'The Devils Advocate': '더 데빌즈 어드보카트', 'YS': '와이에스', 'so sorry': '쏘 쏘리', 'Santa': '산타', 'Big Endian': '빅 엔디안', 'Small Endian': '스몰 엔디안', 'Oh Captain My Captain': '오 캡틴 마이 캡틴', 'AIB': '에이아이비', 'K': '케이', 'PBS': '피비에스', }
flexible
{ "blob_id": "ccd1e57518065963158984dda52297db45ce204e", "index": 2471, "step-1": "<mask token>\n", "step-2": "etc_dictionary = {'2 30대': '이삼십대', '20~30대': '이삼십대', '20, 30대': '이십대 삼십대',\n '1+1': '원플러스원', '3에서 6개월인': '3개월에서 육개월인'}\nenglish_dictionary = {'Devsisters': '데브시스터즈', 'track': '트랙', 'LA': '엘에이',\n 'LG': '엘지', 'KOREA': '코리아', 'JSA': '제이에스에이', 'PGA': '피지에이', 'GA': '지에이',\n 'idol': '아이돌', 'KTX': '케이티엑스', 'AC': '에이씨', 'DVD': '디비디', 'US': '유에스',\n 'CNN': '씨엔엔', 'LPGA': '엘피지에이', 'P': '피', 'L': '엘', 'T': '티', 'B': '비',\n 'C': '씨', 'BIFF': '비아이에프에프', 'GV': '지비', 'IT': '아이티', 'IQ': '아이큐',\n 'JTBC': '제이티비씨', 'trickle down effect': '트리클 다운 이펙트',\n 'trickle up effect': '트리클 업 이펙트', 'down': '다운', 'up': '업', 'FCK':\n '에프씨케이', 'AP': '에이피', 'WHERETHEWILDTHINGSARE': '', 'Rashomon Effect':\n '', 'O': '오', 'OO': '오오', 'B': '비', 'GDP': '지디피', 'CIPA': '씨아이피에이',\n 'YS': '와이에스', 'Y': '와이', 'S': '에스', 'JTBC': '제이티비씨', 'PC': '피씨', 'bill':\n '빌', 'Halmuny': '하모니', 'X': '엑스', 'SNS': '에스엔에스', 'ability': '어빌리티',\n 'shy': '', 'CCTV': '씨씨티비', 'IT': '아이티', 'the tenth man': '더 텐쓰 맨', 'L':\n '엘', 'PC': '피씨', 'YSDJJPMB': '', 'Content Attitude Timing':\n '컨텐트 애티튜드 타이밍', 'CAT': '캣', 'IS': '아이에스', 'SNS': '에스엔에스', 'K': '케이',\n 'Y': '와이', 'KDI': '케이디아이', 'DOC': '디오씨', 'CIA': '씨아이에이', 'PBS': '피비에스',\n 'D': '디', 'PPropertyPositionPowerPrisonPS': '에스', 'francisco': '프란시스코',\n 'I': '아이', 'III': '아이아이', 'No joke': '노 조크', 'BBK': '비비케이', 'LA': '엘에이',\n 'Don': '', 't worry be happy': ' 워리 비 해피', 'NO': '엔오', 'it was our sky':\n '잇 워즈 아워 스카이', 'it is our sky': '잇 이즈 아워 스카이', 'NEIS': '엔이아이에스', 'IMF':\n '아이엠에프', 'apology': '어폴로지', 'humble': '험블', 'M': '엠', 'Nowhere Man':\n '노웨어 맨', 'The Tenth Man': '더 텐쓰 맨', 'PBS': '피비에스', 'BBC': '비비씨', 'MRJ':\n '엠알제이', 'CCTV': '씨씨티비', 'Pick me up': '픽 미 업', 'DNA': '디엔에이', 'UN':\n '유엔', 'STOP': '스탑', 'PRESS': '프레스', 'not to be': '낫 투비', 'Denial':\n '디나이얼', 'G': '지', 'IMF': '아이엠에프', 'GDP': '지디피', 'JTBC': '제이티비씨',\n 'Time flies like an arrow': '타임 플라이즈 라이크 언 애로우', 'DDT': 
'디디티', 'AI':\n '에이아이', 'Z': '제트', 'OECD': '오이씨디', 'N': '앤', 'A': '에이', 'MB': '엠비',\n 'EH': '이에이치', 'IS': '아이에스', 'TV': '티비', 'MIT': '엠아이티', 'KBO': '케이비오',\n 'I love America': '아이 러브 아메리카', 'SF': '에스에프', 'Q': '큐', 'KFX': '케이에프엑스',\n 'PM': '피엠', 'Prime Minister': '프라임 미니스터', 'Swordline': '스워드라인', 'TBS':\n '티비에스', 'DDT': '디디티', 'CS': '씨에스', 'Reflecting Absence': '리플렉팅 앱센스',\n 'PBS': '피비에스', 'Drum being beaten by everyone': '드럼 빙 비튼 바이 에브리원',\n 'negative pressure': '네거티브 프레셔', 'F': '에프', 'KIA': '기아', 'FTA': '에프티에이',\n 'Que sais-je': '', 'UFC': '유에프씨', 'P': '피', 'DJ': '디제이', 'Chaebol':\n '채벌', 'BBC': '비비씨', 'OECD': '오이씨디', 'BC': '삐씨', 'C': '씨', 'B': '씨',\n 'KY': '케이와이', 'K': '케이', 'CEO': '씨이오', 'YH': '와이에치', 'IS': '아이에스',\n 'who are you': '후 얼 유', 'Y': '와이', 'The Devils Advocate': '더 데빌즈 어드보카트',\n 'YS': '와이에스', 'so sorry': '쏘 쏘리', 'Santa': '산타', 'Big Endian': '빅 엔디안',\n 'Small Endian': '스몰 엔디안', 'Oh Captain My Captain': '오 캡틴 마이 캡틴', 'AIB':\n '에이아이비', 'K': '케이', 'PBS': '피비에스'}\n", "step-3": "# coding: utf-8\r\n\r\netc_dictionary = {\r\n '2 30대': '이삼십대',\r\n '20~30대': '이삼십대',\r\n '20, 30대': '이십대 삼십대',\r\n '1+1': '원플러스원',\r\n '3에서 6개월인': '3개월에서 육개월인',\r\n}\r\n\r\nenglish_dictionary = {\r\n 'Devsisters': '데브시스터즈',\r\n 'track': '트랙',\r\n\r\n # krbook\r\n 'LA': '엘에이',\r\n 'LG': '엘지',\r\n 'KOREA': '코리아',\r\n 'JSA': '제이에스에이',\r\n 'PGA': '피지에이',\r\n 'GA': '지에이',\r\n 'idol': '아이돌',\r\n 'KTX': '케이티엑스',\r\n 'AC': '에이씨',\r\n 'DVD': '디비디',\r\n 'US': '유에스',\r\n 'CNN': '씨엔엔',\r\n 'LPGA': '엘피지에이',\r\n 'P': '피',\r\n 'L': '엘',\r\n 'T': '티',\r\n 'B': '비',\r\n 'C': '씨',\r\n 'BIFF': '비아이에프에프',\r\n 'GV': '지비',\r\n\r\n # JTBC\r\n 'IT': '아이티',\r\n 'IQ': '아이큐',\r\n 'JTBC': '제이티비씨',\r\n 'trickle down effect': '트리클 다운 이펙트',\r\n 'trickle up effect': '트리클 업 이펙트',\r\n 'down': '다운',\r\n 'up': '업',\r\n 'FCK': '에프씨케이',\r\n 'AP': '에이피',\r\n 'WHERETHEWILDTHINGSARE': '',\r\n 'Rashomon Effect': '',\r\n 'O': '오',\r\n 'OO': '오오',\r\n 'B': '비',\r\n 'GDP': '지디피',\r\n 'CIPA': '씨아이피에이',\r\n 'YS': '와이에스',\r\n 
'Y': '와이',\r\n 'S': '에스',\r\n 'JTBC': '제이티비씨',\r\n 'PC': '피씨',\r\n 'bill': '빌',\r\n 'Halmuny': '하모니', #####\r\n 'X': '엑스',\r\n 'SNS': '에스엔에스',\r\n 'ability': '어빌리티',\r\n 'shy': '',\r\n 'CCTV': '씨씨티비',\r\n 'IT': '아이티',\r\n 'the tenth man': '더 텐쓰 맨', ####\r\n 'L': '엘',\r\n 'PC': '피씨',\r\n 'YSDJJPMB': '', ########\r\n 'Content Attitude Timing': '컨텐트 애티튜드 타이밍',\r\n 'CAT': '캣',\r\n 'IS': '아이에스',\r\n 'SNS': '에스엔에스',\r\n 'K': '케이',\r\n 'Y': '와이',\r\n 'KDI': '케이디아이',\r\n 'DOC': '디오씨',\r\n 'CIA': '씨아이에이',\r\n 'PBS': '피비에스',\r\n 'D': '디',\r\n 'PPropertyPositionPowerPrisonP'\r\n 'S': '에스',\r\n 'francisco': '프란시스코',\r\n 'I': '아이',\r\n 'III': '아이아이', ######\r\n 'No joke': '노 조크',\r\n 'BBK': '비비케이',\r\n 'LA': '엘에이',\r\n 'Don': '',\r\n 't worry be happy': ' 워리 비 해피',\r\n 'NO': '엔오', #####\r\n 'it was our sky': '잇 워즈 아워 스카이',\r\n 'it is our sky': '잇 이즈 아워 스카이', ####\r\n 'NEIS': '엔이아이에스', #####\r\n 'IMF': '아이엠에프',\r\n 'apology': '어폴로지',\r\n 'humble': '험블',\r\n 'M': '엠',\r\n 'Nowhere Man': '노웨어 맨',\r\n 'The Tenth Man': '더 텐쓰 맨',\r\n 'PBS': '피비에스',\r\n 'BBC': '비비씨',\r\n 'MRJ': '엠알제이',\r\n 'CCTV': '씨씨티비',\r\n 'Pick me up': '픽 미 업',\r\n 'DNA': '디엔에이',\r\n 'UN': '유엔',\r\n 'STOP': '스탑', #####\r\n 'PRESS': '프레스', #####\r\n 'not to be': '낫 투비',\r\n 'Denial': '디나이얼',\r\n 'G': '지',\r\n 'IMF': '아이엠에프',\r\n 'GDP': '지디피',\r\n 'JTBC': '제이티비씨',\r\n 'Time flies like an arrow': '타임 플라이즈 라이크 언 애로우',\r\n 'DDT': '디디티',\r\n 'AI': '에이아이',\r\n 'Z': '제트',\r\n 'OECD': '오이씨디',\r\n 'N': '앤',\r\n 'A': '에이',\r\n 'MB': '엠비',\r\n 'EH': '이에이치',\r\n 'IS': '아이에스',\r\n 'TV': '티비',\r\n 'MIT': '엠아이티',\r\n 'KBO': '케이비오',\r\n 'I love America': '아이 러브 아메리카',\r\n 'SF': '에스에프',\r\n 'Q': '큐',\r\n 'KFX': '케이에프엑스',\r\n 'PM': '피엠',\r\n 'Prime Minister': '프라임 미니스터',\r\n 'Swordline': '스워드라인',\r\n 'TBS': '티비에스',\r\n 'DDT': '디디티',\r\n 'CS': '씨에스',\r\n 'Reflecting Absence': '리플렉팅 앱센스',\r\n 'PBS': '피비에스',\r\n 'Drum being beaten by everyone': '드럼 빙 비튼 바이 에브리원',\r\n 'negative pressure': '네거티브 프레셔',\r\n 'F': '에프',\r\n 'KIA': '기아',\r\n 
'FTA': '에프티에이',\r\n 'Que sais-je': '',\r\n 'UFC': '유에프씨',\r\n 'P': '피',\r\n 'DJ': '디제이',\r\n 'Chaebol': '채벌',\r\n 'BBC': '비비씨',\r\n 'OECD': '오이씨디',\r\n 'BC': '삐씨',\r\n 'C': '씨',\r\n 'B': '씨',\r\n 'KY': '케이와이',\r\n 'K': '케이',\r\n 'CEO': '씨이오',\r\n 'YH': '와이에치',\r\n 'IS': '아이에스',\r\n 'who are you': '후 얼 유',\r\n 'Y': '와이',\r\n 'The Devils Advocate': '더 데빌즈 어드보카트',\r\n 'YS': '와이에스',\r\n 'so sorry': '쏘 쏘리',\r\n 'Santa': '산타',\r\n 'Big Endian': '빅 엔디안',\r\n 'Small Endian': '스몰 엔디안',\r\n 'Oh Captain My Captain': '오 캡틴 마이 캡틴',\r\n 'AIB': '에이아이비',\r\n 'K': '케이',\r\n 'PBS': '피비에스',\r\n}\r\n", "step-4": null, "step-5": null, "step-ids": [ 0, 1, 2 ] }
[ 0, 1, 2 ]
<|reserved_special_token_0|> def test_forgotten_initialized_on_protected(): blueprint = Blueprint('Test') @blueprint.get('/protected') @protected() def protected_hello_world(request): return json({'message': 'hello world'}) @blueprint.route('/scoped') @scoped('something') async def scoped_endpoint(request): return json({'scoped': True}) app = Sanic('sanic-jwt-test') sanicjwt = Initialize(blueprint, app=app, authenticate=lambda x: True) app.blueprint(blueprint, url_prefix='/test') _, response = app.test_client.post('/test/auth', json={'username': 'user1', 'password': 'abcxyz'}) access_token = response.json.get(sanicjwt.config.access_token_name(), None) _, response = app.test_client.get('/test/protected', headers={ 'Authorization': 'Bearer {}'.format(access_token)}) assert response.status == 500 assert response.json.get('exception') == 'SanicJWTException' _, response = app.test_client.get('/test/scoped', headers={ 'Authorization': 'Bearer {}'.format(access_token)}) assert response.status == 500 assert response.json.get('exception') == 'SanicJWTException' def test_option_method_on_protected(app): sanic_app, sanic_jwt = app @sanic_app.route('/protected/options', methods=['OPTIONS']) @sanic_jwt.protected() async def my_protected_options(request): return text('', status=204) _, response = sanic_app.test_client.options('/protected/options') assert response.status == 204 <|reserved_special_token_0|> def test_inject_user_on_instance(app_with_retrieve_user): sanic_app, sanic_jwt = app_with_retrieve_user _, response = sanic_app.test_client.post('/auth', json={'username': 'user1', 'password': 'abcxyz'}) sanic_app.router.reset() @sanic_app.route('/protected/user') @sanic_jwt.inject_user() @sanic_jwt.protected() async def my_protected_user(request, user): return json({'user_id': user.user_id}) access_token = response.json.get(sanic_jwt.config.access_token_name(), None ) _, response = sanic_app.test_client.get('/auth/me', headers={ 'Authorization': 'Bearer 
{}'.format(access_token)}) assert response.json.get('me').get('user_id') == 1 _, response = sanic_app.test_client.get('/protected/user', headers={ 'Authorization': 'Bearer {}'.format(access_token)}) assert response.status == 200 assert response.json.get('user_id') == 1 <|reserved_special_token_0|> def test_inject_user_with_auth_mode_off(app_with_retrieve_user): async def retrieve_user(request, payload, *args, **kwargs): return {'user_id': 123} microservice_app = Sanic('sanic-jwt-test') microservice_sanic_jwt = Initialize(microservice_app, auth_mode=False, retrieve_user=retrieve_user) @microservice_app.route('/protected/user') @microservice_sanic_jwt.inject_user() @microservice_sanic_jwt.protected() async def my_protected_user(request, user): return json({'user_id': user.get('user_id')}) sanic_app, sanic_jwt = app_with_retrieve_user _, response = sanic_app.test_client.post('/auth', json={'username': 'user1', 'password': 'abcxyz'}) access_token = response.json.get(sanic_jwt.config.access_token_name(), None ) _, response = microservice_app.test_client.get('/protected/user', headers={'Authorization': 'Bearer {}'.format(access_token)}) assert response.status == 200 assert response.json.get('user_id') == 123 _, response = microservice_app.test_client.get('/protected/user') assert response.status == 401 def test_redirect_without_url(app): sanic_app, sanic_jwt = app @sanic_app.route('/index.html') def index(request): return html('<html><body>Home</body></html>') @sanic_app.route('/protected/static') @sanic_jwt.protected(redirect_on_fail=True) async def my_protected_static(request): return text('', status=200) request, response = sanic_app.test_client.get('/protected/static') assert response.status == 200 assert response.body == b'<html><body>Home</body></html>' assert response.history assert response.history[0].status_code == 302 <|reserved_special_token_0|> def test_redirect_with_configured_url(): sanic_app = Sanic('sanic-jwt-test') sanic_jwt = Initialize(sanic_app, 
auth_mode=False, login_redirect_url= '/unprotected') @sanic_app.route('/protected/static') @sanic_jwt.protected(redirect_on_fail=True) async def my_protected_static(request): return text('', status=200) @sanic_app.route('/unprotected') async def my_unprotected_goto(request): return text('unprotected content', status=200) _, response = sanic_app.test_client.get('/protected/static') assert response.status == 200 and response.text == 'unprotected content' <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> def test_forgotten_initialized_on_protected(): blueprint = Blueprint('Test') @blueprint.get('/protected') @protected() def protected_hello_world(request): return json({'message': 'hello world'}) @blueprint.route('/scoped') @scoped('something') async def scoped_endpoint(request): return json({'scoped': True}) app = Sanic('sanic-jwt-test') sanicjwt = Initialize(blueprint, app=app, authenticate=lambda x: True) app.blueprint(blueprint, url_prefix='/test') _, response = app.test_client.post('/test/auth', json={'username': 'user1', 'password': 'abcxyz'}) access_token = response.json.get(sanicjwt.config.access_token_name(), None) _, response = app.test_client.get('/test/protected', headers={ 'Authorization': 'Bearer {}'.format(access_token)}) assert response.status == 500 assert response.json.get('exception') == 'SanicJWTException' _, response = app.test_client.get('/test/scoped', headers={ 'Authorization': 'Bearer {}'.format(access_token)}) assert response.status == 500 assert response.json.get('exception') == 'SanicJWTException' def test_option_method_on_protected(app): sanic_app, sanic_jwt = app @sanic_app.route('/protected/options', methods=['OPTIONS']) @sanic_jwt.protected() async def my_protected_options(request): return text('', status=204) _, response = sanic_app.test_client.options('/protected/options') assert response.status == 204 <|reserved_special_token_0|> def test_inject_user_on_instance(app_with_retrieve_user): sanic_app, 
sanic_jwt = app_with_retrieve_user _, response = sanic_app.test_client.post('/auth', json={'username': 'user1', 'password': 'abcxyz'}) sanic_app.router.reset() @sanic_app.route('/protected/user') @sanic_jwt.inject_user() @sanic_jwt.protected() async def my_protected_user(request, user): return json({'user_id': user.user_id}) access_token = response.json.get(sanic_jwt.config.access_token_name(), None ) _, response = sanic_app.test_client.get('/auth/me', headers={ 'Authorization': 'Bearer {}'.format(access_token)}) assert response.json.get('me').get('user_id') == 1 _, response = sanic_app.test_client.get('/protected/user', headers={ 'Authorization': 'Bearer {}'.format(access_token)}) assert response.status == 200 assert response.json.get('user_id') == 1 <|reserved_special_token_0|> def test_inject_user_with_auth_mode_off(app_with_retrieve_user): async def retrieve_user(request, payload, *args, **kwargs): return {'user_id': 123} microservice_app = Sanic('sanic-jwt-test') microservice_sanic_jwt = Initialize(microservice_app, auth_mode=False, retrieve_user=retrieve_user) @microservice_app.route('/protected/user') @microservice_sanic_jwt.inject_user() @microservice_sanic_jwt.protected() async def my_protected_user(request, user): return json({'user_id': user.get('user_id')}) sanic_app, sanic_jwt = app_with_retrieve_user _, response = sanic_app.test_client.post('/auth', json={'username': 'user1', 'password': 'abcxyz'}) access_token = response.json.get(sanic_jwt.config.access_token_name(), None ) _, response = microservice_app.test_client.get('/protected/user', headers={'Authorization': 'Bearer {}'.format(access_token)}) assert response.status == 200 assert response.json.get('user_id') == 123 _, response = microservice_app.test_client.get('/protected/user') assert response.status == 401 def test_redirect_without_url(app): sanic_app, sanic_jwt = app @sanic_app.route('/index.html') def index(request): return html('<html><body>Home</body></html>') 
@sanic_app.route('/protected/static') @sanic_jwt.protected(redirect_on_fail=True) async def my_protected_static(request): return text('', status=200) request, response = sanic_app.test_client.get('/protected/static') assert response.status == 200 assert response.body == b'<html><body>Home</body></html>' assert response.history assert response.history[0].status_code == 302 <|reserved_special_token_0|> def test_redirect_with_configured_url(): sanic_app = Sanic('sanic-jwt-test') sanic_jwt = Initialize(sanic_app, auth_mode=False, login_redirect_url= '/unprotected') @sanic_app.route('/protected/static') @sanic_jwt.protected(redirect_on_fail=True) async def my_protected_static(request): return text('', status=200) @sanic_app.route('/unprotected') async def my_unprotected_goto(request): return text('unprotected content', status=200) _, response = sanic_app.test_client.get('/protected/static') assert response.status == 200 and response.text == 'unprotected content' def test_authenticated_redirect(app_with_retrieve_user): sanic_app, sanic_jwt = app_with_retrieve_user _, response = sanic_app.test_client.post('/auth', json={'username': 'user1', 'password': 'abcxyz'}) sanic_app.router.reset() @sanic_app.route('/protected/static') @sanic_jwt.protected(redirect_on_fail=True) async def my_protected_static(request): return text('protected content', status=200) @sanic_app.route('/unprotected') async def my_unprotected_goto(request): return text('unprotected content', status=200) access_token = response.json.get(sanic_jwt.config.access_token_name(), None ) _, response = sanic_app.test_client.get('/protected/static', headers={ 'Authorization': 'Bearer {}'.format(access_token)}) assert response.status == 200 and response.text == 'protected content' <|reserved_special_token_1|> <|reserved_special_token_0|> def test_forgotten_initialized_on_protected(): blueprint = Blueprint('Test') @blueprint.get('/protected') @protected() def protected_hello_world(request): return json({'message': 
'hello world'}) @blueprint.route('/scoped') @scoped('something') async def scoped_endpoint(request): return json({'scoped': True}) app = Sanic('sanic-jwt-test') sanicjwt = Initialize(blueprint, app=app, authenticate=lambda x: True) app.blueprint(blueprint, url_prefix='/test') _, response = app.test_client.post('/test/auth', json={'username': 'user1', 'password': 'abcxyz'}) access_token = response.json.get(sanicjwt.config.access_token_name(), None) _, response = app.test_client.get('/test/protected', headers={ 'Authorization': 'Bearer {}'.format(access_token)}) assert response.status == 500 assert response.json.get('exception') == 'SanicJWTException' _, response = app.test_client.get('/test/scoped', headers={ 'Authorization': 'Bearer {}'.format(access_token)}) assert response.status == 500 assert response.json.get('exception') == 'SanicJWTException' def test_option_method_on_protected(app): sanic_app, sanic_jwt = app @sanic_app.route('/protected/options', methods=['OPTIONS']) @sanic_jwt.protected() async def my_protected_options(request): return text('', status=204) _, response = sanic_app.test_client.options('/protected/options') assert response.status == 204 def test_inject_user_regular(app_with_retrieve_user): sanic_app, sanic_jwt = app_with_retrieve_user _, response = sanic_app.test_client.post('/auth', json={'username': 'user1', 'password': 'abcxyz'}) sanic_app.router.reset() @sanic_app.route('/protected/user') @inject_user() @protected() async def my_protected_user(request, user): return json({'user_id': user.user_id}) access_token = response.json.get(sanic_jwt.config.access_token_name(), None ) _, response = sanic_app.test_client.get('/auth/me', headers={ 'Authorization': 'Bearer {}'.format(access_token)}) assert response.json.get('me').get('user_id') == 1 _, response = sanic_app.test_client.get('/protected/user', headers={ 'Authorization': 'Bearer {}'.format(access_token)}) assert response.status == 200 assert response.json.get('user_id') == 1 def 
test_inject_user_on_instance(app_with_retrieve_user): sanic_app, sanic_jwt = app_with_retrieve_user _, response = sanic_app.test_client.post('/auth', json={'username': 'user1', 'password': 'abcxyz'}) sanic_app.router.reset() @sanic_app.route('/protected/user') @sanic_jwt.inject_user() @sanic_jwt.protected() async def my_protected_user(request, user): return json({'user_id': user.user_id}) access_token = response.json.get(sanic_jwt.config.access_token_name(), None ) _, response = sanic_app.test_client.get('/auth/me', headers={ 'Authorization': 'Bearer {}'.format(access_token)}) assert response.json.get('me').get('user_id') == 1 _, response = sanic_app.test_client.get('/protected/user', headers={ 'Authorization': 'Bearer {}'.format(access_token)}) assert response.status == 200 assert response.json.get('user_id') == 1 <|reserved_special_token_0|> def test_inject_user_with_auth_mode_off(app_with_retrieve_user): async def retrieve_user(request, payload, *args, **kwargs): return {'user_id': 123} microservice_app = Sanic('sanic-jwt-test') microservice_sanic_jwt = Initialize(microservice_app, auth_mode=False, retrieve_user=retrieve_user) @microservice_app.route('/protected/user') @microservice_sanic_jwt.inject_user() @microservice_sanic_jwt.protected() async def my_protected_user(request, user): return json({'user_id': user.get('user_id')}) sanic_app, sanic_jwt = app_with_retrieve_user _, response = sanic_app.test_client.post('/auth', json={'username': 'user1', 'password': 'abcxyz'}) access_token = response.json.get(sanic_jwt.config.access_token_name(), None ) _, response = microservice_app.test_client.get('/protected/user', headers={'Authorization': 'Bearer {}'.format(access_token)}) assert response.status == 200 assert response.json.get('user_id') == 123 _, response = microservice_app.test_client.get('/protected/user') assert response.status == 401 def test_redirect_without_url(app): sanic_app, sanic_jwt = app @sanic_app.route('/index.html') def index(request): return 
html('<html><body>Home</body></html>') @sanic_app.route('/protected/static') @sanic_jwt.protected(redirect_on_fail=True) async def my_protected_static(request): return text('', status=200) request, response = sanic_app.test_client.get('/protected/static') assert response.status == 200 assert response.body == b'<html><body>Home</body></html>' assert response.history assert response.history[0].status_code == 302 <|reserved_special_token_0|> def test_redirect_with_configured_url(): sanic_app = Sanic('sanic-jwt-test') sanic_jwt = Initialize(sanic_app, auth_mode=False, login_redirect_url= '/unprotected') @sanic_app.route('/protected/static') @sanic_jwt.protected(redirect_on_fail=True) async def my_protected_static(request): return text('', status=200) @sanic_app.route('/unprotected') async def my_unprotected_goto(request): return text('unprotected content', status=200) _, response = sanic_app.test_client.get('/protected/static') assert response.status == 200 and response.text == 'unprotected content' def test_authenticated_redirect(app_with_retrieve_user): sanic_app, sanic_jwt = app_with_retrieve_user _, response = sanic_app.test_client.post('/auth', json={'username': 'user1', 'password': 'abcxyz'}) sanic_app.router.reset() @sanic_app.route('/protected/static') @sanic_jwt.protected(redirect_on_fail=True) async def my_protected_static(request): return text('protected content', status=200) @sanic_app.route('/unprotected') async def my_unprotected_goto(request): return text('unprotected content', status=200) access_token = response.json.get(sanic_jwt.config.access_token_name(), None ) _, response = sanic_app.test_client.get('/protected/static', headers={ 'Authorization': 'Bearer {}'.format(access_token)}) assert response.status == 200 and response.text == 'protected content' <|reserved_special_token_1|> <|reserved_special_token_0|> def test_forgotten_initialized_on_protected(): blueprint = Blueprint('Test') @blueprint.get('/protected') @protected() def 
protected_hello_world(request): return json({'message': 'hello world'}) @blueprint.route('/scoped') @scoped('something') async def scoped_endpoint(request): return json({'scoped': True}) app = Sanic('sanic-jwt-test') sanicjwt = Initialize(blueprint, app=app, authenticate=lambda x: True) app.blueprint(blueprint, url_prefix='/test') _, response = app.test_client.post('/test/auth', json={'username': 'user1', 'password': 'abcxyz'}) access_token = response.json.get(sanicjwt.config.access_token_name(), None) _, response = app.test_client.get('/test/protected', headers={ 'Authorization': 'Bearer {}'.format(access_token)}) assert response.status == 500 assert response.json.get('exception') == 'SanicJWTException' _, response = app.test_client.get('/test/scoped', headers={ 'Authorization': 'Bearer {}'.format(access_token)}) assert response.status == 500 assert response.json.get('exception') == 'SanicJWTException' def test_option_method_on_protected(app): sanic_app, sanic_jwt = app @sanic_app.route('/protected/options', methods=['OPTIONS']) @sanic_jwt.protected() async def my_protected_options(request): return text('', status=204) _, response = sanic_app.test_client.options('/protected/options') assert response.status == 204 def test_inject_user_regular(app_with_retrieve_user): sanic_app, sanic_jwt = app_with_retrieve_user _, response = sanic_app.test_client.post('/auth', json={'username': 'user1', 'password': 'abcxyz'}) sanic_app.router.reset() @sanic_app.route('/protected/user') @inject_user() @protected() async def my_protected_user(request, user): return json({'user_id': user.user_id}) access_token = response.json.get(sanic_jwt.config.access_token_name(), None ) _, response = sanic_app.test_client.get('/auth/me', headers={ 'Authorization': 'Bearer {}'.format(access_token)}) assert response.json.get('me').get('user_id') == 1 _, response = sanic_app.test_client.get('/protected/user', headers={ 'Authorization': 'Bearer {}'.format(access_token)}) assert response.status == 200 
assert response.json.get('user_id') == 1 def test_inject_user_on_instance(app_with_retrieve_user): sanic_app, sanic_jwt = app_with_retrieve_user _, response = sanic_app.test_client.post('/auth', json={'username': 'user1', 'password': 'abcxyz'}) sanic_app.router.reset() @sanic_app.route('/protected/user') @sanic_jwt.inject_user() @sanic_jwt.protected() async def my_protected_user(request, user): return json({'user_id': user.user_id}) access_token = response.json.get(sanic_jwt.config.access_token_name(), None ) _, response = sanic_app.test_client.get('/auth/me', headers={ 'Authorization': 'Bearer {}'.format(access_token)}) assert response.json.get('me').get('user_id') == 1 _, response = sanic_app.test_client.get('/protected/user', headers={ 'Authorization': 'Bearer {}'.format(access_token)}) assert response.status == 200 assert response.json.get('user_id') == 1 def test_inject_user_on_instance_bp(app_with_retrieve_user): sanic_app, sanic_jwt = app_with_retrieve_user _, response = sanic_app.test_client.post('/auth', json={'username': 'user1', 'password': 'abcxyz'}) sanic_app.router.reset() @sanic_app.route('/protected/user') @sanic_jwt.inject_user() @sanic_jwt.protected() async def my_protected_user(request, user): return json({'user_id': user.user_id}) access_token = response.json.get(sanic_jwt.config.access_token_name(), None ) _, response = sanic_app.test_client.get('/auth/me', headers={ 'Authorization': 'Bearer {}'.format(access_token)}) assert response.json.get('me').get('user_id') == 1 _, response = sanic_app.test_client.get('/protected/user', headers={ 'Authorization': 'Bearer {}'.format(access_token)}) assert response.status == 200 assert response.json.get('user_id') == 1 def test_inject_user_on_instance_non_async(app_with_retrieve_user): sanic_app, sanic_jwt = app_with_retrieve_user _, response = sanic_app.test_client.post('/auth', json={'username': 'user1', 'password': 'abcxyz'}) sanic_app.router.reset() @sanic_app.route('/protected/user') 
@sanic_jwt.inject_user() @sanic_jwt.protected() def my_protected_user(request, user): return json({'user_id': user.user_id}) access_token = response.json.get(sanic_jwt.config.access_token_name(), None ) _, response = sanic_app.test_client.get('/auth/me', headers={ 'Authorization': 'Bearer {}'.format(access_token)}) assert response.json.get('me').get('user_id') == 1 _, response = sanic_app.test_client.get('/protected/user', headers={ 'Authorization': 'Bearer {}'.format(access_token)}) assert response.status == 200 assert response.json.get('user_id') == 1 def test_inject_user_with_auth_mode_off(app_with_retrieve_user): async def retrieve_user(request, payload, *args, **kwargs): return {'user_id': 123} microservice_app = Sanic('sanic-jwt-test') microservice_sanic_jwt = Initialize(microservice_app, auth_mode=False, retrieve_user=retrieve_user) @microservice_app.route('/protected/user') @microservice_sanic_jwt.inject_user() @microservice_sanic_jwt.protected() async def my_protected_user(request, user): return json({'user_id': user.get('user_id')}) sanic_app, sanic_jwt = app_with_retrieve_user _, response = sanic_app.test_client.post('/auth', json={'username': 'user1', 'password': 'abcxyz'}) access_token = response.json.get(sanic_jwt.config.access_token_name(), None ) _, response = microservice_app.test_client.get('/protected/user', headers={'Authorization': 'Bearer {}'.format(access_token)}) assert response.status == 200 assert response.json.get('user_id') == 123 _, response = microservice_app.test_client.get('/protected/user') assert response.status == 401 def test_redirect_without_url(app): sanic_app, sanic_jwt = app @sanic_app.route('/index.html') def index(request): return html('<html><body>Home</body></html>') @sanic_app.route('/protected/static') @sanic_jwt.protected(redirect_on_fail=True) async def my_protected_static(request): return text('', status=200) request, response = sanic_app.test_client.get('/protected/static') assert response.status == 200 assert 
response.body == b'<html><body>Home</body></html>' assert response.history assert response.history[0].status_code == 302 def test_redirect_with_decorator_url(app): sanic_app, sanic_jwt = app @sanic_app.route('/protected/static') @sanic_jwt.protected(redirect_on_fail=True, redirect_url='/unprotected') async def my_protected_static(request): return text('', status=200) @sanic_app.route('/unprotected') async def my_unprotected_goto(request): return text('unprotected content', status=200) _, response = sanic_app.test_client.get('/protected/static') assert response.status == 200 and response.text == 'unprotected content' def test_redirect_with_configured_url(): sanic_app = Sanic('sanic-jwt-test') sanic_jwt = Initialize(sanic_app, auth_mode=False, login_redirect_url= '/unprotected') @sanic_app.route('/protected/static') @sanic_jwt.protected(redirect_on_fail=True) async def my_protected_static(request): return text('', status=200) @sanic_app.route('/unprotected') async def my_unprotected_goto(request): return text('unprotected content', status=200) _, response = sanic_app.test_client.get('/protected/static') assert response.status == 200 and response.text == 'unprotected content' def test_authenticated_redirect(app_with_retrieve_user): sanic_app, sanic_jwt = app_with_retrieve_user _, response = sanic_app.test_client.post('/auth', json={'username': 'user1', 'password': 'abcxyz'}) sanic_app.router.reset() @sanic_app.route('/protected/static') @sanic_jwt.protected(redirect_on_fail=True) async def my_protected_static(request): return text('protected content', status=200) @sanic_app.route('/unprotected') async def my_unprotected_goto(request): return text('unprotected content', status=200) access_token = response.json.get(sanic_jwt.config.access_token_name(), None ) _, response = sanic_app.test_client.get('/protected/static', headers={ 'Authorization': 'Bearer {}'.format(access_token)}) assert response.status == 200 and response.text == 'protected content' 
<|reserved_special_token_1|> from sanic import Sanic from sanic.blueprints import Blueprint from sanic.response import html, json, text from sanic_jwt import Initialize from sanic_jwt.decorators import inject_user, protected, scoped def test_forgotten_initialized_on_protected(): blueprint = Blueprint("Test") @blueprint.get("/protected") @protected() def protected_hello_world(request): return json({"message": "hello world"}) @blueprint.route("/scoped") @scoped("something") async def scoped_endpoint(request): return json({"scoped": True}) app = Sanic("sanic-jwt-test") sanicjwt = Initialize(blueprint, app=app, authenticate=lambda x: True) app.blueprint(blueprint, url_prefix="/test") _, response = app.test_client.post( "/test/auth", json={"username": "user1", "password": "abcxyz"} ) access_token = response.json.get(sanicjwt.config.access_token_name(), None) _, response = app.test_client.get( "/test/protected", headers={"Authorization": "Bearer {}".format(access_token)}, ) assert response.status == 500 assert response.json.get("exception") == "SanicJWTException" _, response = app.test_client.get( "/test/scoped", headers={"Authorization": "Bearer {}".format(access_token)}, ) assert response.status == 500 assert response.json.get("exception") == "SanicJWTException" def test_option_method_on_protected(app): sanic_app, sanic_jwt = app @sanic_app.route("/protected/options", methods=["OPTIONS"]) @sanic_jwt.protected() async def my_protected_options(request): return text("", status=204) _, response = sanic_app.test_client.options("/protected/options") assert response.status == 204 def test_inject_user_regular(app_with_retrieve_user): sanic_app, sanic_jwt = app_with_retrieve_user _, response = sanic_app.test_client.post( "/auth", json={"username": "user1", "password": "abcxyz"} ) sanic_app.router.reset() @sanic_app.route("/protected/user") @inject_user() @protected() async def my_protected_user(request, user): return json({"user_id": user.user_id}) access_token = 
response.json.get( sanic_jwt.config.access_token_name(), None ) _, response = sanic_app.test_client.get( "/auth/me", headers={"Authorization": "Bearer {}".format(access_token)} ) assert response.json.get("me").get("user_id") == 1 _, response = sanic_app.test_client.get( "/protected/user", headers={"Authorization": "Bearer {}".format(access_token)}, ) assert response.status == 200 assert response.json.get("user_id") == 1 def test_inject_user_on_instance(app_with_retrieve_user): sanic_app, sanic_jwt = app_with_retrieve_user _, response = sanic_app.test_client.post( "/auth", json={"username": "user1", "password": "abcxyz"} ) sanic_app.router.reset() @sanic_app.route("/protected/user") @sanic_jwt.inject_user() @sanic_jwt.protected() async def my_protected_user(request, user): return json({"user_id": user.user_id}) access_token = response.json.get( sanic_jwt.config.access_token_name(), None ) _, response = sanic_app.test_client.get( "/auth/me", headers={"Authorization": "Bearer {}".format(access_token)} ) assert response.json.get("me").get("user_id") == 1 _, response = sanic_app.test_client.get( "/protected/user", headers={"Authorization": "Bearer {}".format(access_token)}, ) assert response.status == 200 assert response.json.get("user_id") == 1 def test_inject_user_on_instance_bp(app_with_retrieve_user): sanic_app, sanic_jwt = app_with_retrieve_user _, response = sanic_app.test_client.post( "/auth", json={"username": "user1", "password": "abcxyz"} ) sanic_app.router.reset() @sanic_app.route("/protected/user") @sanic_jwt.inject_user() @sanic_jwt.protected() async def my_protected_user(request, user): return json({"user_id": user.user_id}) access_token = response.json.get( sanic_jwt.config.access_token_name(), None ) _, response = sanic_app.test_client.get( "/auth/me", headers={"Authorization": "Bearer {}".format(access_token)} ) assert response.json.get("me").get("user_id") == 1 _, response = sanic_app.test_client.get( "/protected/user", headers={"Authorization": 
"Bearer {}".format(access_token)}, ) assert response.status == 200 assert response.json.get("user_id") == 1 def test_inject_user_on_instance_non_async(app_with_retrieve_user): sanic_app, sanic_jwt = app_with_retrieve_user _, response = sanic_app.test_client.post( "/auth", json={"username": "user1", "password": "abcxyz"} ) sanic_app.router.reset() @sanic_app.route("/protected/user") @sanic_jwt.inject_user() @sanic_jwt.protected() def my_protected_user(request, user): return json({"user_id": user.user_id}) access_token = response.json.get( sanic_jwt.config.access_token_name(), None ) _, response = sanic_app.test_client.get( "/auth/me", headers={"Authorization": "Bearer {}".format(access_token)} ) assert response.json.get("me").get("user_id") == 1 _, response = sanic_app.test_client.get( "/protected/user", headers={"Authorization": "Bearer {}".format(access_token)}, ) assert response.status == 200 assert response.json.get("user_id") == 1 def test_inject_user_with_auth_mode_off(app_with_retrieve_user): async def retrieve_user(request, payload, *args, **kwargs): return {"user_id": 123} microservice_app = Sanic("sanic-jwt-test") microservice_sanic_jwt = Initialize( microservice_app, auth_mode=False, retrieve_user=retrieve_user ) @microservice_app.route("/protected/user") @microservice_sanic_jwt.inject_user() @microservice_sanic_jwt.protected() async def my_protected_user(request, user): return json({"user_id": user.get("user_id")}) sanic_app, sanic_jwt = app_with_retrieve_user _, response = sanic_app.test_client.post( "/auth", json={"username": "user1", "password": "abcxyz"} ) access_token = response.json.get( sanic_jwt.config.access_token_name(), None ) _, response = microservice_app.test_client.get( "/protected/user", headers={"Authorization": "Bearer {}".format(access_token)}, ) assert response.status == 200 assert response.json.get("user_id") == 123 _, response = microservice_app.test_client.get("/protected/user") assert response.status == 401 def 
test_redirect_without_url(app): sanic_app, sanic_jwt = app @sanic_app.route("/index.html") def index(request): return html("<html><body>Home</body></html>") @sanic_app.route("/protected/static") @sanic_jwt.protected(redirect_on_fail=True) async def my_protected_static(request): return text("", status=200) request, response = sanic_app.test_client.get("/protected/static") assert response.status == 200 assert response.body == b"<html><body>Home</body></html>" assert response.history assert response.history[0].status_code == 302 def test_redirect_with_decorator_url(app): sanic_app, sanic_jwt = app @sanic_app.route("/protected/static") @sanic_jwt.protected(redirect_on_fail=True, redirect_url="/unprotected") async def my_protected_static(request): return text("", status=200) @sanic_app.route("/unprotected") async def my_unprotected_goto(request): return text("unprotected content", status=200) _, response = sanic_app.test_client.get("/protected/static") assert response.status == 200 and response.text == "unprotected content" def test_redirect_with_configured_url(): sanic_app = Sanic("sanic-jwt-test") sanic_jwt = Initialize( sanic_app, auth_mode=False, login_redirect_url="/unprotected" ) @sanic_app.route("/protected/static") @sanic_jwt.protected(redirect_on_fail=True) async def my_protected_static(request): return text("", status=200) @sanic_app.route("/unprotected") async def my_unprotected_goto(request): return text("unprotected content", status=200) _, response = sanic_app.test_client.get("/protected/static") assert response.status == 200 and response.text == "unprotected content" def test_authenticated_redirect(app_with_retrieve_user): sanic_app, sanic_jwt = app_with_retrieve_user _, response = sanic_app.test_client.post( "/auth", json={"username": "user1", "password": "abcxyz"} ) sanic_app.router.reset() @sanic_app.route("/protected/static") @sanic_jwt.protected(redirect_on_fail=True) async def my_protected_static(request): return text("protected content", 
status=200) @sanic_app.route("/unprotected") async def my_unprotected_goto(request): return text("unprotected content", status=200) access_token = response.json.get( sanic_jwt.config.access_token_name(), None ) _, response = sanic_app.test_client.get( "/protected/static", headers={"Authorization": "Bearer {}".format(access_token)}, ) assert response.status == 200 and response.text == "protected content"
flexible
{ "blob_id": "55fc197eebc4e06466e0fc0458957d0460602eef", "index": 2032, "step-1": "<mask token>\n\n\ndef test_forgotten_initialized_on_protected():\n blueprint = Blueprint('Test')\n\n @blueprint.get('/protected')\n @protected()\n def protected_hello_world(request):\n return json({'message': 'hello world'})\n\n @blueprint.route('/scoped')\n @scoped('something')\n async def scoped_endpoint(request):\n return json({'scoped': True})\n app = Sanic('sanic-jwt-test')\n sanicjwt = Initialize(blueprint, app=app, authenticate=lambda x: True)\n app.blueprint(blueprint, url_prefix='/test')\n _, response = app.test_client.post('/test/auth', json={'username':\n 'user1', 'password': 'abcxyz'})\n access_token = response.json.get(sanicjwt.config.access_token_name(), None)\n _, response = app.test_client.get('/test/protected', headers={\n 'Authorization': 'Bearer {}'.format(access_token)})\n assert response.status == 500\n assert response.json.get('exception') == 'SanicJWTException'\n _, response = app.test_client.get('/test/scoped', headers={\n 'Authorization': 'Bearer {}'.format(access_token)})\n assert response.status == 500\n assert response.json.get('exception') == 'SanicJWTException'\n\n\ndef test_option_method_on_protected(app):\n sanic_app, sanic_jwt = app\n\n @sanic_app.route('/protected/options', methods=['OPTIONS'])\n @sanic_jwt.protected()\n async def my_protected_options(request):\n return text('', status=204)\n _, response = sanic_app.test_client.options('/protected/options')\n assert response.status == 204\n\n\n<mask token>\n\n\ndef test_inject_user_on_instance(app_with_retrieve_user):\n sanic_app, sanic_jwt = app_with_retrieve_user\n _, response = sanic_app.test_client.post('/auth', json={'username':\n 'user1', 'password': 'abcxyz'})\n sanic_app.router.reset()\n\n @sanic_app.route('/protected/user')\n @sanic_jwt.inject_user()\n @sanic_jwt.protected()\n async def my_protected_user(request, user):\n return json({'user_id': user.user_id})\n access_token = 
response.json.get(sanic_jwt.config.access_token_name(), None\n )\n _, response = sanic_app.test_client.get('/auth/me', headers={\n 'Authorization': 'Bearer {}'.format(access_token)})\n assert response.json.get('me').get('user_id') == 1\n _, response = sanic_app.test_client.get('/protected/user', headers={\n 'Authorization': 'Bearer {}'.format(access_token)})\n assert response.status == 200\n assert response.json.get('user_id') == 1\n\n\n<mask token>\n\n\ndef test_inject_user_with_auth_mode_off(app_with_retrieve_user):\n\n async def retrieve_user(request, payload, *args, **kwargs):\n return {'user_id': 123}\n microservice_app = Sanic('sanic-jwt-test')\n microservice_sanic_jwt = Initialize(microservice_app, auth_mode=False,\n retrieve_user=retrieve_user)\n\n @microservice_app.route('/protected/user')\n @microservice_sanic_jwt.inject_user()\n @microservice_sanic_jwt.protected()\n async def my_protected_user(request, user):\n return json({'user_id': user.get('user_id')})\n sanic_app, sanic_jwt = app_with_retrieve_user\n _, response = sanic_app.test_client.post('/auth', json={'username':\n 'user1', 'password': 'abcxyz'})\n access_token = response.json.get(sanic_jwt.config.access_token_name(), None\n )\n _, response = microservice_app.test_client.get('/protected/user',\n headers={'Authorization': 'Bearer {}'.format(access_token)})\n assert response.status == 200\n assert response.json.get('user_id') == 123\n _, response = microservice_app.test_client.get('/protected/user')\n assert response.status == 401\n\n\ndef test_redirect_without_url(app):\n sanic_app, sanic_jwt = app\n\n @sanic_app.route('/index.html')\n def index(request):\n return html('<html><body>Home</body></html>')\n\n @sanic_app.route('/protected/static')\n @sanic_jwt.protected(redirect_on_fail=True)\n async def my_protected_static(request):\n return text('', status=200)\n request, response = sanic_app.test_client.get('/protected/static')\n assert response.status == 200\n assert response.body == 
b'<html><body>Home</body></html>'\n assert response.history\n assert response.history[0].status_code == 302\n\n\n<mask token>\n\n\ndef test_redirect_with_configured_url():\n sanic_app = Sanic('sanic-jwt-test')\n sanic_jwt = Initialize(sanic_app, auth_mode=False, login_redirect_url=\n '/unprotected')\n\n @sanic_app.route('/protected/static')\n @sanic_jwt.protected(redirect_on_fail=True)\n async def my_protected_static(request):\n return text('', status=200)\n\n @sanic_app.route('/unprotected')\n async def my_unprotected_goto(request):\n return text('unprotected content', status=200)\n _, response = sanic_app.test_client.get('/protected/static')\n assert response.status == 200 and response.text == 'unprotected content'\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\ndef test_forgotten_initialized_on_protected():\n blueprint = Blueprint('Test')\n\n @blueprint.get('/protected')\n @protected()\n def protected_hello_world(request):\n return json({'message': 'hello world'})\n\n @blueprint.route('/scoped')\n @scoped('something')\n async def scoped_endpoint(request):\n return json({'scoped': True})\n app = Sanic('sanic-jwt-test')\n sanicjwt = Initialize(blueprint, app=app, authenticate=lambda x: True)\n app.blueprint(blueprint, url_prefix='/test')\n _, response = app.test_client.post('/test/auth', json={'username':\n 'user1', 'password': 'abcxyz'})\n access_token = response.json.get(sanicjwt.config.access_token_name(), None)\n _, response = app.test_client.get('/test/protected', headers={\n 'Authorization': 'Bearer {}'.format(access_token)})\n assert response.status == 500\n assert response.json.get('exception') == 'SanicJWTException'\n _, response = app.test_client.get('/test/scoped', headers={\n 'Authorization': 'Bearer {}'.format(access_token)})\n assert response.status == 500\n assert response.json.get('exception') == 'SanicJWTException'\n\n\ndef test_option_method_on_protected(app):\n sanic_app, sanic_jwt = app\n\n @sanic_app.route('/protected/options', 
methods=['OPTIONS'])\n @sanic_jwt.protected()\n async def my_protected_options(request):\n return text('', status=204)\n _, response = sanic_app.test_client.options('/protected/options')\n assert response.status == 204\n\n\n<mask token>\n\n\ndef test_inject_user_on_instance(app_with_retrieve_user):\n sanic_app, sanic_jwt = app_with_retrieve_user\n _, response = sanic_app.test_client.post('/auth', json={'username':\n 'user1', 'password': 'abcxyz'})\n sanic_app.router.reset()\n\n @sanic_app.route('/protected/user')\n @sanic_jwt.inject_user()\n @sanic_jwt.protected()\n async def my_protected_user(request, user):\n return json({'user_id': user.user_id})\n access_token = response.json.get(sanic_jwt.config.access_token_name(), None\n )\n _, response = sanic_app.test_client.get('/auth/me', headers={\n 'Authorization': 'Bearer {}'.format(access_token)})\n assert response.json.get('me').get('user_id') == 1\n _, response = sanic_app.test_client.get('/protected/user', headers={\n 'Authorization': 'Bearer {}'.format(access_token)})\n assert response.status == 200\n assert response.json.get('user_id') == 1\n\n\n<mask token>\n\n\ndef test_inject_user_with_auth_mode_off(app_with_retrieve_user):\n\n async def retrieve_user(request, payload, *args, **kwargs):\n return {'user_id': 123}\n microservice_app = Sanic('sanic-jwt-test')\n microservice_sanic_jwt = Initialize(microservice_app, auth_mode=False,\n retrieve_user=retrieve_user)\n\n @microservice_app.route('/protected/user')\n @microservice_sanic_jwt.inject_user()\n @microservice_sanic_jwt.protected()\n async def my_protected_user(request, user):\n return json({'user_id': user.get('user_id')})\n sanic_app, sanic_jwt = app_with_retrieve_user\n _, response = sanic_app.test_client.post('/auth', json={'username':\n 'user1', 'password': 'abcxyz'})\n access_token = response.json.get(sanic_jwt.config.access_token_name(), None\n )\n _, response = microservice_app.test_client.get('/protected/user',\n headers={'Authorization': 'Bearer 
{}'.format(access_token)})\n assert response.status == 200\n assert response.json.get('user_id') == 123\n _, response = microservice_app.test_client.get('/protected/user')\n assert response.status == 401\n\n\ndef test_redirect_without_url(app):\n sanic_app, sanic_jwt = app\n\n @sanic_app.route('/index.html')\n def index(request):\n return html('<html><body>Home</body></html>')\n\n @sanic_app.route('/protected/static')\n @sanic_jwt.protected(redirect_on_fail=True)\n async def my_protected_static(request):\n return text('', status=200)\n request, response = sanic_app.test_client.get('/protected/static')\n assert response.status == 200\n assert response.body == b'<html><body>Home</body></html>'\n assert response.history\n assert response.history[0].status_code == 302\n\n\n<mask token>\n\n\ndef test_redirect_with_configured_url():\n sanic_app = Sanic('sanic-jwt-test')\n sanic_jwt = Initialize(sanic_app, auth_mode=False, login_redirect_url=\n '/unprotected')\n\n @sanic_app.route('/protected/static')\n @sanic_jwt.protected(redirect_on_fail=True)\n async def my_protected_static(request):\n return text('', status=200)\n\n @sanic_app.route('/unprotected')\n async def my_unprotected_goto(request):\n return text('unprotected content', status=200)\n _, response = sanic_app.test_client.get('/protected/static')\n assert response.status == 200 and response.text == 'unprotected content'\n\n\ndef test_authenticated_redirect(app_with_retrieve_user):\n sanic_app, sanic_jwt = app_with_retrieve_user\n _, response = sanic_app.test_client.post('/auth', json={'username':\n 'user1', 'password': 'abcxyz'})\n sanic_app.router.reset()\n\n @sanic_app.route('/protected/static')\n @sanic_jwt.protected(redirect_on_fail=True)\n async def my_protected_static(request):\n return text('protected content', status=200)\n\n @sanic_app.route('/unprotected')\n async def my_unprotected_goto(request):\n return text('unprotected content', status=200)\n access_token = 
response.json.get(sanic_jwt.config.access_token_name(), None\n )\n _, response = sanic_app.test_client.get('/protected/static', headers={\n 'Authorization': 'Bearer {}'.format(access_token)})\n assert response.status == 200 and response.text == 'protected content'\n", "step-3": "<mask token>\n\n\ndef test_forgotten_initialized_on_protected():\n blueprint = Blueprint('Test')\n\n @blueprint.get('/protected')\n @protected()\n def protected_hello_world(request):\n return json({'message': 'hello world'})\n\n @blueprint.route('/scoped')\n @scoped('something')\n async def scoped_endpoint(request):\n return json({'scoped': True})\n app = Sanic('sanic-jwt-test')\n sanicjwt = Initialize(blueprint, app=app, authenticate=lambda x: True)\n app.blueprint(blueprint, url_prefix='/test')\n _, response = app.test_client.post('/test/auth', json={'username':\n 'user1', 'password': 'abcxyz'})\n access_token = response.json.get(sanicjwt.config.access_token_name(), None)\n _, response = app.test_client.get('/test/protected', headers={\n 'Authorization': 'Bearer {}'.format(access_token)})\n assert response.status == 500\n assert response.json.get('exception') == 'SanicJWTException'\n _, response = app.test_client.get('/test/scoped', headers={\n 'Authorization': 'Bearer {}'.format(access_token)})\n assert response.status == 500\n assert response.json.get('exception') == 'SanicJWTException'\n\n\ndef test_option_method_on_protected(app):\n sanic_app, sanic_jwt = app\n\n @sanic_app.route('/protected/options', methods=['OPTIONS'])\n @sanic_jwt.protected()\n async def my_protected_options(request):\n return text('', status=204)\n _, response = sanic_app.test_client.options('/protected/options')\n assert response.status == 204\n\n\ndef test_inject_user_regular(app_with_retrieve_user):\n sanic_app, sanic_jwt = app_with_retrieve_user\n _, response = sanic_app.test_client.post('/auth', json={'username':\n 'user1', 'password': 'abcxyz'})\n sanic_app.router.reset()\n\n 
@sanic_app.route('/protected/user')\n @inject_user()\n @protected()\n async def my_protected_user(request, user):\n return json({'user_id': user.user_id})\n access_token = response.json.get(sanic_jwt.config.access_token_name(), None\n )\n _, response = sanic_app.test_client.get('/auth/me', headers={\n 'Authorization': 'Bearer {}'.format(access_token)})\n assert response.json.get('me').get('user_id') == 1\n _, response = sanic_app.test_client.get('/protected/user', headers={\n 'Authorization': 'Bearer {}'.format(access_token)})\n assert response.status == 200\n assert response.json.get('user_id') == 1\n\n\ndef test_inject_user_on_instance(app_with_retrieve_user):\n sanic_app, sanic_jwt = app_with_retrieve_user\n _, response = sanic_app.test_client.post('/auth', json={'username':\n 'user1', 'password': 'abcxyz'})\n sanic_app.router.reset()\n\n @sanic_app.route('/protected/user')\n @sanic_jwt.inject_user()\n @sanic_jwt.protected()\n async def my_protected_user(request, user):\n return json({'user_id': user.user_id})\n access_token = response.json.get(sanic_jwt.config.access_token_name(), None\n )\n _, response = sanic_app.test_client.get('/auth/me', headers={\n 'Authorization': 'Bearer {}'.format(access_token)})\n assert response.json.get('me').get('user_id') == 1\n _, response = sanic_app.test_client.get('/protected/user', headers={\n 'Authorization': 'Bearer {}'.format(access_token)})\n assert response.status == 200\n assert response.json.get('user_id') == 1\n\n\n<mask token>\n\n\ndef test_inject_user_with_auth_mode_off(app_with_retrieve_user):\n\n async def retrieve_user(request, payload, *args, **kwargs):\n return {'user_id': 123}\n microservice_app = Sanic('sanic-jwt-test')\n microservice_sanic_jwt = Initialize(microservice_app, auth_mode=False,\n retrieve_user=retrieve_user)\n\n @microservice_app.route('/protected/user')\n @microservice_sanic_jwt.inject_user()\n @microservice_sanic_jwt.protected()\n async def my_protected_user(request, user):\n return 
json({'user_id': user.get('user_id')})\n sanic_app, sanic_jwt = app_with_retrieve_user\n _, response = sanic_app.test_client.post('/auth', json={'username':\n 'user1', 'password': 'abcxyz'})\n access_token = response.json.get(sanic_jwt.config.access_token_name(), None\n )\n _, response = microservice_app.test_client.get('/protected/user',\n headers={'Authorization': 'Bearer {}'.format(access_token)})\n assert response.status == 200\n assert response.json.get('user_id') == 123\n _, response = microservice_app.test_client.get('/protected/user')\n assert response.status == 401\n\n\ndef test_redirect_without_url(app):\n sanic_app, sanic_jwt = app\n\n @sanic_app.route('/index.html')\n def index(request):\n return html('<html><body>Home</body></html>')\n\n @sanic_app.route('/protected/static')\n @sanic_jwt.protected(redirect_on_fail=True)\n async def my_protected_static(request):\n return text('', status=200)\n request, response = sanic_app.test_client.get('/protected/static')\n assert response.status == 200\n assert response.body == b'<html><body>Home</body></html>'\n assert response.history\n assert response.history[0].status_code == 302\n\n\n<mask token>\n\n\ndef test_redirect_with_configured_url():\n sanic_app = Sanic('sanic-jwt-test')\n sanic_jwt = Initialize(sanic_app, auth_mode=False, login_redirect_url=\n '/unprotected')\n\n @sanic_app.route('/protected/static')\n @sanic_jwt.protected(redirect_on_fail=True)\n async def my_protected_static(request):\n return text('', status=200)\n\n @sanic_app.route('/unprotected')\n async def my_unprotected_goto(request):\n return text('unprotected content', status=200)\n _, response = sanic_app.test_client.get('/protected/static')\n assert response.status == 200 and response.text == 'unprotected content'\n\n\ndef test_authenticated_redirect(app_with_retrieve_user):\n sanic_app, sanic_jwt = app_with_retrieve_user\n _, response = sanic_app.test_client.post('/auth', json={'username':\n 'user1', 'password': 'abcxyz'})\n 
sanic_app.router.reset()\n\n @sanic_app.route('/protected/static')\n @sanic_jwt.protected(redirect_on_fail=True)\n async def my_protected_static(request):\n return text('protected content', status=200)\n\n @sanic_app.route('/unprotected')\n async def my_unprotected_goto(request):\n return text('unprotected content', status=200)\n access_token = response.json.get(sanic_jwt.config.access_token_name(), None\n )\n _, response = sanic_app.test_client.get('/protected/static', headers={\n 'Authorization': 'Bearer {}'.format(access_token)})\n assert response.status == 200 and response.text == 'protected content'\n", "step-4": "<mask token>\n\n\ndef test_forgotten_initialized_on_protected():\n blueprint = Blueprint('Test')\n\n @blueprint.get('/protected')\n @protected()\n def protected_hello_world(request):\n return json({'message': 'hello world'})\n\n @blueprint.route('/scoped')\n @scoped('something')\n async def scoped_endpoint(request):\n return json({'scoped': True})\n app = Sanic('sanic-jwt-test')\n sanicjwt = Initialize(blueprint, app=app, authenticate=lambda x: True)\n app.blueprint(blueprint, url_prefix='/test')\n _, response = app.test_client.post('/test/auth', json={'username':\n 'user1', 'password': 'abcxyz'})\n access_token = response.json.get(sanicjwt.config.access_token_name(), None)\n _, response = app.test_client.get('/test/protected', headers={\n 'Authorization': 'Bearer {}'.format(access_token)})\n assert response.status == 500\n assert response.json.get('exception') == 'SanicJWTException'\n _, response = app.test_client.get('/test/scoped', headers={\n 'Authorization': 'Bearer {}'.format(access_token)})\n assert response.status == 500\n assert response.json.get('exception') == 'SanicJWTException'\n\n\ndef test_option_method_on_protected(app):\n sanic_app, sanic_jwt = app\n\n @sanic_app.route('/protected/options', methods=['OPTIONS'])\n @sanic_jwt.protected()\n async def my_protected_options(request):\n return text('', status=204)\n _, response = 
sanic_app.test_client.options('/protected/options')\n assert response.status == 204\n\n\ndef test_inject_user_regular(app_with_retrieve_user):\n sanic_app, sanic_jwt = app_with_retrieve_user\n _, response = sanic_app.test_client.post('/auth', json={'username':\n 'user1', 'password': 'abcxyz'})\n sanic_app.router.reset()\n\n @sanic_app.route('/protected/user')\n @inject_user()\n @protected()\n async def my_protected_user(request, user):\n return json({'user_id': user.user_id})\n access_token = response.json.get(sanic_jwt.config.access_token_name(), None\n )\n _, response = sanic_app.test_client.get('/auth/me', headers={\n 'Authorization': 'Bearer {}'.format(access_token)})\n assert response.json.get('me').get('user_id') == 1\n _, response = sanic_app.test_client.get('/protected/user', headers={\n 'Authorization': 'Bearer {}'.format(access_token)})\n assert response.status == 200\n assert response.json.get('user_id') == 1\n\n\ndef test_inject_user_on_instance(app_with_retrieve_user):\n sanic_app, sanic_jwt = app_with_retrieve_user\n _, response = sanic_app.test_client.post('/auth', json={'username':\n 'user1', 'password': 'abcxyz'})\n sanic_app.router.reset()\n\n @sanic_app.route('/protected/user')\n @sanic_jwt.inject_user()\n @sanic_jwt.protected()\n async def my_protected_user(request, user):\n return json({'user_id': user.user_id})\n access_token = response.json.get(sanic_jwt.config.access_token_name(), None\n )\n _, response = sanic_app.test_client.get('/auth/me', headers={\n 'Authorization': 'Bearer {}'.format(access_token)})\n assert response.json.get('me').get('user_id') == 1\n _, response = sanic_app.test_client.get('/protected/user', headers={\n 'Authorization': 'Bearer {}'.format(access_token)})\n assert response.status == 200\n assert response.json.get('user_id') == 1\n\n\ndef test_inject_user_on_instance_bp(app_with_retrieve_user):\n sanic_app, sanic_jwt = app_with_retrieve_user\n _, response = sanic_app.test_client.post('/auth', json={'username':\n 
'user1', 'password': 'abcxyz'})\n sanic_app.router.reset()\n\n @sanic_app.route('/protected/user')\n @sanic_jwt.inject_user()\n @sanic_jwt.protected()\n async def my_protected_user(request, user):\n return json({'user_id': user.user_id})\n access_token = response.json.get(sanic_jwt.config.access_token_name(), None\n )\n _, response = sanic_app.test_client.get('/auth/me', headers={\n 'Authorization': 'Bearer {}'.format(access_token)})\n assert response.json.get('me').get('user_id') == 1\n _, response = sanic_app.test_client.get('/protected/user', headers={\n 'Authorization': 'Bearer {}'.format(access_token)})\n assert response.status == 200\n assert response.json.get('user_id') == 1\n\n\ndef test_inject_user_on_instance_non_async(app_with_retrieve_user):\n sanic_app, sanic_jwt = app_with_retrieve_user\n _, response = sanic_app.test_client.post('/auth', json={'username':\n 'user1', 'password': 'abcxyz'})\n sanic_app.router.reset()\n\n @sanic_app.route('/protected/user')\n @sanic_jwt.inject_user()\n @sanic_jwt.protected()\n def my_protected_user(request, user):\n return json({'user_id': user.user_id})\n access_token = response.json.get(sanic_jwt.config.access_token_name(), None\n )\n _, response = sanic_app.test_client.get('/auth/me', headers={\n 'Authorization': 'Bearer {}'.format(access_token)})\n assert response.json.get('me').get('user_id') == 1\n _, response = sanic_app.test_client.get('/protected/user', headers={\n 'Authorization': 'Bearer {}'.format(access_token)})\n assert response.status == 200\n assert response.json.get('user_id') == 1\n\n\ndef test_inject_user_with_auth_mode_off(app_with_retrieve_user):\n\n async def retrieve_user(request, payload, *args, **kwargs):\n return {'user_id': 123}\n microservice_app = Sanic('sanic-jwt-test')\n microservice_sanic_jwt = Initialize(microservice_app, auth_mode=False,\n retrieve_user=retrieve_user)\n\n @microservice_app.route('/protected/user')\n @microservice_sanic_jwt.inject_user()\n 
@microservice_sanic_jwt.protected()\n async def my_protected_user(request, user):\n return json({'user_id': user.get('user_id')})\n sanic_app, sanic_jwt = app_with_retrieve_user\n _, response = sanic_app.test_client.post('/auth', json={'username':\n 'user1', 'password': 'abcxyz'})\n access_token = response.json.get(sanic_jwt.config.access_token_name(), None\n )\n _, response = microservice_app.test_client.get('/protected/user',\n headers={'Authorization': 'Bearer {}'.format(access_token)})\n assert response.status == 200\n assert response.json.get('user_id') == 123\n _, response = microservice_app.test_client.get('/protected/user')\n assert response.status == 401\n\n\ndef test_redirect_without_url(app):\n sanic_app, sanic_jwt = app\n\n @sanic_app.route('/index.html')\n def index(request):\n return html('<html><body>Home</body></html>')\n\n @sanic_app.route('/protected/static')\n @sanic_jwt.protected(redirect_on_fail=True)\n async def my_protected_static(request):\n return text('', status=200)\n request, response = sanic_app.test_client.get('/protected/static')\n assert response.status == 200\n assert response.body == b'<html><body>Home</body></html>'\n assert response.history\n assert response.history[0].status_code == 302\n\n\ndef test_redirect_with_decorator_url(app):\n sanic_app, sanic_jwt = app\n\n @sanic_app.route('/protected/static')\n @sanic_jwt.protected(redirect_on_fail=True, redirect_url='/unprotected')\n async def my_protected_static(request):\n return text('', status=200)\n\n @sanic_app.route('/unprotected')\n async def my_unprotected_goto(request):\n return text('unprotected content', status=200)\n _, response = sanic_app.test_client.get('/protected/static')\n assert response.status == 200 and response.text == 'unprotected content'\n\n\ndef test_redirect_with_configured_url():\n sanic_app = Sanic('sanic-jwt-test')\n sanic_jwt = Initialize(sanic_app, auth_mode=False, login_redirect_url=\n '/unprotected')\n\n @sanic_app.route('/protected/static')\n 
@sanic_jwt.protected(redirect_on_fail=True)\n async def my_protected_static(request):\n return text('', status=200)\n\n @sanic_app.route('/unprotected')\n async def my_unprotected_goto(request):\n return text('unprotected content', status=200)\n _, response = sanic_app.test_client.get('/protected/static')\n assert response.status == 200 and response.text == 'unprotected content'\n\n\ndef test_authenticated_redirect(app_with_retrieve_user):\n sanic_app, sanic_jwt = app_with_retrieve_user\n _, response = sanic_app.test_client.post('/auth', json={'username':\n 'user1', 'password': 'abcxyz'})\n sanic_app.router.reset()\n\n @sanic_app.route('/protected/static')\n @sanic_jwt.protected(redirect_on_fail=True)\n async def my_protected_static(request):\n return text('protected content', status=200)\n\n @sanic_app.route('/unprotected')\n async def my_unprotected_goto(request):\n return text('unprotected content', status=200)\n access_token = response.json.get(sanic_jwt.config.access_token_name(), None\n )\n _, response = sanic_app.test_client.get('/protected/static', headers={\n 'Authorization': 'Bearer {}'.format(access_token)})\n assert response.status == 200 and response.text == 'protected content'\n", "step-5": "from sanic import Sanic\nfrom sanic.blueprints import Blueprint\nfrom sanic.response import html, json, text\n\nfrom sanic_jwt import Initialize\nfrom sanic_jwt.decorators import inject_user, protected, scoped\n\n\ndef test_forgotten_initialized_on_protected():\n blueprint = Blueprint(\"Test\")\n\n @blueprint.get(\"/protected\")\n @protected()\n def protected_hello_world(request):\n return json({\"message\": \"hello world\"})\n\n @blueprint.route(\"/scoped\")\n @scoped(\"something\")\n async def scoped_endpoint(request):\n return json({\"scoped\": True})\n\n app = Sanic(\"sanic-jwt-test\")\n\n sanicjwt = Initialize(blueprint, app=app, authenticate=lambda x: True)\n\n app.blueprint(blueprint, url_prefix=\"/test\")\n\n _, response = app.test_client.post(\n 
\"/test/auth\", json={\"username\": \"user1\", \"password\": \"abcxyz\"}\n )\n\n access_token = response.json.get(sanicjwt.config.access_token_name(), None)\n\n _, response = app.test_client.get(\n \"/test/protected\",\n headers={\"Authorization\": \"Bearer {}\".format(access_token)},\n )\n\n assert response.status == 500\n assert response.json.get(\"exception\") == \"SanicJWTException\"\n\n _, response = app.test_client.get(\n \"/test/scoped\",\n headers={\"Authorization\": \"Bearer {}\".format(access_token)},\n )\n\n assert response.status == 500\n assert response.json.get(\"exception\") == \"SanicJWTException\"\n\n\ndef test_option_method_on_protected(app):\n sanic_app, sanic_jwt = app\n\n @sanic_app.route(\"/protected/options\", methods=[\"OPTIONS\"])\n @sanic_jwt.protected()\n async def my_protected_options(request):\n return text(\"\", status=204)\n\n _, response = sanic_app.test_client.options(\"/protected/options\")\n\n assert response.status == 204\n\n\ndef test_inject_user_regular(app_with_retrieve_user):\n sanic_app, sanic_jwt = app_with_retrieve_user\n _, response = sanic_app.test_client.post(\n \"/auth\", json={\"username\": \"user1\", \"password\": \"abcxyz\"}\n )\n\n sanic_app.router.reset()\n\n @sanic_app.route(\"/protected/user\")\n @inject_user()\n @protected()\n async def my_protected_user(request, user):\n return json({\"user_id\": user.user_id})\n\n access_token = response.json.get(\n sanic_jwt.config.access_token_name(), None\n )\n\n _, response = sanic_app.test_client.get(\n \"/auth/me\", headers={\"Authorization\": \"Bearer {}\".format(access_token)}\n )\n\n assert response.json.get(\"me\").get(\"user_id\") == 1\n\n _, response = sanic_app.test_client.get(\n \"/protected/user\",\n headers={\"Authorization\": \"Bearer {}\".format(access_token)},\n )\n assert response.status == 200\n assert response.json.get(\"user_id\") == 1\n\n\ndef test_inject_user_on_instance(app_with_retrieve_user):\n sanic_app, sanic_jwt = app_with_retrieve_user\n _, 
response = sanic_app.test_client.post(\n \"/auth\", json={\"username\": \"user1\", \"password\": \"abcxyz\"}\n )\n\n sanic_app.router.reset()\n\n @sanic_app.route(\"/protected/user\")\n @sanic_jwt.inject_user()\n @sanic_jwt.protected()\n async def my_protected_user(request, user):\n return json({\"user_id\": user.user_id})\n\n access_token = response.json.get(\n sanic_jwt.config.access_token_name(), None\n )\n\n _, response = sanic_app.test_client.get(\n \"/auth/me\", headers={\"Authorization\": \"Bearer {}\".format(access_token)}\n )\n\n assert response.json.get(\"me\").get(\"user_id\") == 1\n\n _, response = sanic_app.test_client.get(\n \"/protected/user\",\n headers={\"Authorization\": \"Bearer {}\".format(access_token)},\n )\n assert response.status == 200\n assert response.json.get(\"user_id\") == 1\n\n\ndef test_inject_user_on_instance_bp(app_with_retrieve_user):\n sanic_app, sanic_jwt = app_with_retrieve_user\n _, response = sanic_app.test_client.post(\n \"/auth\", json={\"username\": \"user1\", \"password\": \"abcxyz\"}\n )\n\n sanic_app.router.reset()\n\n @sanic_app.route(\"/protected/user\")\n @sanic_jwt.inject_user()\n @sanic_jwt.protected()\n async def my_protected_user(request, user):\n return json({\"user_id\": user.user_id})\n\n access_token = response.json.get(\n sanic_jwt.config.access_token_name(), None\n )\n\n _, response = sanic_app.test_client.get(\n \"/auth/me\", headers={\"Authorization\": \"Bearer {}\".format(access_token)}\n )\n\n assert response.json.get(\"me\").get(\"user_id\") == 1\n\n _, response = sanic_app.test_client.get(\n \"/protected/user\",\n headers={\"Authorization\": \"Bearer {}\".format(access_token)},\n )\n assert response.status == 200\n assert response.json.get(\"user_id\") == 1\n\n\ndef test_inject_user_on_instance_non_async(app_with_retrieve_user):\n sanic_app, sanic_jwt = app_with_retrieve_user\n _, response = sanic_app.test_client.post(\n \"/auth\", json={\"username\": \"user1\", \"password\": \"abcxyz\"}\n )\n\n 
sanic_app.router.reset()\n\n @sanic_app.route(\"/protected/user\")\n @sanic_jwt.inject_user()\n @sanic_jwt.protected()\n def my_protected_user(request, user):\n return json({\"user_id\": user.user_id})\n\n access_token = response.json.get(\n sanic_jwt.config.access_token_name(), None\n )\n\n _, response = sanic_app.test_client.get(\n \"/auth/me\", headers={\"Authorization\": \"Bearer {}\".format(access_token)}\n )\n\n assert response.json.get(\"me\").get(\"user_id\") == 1\n\n _, response = sanic_app.test_client.get(\n \"/protected/user\",\n headers={\"Authorization\": \"Bearer {}\".format(access_token)},\n )\n assert response.status == 200\n assert response.json.get(\"user_id\") == 1\n\n\ndef test_inject_user_with_auth_mode_off(app_with_retrieve_user):\n async def retrieve_user(request, payload, *args, **kwargs):\n return {\"user_id\": 123}\n\n microservice_app = Sanic(\"sanic-jwt-test\")\n microservice_sanic_jwt = Initialize(\n microservice_app, auth_mode=False, retrieve_user=retrieve_user\n )\n\n @microservice_app.route(\"/protected/user\")\n @microservice_sanic_jwt.inject_user()\n @microservice_sanic_jwt.protected()\n async def my_protected_user(request, user):\n return json({\"user_id\": user.get(\"user_id\")})\n\n sanic_app, sanic_jwt = app_with_retrieve_user\n _, response = sanic_app.test_client.post(\n \"/auth\", json={\"username\": \"user1\", \"password\": \"abcxyz\"}\n )\n\n access_token = response.json.get(\n sanic_jwt.config.access_token_name(), None\n )\n\n _, response = microservice_app.test_client.get(\n \"/protected/user\",\n headers={\"Authorization\": \"Bearer {}\".format(access_token)},\n )\n\n assert response.status == 200\n assert response.json.get(\"user_id\") == 123\n\n _, response = microservice_app.test_client.get(\"/protected/user\")\n\n assert response.status == 401\n\n\ndef test_redirect_without_url(app):\n sanic_app, sanic_jwt = app\n\n @sanic_app.route(\"/index.html\")\n def index(request):\n return 
html(\"<html><body>Home</body></html>\")\n\n @sanic_app.route(\"/protected/static\")\n @sanic_jwt.protected(redirect_on_fail=True)\n async def my_protected_static(request):\n return text(\"\", status=200)\n\n request, response = sanic_app.test_client.get(\"/protected/static\")\n\n assert response.status == 200\n assert response.body == b\"<html><body>Home</body></html>\"\n assert response.history\n assert response.history[0].status_code == 302\n\n\ndef test_redirect_with_decorator_url(app):\n sanic_app, sanic_jwt = app\n\n @sanic_app.route(\"/protected/static\")\n @sanic_jwt.protected(redirect_on_fail=True, redirect_url=\"/unprotected\")\n async def my_protected_static(request):\n return text(\"\", status=200)\n\n @sanic_app.route(\"/unprotected\")\n async def my_unprotected_goto(request):\n return text(\"unprotected content\", status=200)\n\n _, response = sanic_app.test_client.get(\"/protected/static\")\n\n assert response.status == 200 and response.text == \"unprotected content\"\n\n\ndef test_redirect_with_configured_url():\n sanic_app = Sanic(\"sanic-jwt-test\")\n sanic_jwt = Initialize(\n sanic_app, auth_mode=False, login_redirect_url=\"/unprotected\"\n )\n\n @sanic_app.route(\"/protected/static\")\n @sanic_jwt.protected(redirect_on_fail=True)\n async def my_protected_static(request):\n return text(\"\", status=200)\n\n @sanic_app.route(\"/unprotected\")\n async def my_unprotected_goto(request):\n return text(\"unprotected content\", status=200)\n\n _, response = sanic_app.test_client.get(\"/protected/static\")\n\n assert response.status == 200 and response.text == \"unprotected content\"\n\n\ndef test_authenticated_redirect(app_with_retrieve_user):\n sanic_app, sanic_jwt = app_with_retrieve_user\n _, response = sanic_app.test_client.post(\n \"/auth\", json={\"username\": \"user1\", \"password\": \"abcxyz\"}\n )\n\n sanic_app.router.reset()\n\n @sanic_app.route(\"/protected/static\")\n @sanic_jwt.protected(redirect_on_fail=True)\n async def 
my_protected_static(request):\n return text(\"protected content\", status=200)\n\n @sanic_app.route(\"/unprotected\")\n async def my_unprotected_goto(request):\n return text(\"unprotected content\", status=200)\n\n access_token = response.json.get(\n sanic_jwt.config.access_token_name(), None\n )\n\n _, response = sanic_app.test_client.get(\n \"/protected/static\",\n headers={\"Authorization\": \"Bearer {}\".format(access_token)},\n )\n\n assert response.status == 200 and response.text == \"protected content\"\n", "step-ids": [ 6, 7, 8, 11, 13 ] }
[ 6, 7, 8, 11, 13 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> print(X) <|reserved_special_token_0|> print(A * B) print(X[0]) print(X[0][1]) for row in X: print(row) <|reserved_special_token_0|> print(newX) print(X > 15) <|reserved_special_token_0|> plt.plot(x, y) plt.show() <|reserved_special_token_0|> plt.plot(x, y1, label='sin') plt.plot(x, y2, linestyle='--', label='cos') plt.xlabel('x') plt.ylabel('y') plt.title('sin & cos') plt.legend() plt.show() <|reserved_special_token_0|> plt.imshow(img) plt.show() <|reserved_special_token_1|> <|reserved_special_token_0|> X = np.array([[51, 55], [14, 19], [0, 4]]) print(X) A = np.array([[1, 2], [3, 4]]) B = np.array([10, 20]) print(A * B) print(X[0]) print(X[0][1]) for row in X: print(row) newX = X.flatten() print(newX) print(X > 15) x = np.arange(0, 6, 0.1) y = np.sin(x) plt.plot(x, y) plt.show() y1 = np.sin(x) y2 = np.cos(x) plt.plot(x, y1, label='sin') plt.plot(x, y2, linestyle='--', label='cos') plt.xlabel('x') plt.ylabel('y') plt.title('sin & cos') plt.legend() plt.show() img = imread('/Users/jiwon/Downloads/R800x0.png') plt.imshow(img) plt.show() <|reserved_special_token_1|> import numpy as np import matplotlib.pyplot as plt from matplotlib.image import imread X = np.array([[51, 55], [14, 19], [0, 4]]) print(X) A = np.array([[1, 2], [3, 4]]) B = np.array([10, 20]) print(A * B) print(X[0]) print(X[0][1]) for row in X: print(row) newX = X.flatten() print(newX) print(X > 15) x = np.arange(0, 6, 0.1) y = np.sin(x) plt.plot(x, y) plt.show() y1 = np.sin(x) y2 = np.cos(x) plt.plot(x, y1, label='sin') plt.plot(x, y2, linestyle='--', label='cos') plt.xlabel('x') plt.ylabel('y') plt.title('sin & cos') plt.legend() plt.show() img = imread('/Users/jiwon/Downloads/R800x0.png') plt.imshow(img) plt.show() <|reserved_special_token_1|> import numpy as np import matplotlib.pyplot as plt from matplotlib.image import imread X = np.array([[51, 55], [14, 19], [0, 4]]) print(X) A = np.array([[1, 2], [3, 4]]) B = 
np.array([10, 20]) print(A * B) print(X[0]) print(X[0][1]) for row in X: print(row) newX = X.flatten() print(newX) print(X > 15) # 데이터 준비 x = np.arange(0, 6, 0.1) # 0에서 6까지 0.1 간격으로 생 y = np.sin(x) # 그래프 그리기 plt.plot(x, y) plt.show() # 데이터 준비 y1 = np.sin(x) y2 = np.cos(x) # 그래프 그리기 plt.plot(x, y1, label="sin") plt.plot(x, y2, linestyle="--", label="cos") # cos 함수는 점선으로 그리기 plt.xlabel("x") # x축 이름 plt.ylabel("y") # y축 이름 plt.title('sin & cos') # 제목 plt.legend() plt.show() # 이미지 그리기 img = imread('/Users/jiwon/Downloads/R800x0.png') #이미지 읽어오기 plt.imshow(img) plt.show()
flexible
{ "blob_id": "ba702a9c5d9d31e48b047c106d77cf1707031d70", "index": 1795, "step-1": "<mask token>\n", "step-2": "<mask token>\nprint(X)\n<mask token>\nprint(A * B)\nprint(X[0])\nprint(X[0][1])\nfor row in X:\n print(row)\n<mask token>\nprint(newX)\nprint(X > 15)\n<mask token>\nplt.plot(x, y)\nplt.show()\n<mask token>\nplt.plot(x, y1, label='sin')\nplt.plot(x, y2, linestyle='--', label='cos')\nplt.xlabel('x')\nplt.ylabel('y')\nplt.title('sin & cos')\nplt.legend()\nplt.show()\n<mask token>\nplt.imshow(img)\nplt.show()\n", "step-3": "<mask token>\nX = np.array([[51, 55], [14, 19], [0, 4]])\nprint(X)\nA = np.array([[1, 2], [3, 4]])\nB = np.array([10, 20])\nprint(A * B)\nprint(X[0])\nprint(X[0][1])\nfor row in X:\n print(row)\nnewX = X.flatten()\nprint(newX)\nprint(X > 15)\nx = np.arange(0, 6, 0.1)\ny = np.sin(x)\nplt.plot(x, y)\nplt.show()\ny1 = np.sin(x)\ny2 = np.cos(x)\nplt.plot(x, y1, label='sin')\nplt.plot(x, y2, linestyle='--', label='cos')\nplt.xlabel('x')\nplt.ylabel('y')\nplt.title('sin & cos')\nplt.legend()\nplt.show()\nimg = imread('/Users/jiwon/Downloads/R800x0.png')\nplt.imshow(img)\nplt.show()\n", "step-4": "import numpy as np\nimport matplotlib.pyplot as plt\nfrom matplotlib.image import imread\nX = np.array([[51, 55], [14, 19], [0, 4]])\nprint(X)\nA = np.array([[1, 2], [3, 4]])\nB = np.array([10, 20])\nprint(A * B)\nprint(X[0])\nprint(X[0][1])\nfor row in X:\n print(row)\nnewX = X.flatten()\nprint(newX)\nprint(X > 15)\nx = np.arange(0, 6, 0.1)\ny = np.sin(x)\nplt.plot(x, y)\nplt.show()\ny1 = np.sin(x)\ny2 = np.cos(x)\nplt.plot(x, y1, label='sin')\nplt.plot(x, y2, linestyle='--', label='cos')\nplt.xlabel('x')\nplt.ylabel('y')\nplt.title('sin & cos')\nplt.legend()\nplt.show()\nimg = imread('/Users/jiwon/Downloads/R800x0.png')\nplt.imshow(img)\nplt.show()\n", "step-5": "import numpy as np\nimport matplotlib.pyplot as plt\nfrom matplotlib.image import imread\n\nX = np.array([[51, 55], [14, 19], [0, 4]])\nprint(X)\n\nA = np.array([[1, 2], [3, 4]])\nB = 
np.array([10, 20])\nprint(A * B)\n\nprint(X[0])\nprint(X[0][1])\n\nfor row in X:\n print(row)\n\nnewX = X.flatten()\nprint(newX)\n\nprint(X > 15)\n\n# 데이터 준비\nx = np.arange(0, 6, 0.1) # 0에서 6까지 0.1 간격으로 생\ny = np.sin(x)\n\n# 그래프 그리기\nplt.plot(x, y)\nplt.show()\n\n# 데이터 준비\ny1 = np.sin(x)\ny2 = np.cos(x)\n\n# 그래프 그리기\nplt.plot(x, y1, label=\"sin\")\nplt.plot(x, y2, linestyle=\"--\", label=\"cos\") # cos 함수는 점선으로 그리기\nplt.xlabel(\"x\") # x축 이름\nplt.ylabel(\"y\") # y축 이름\nplt.title('sin & cos') # 제목\nplt.legend()\nplt.show()\n\n# 이미지 그리기\nimg = imread('/Users/jiwon/Downloads/R800x0.png') #이미지 읽어오기\n\nplt.imshow(img)\nplt.show()", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
import numpy as np def shufflelists(lists): li = np.random.permutation(len(lists[0]) lo = [] for i in range(len(li)):
normal
{ "blob_id": "fc01c6fb812fe78ca04496494d68fcc90ae706f5", "index": 3605, "step-1": "import numpy as np\n\ndef shufflelists(lists):\n li = np.random.permutation(len(lists[0])\n lo = []\n for i in range(len(li)):\n \n", "step-2": null, "step-3": null, "step-4": null, "step-5": null, "step-ids": [ 0 ] }
[ 0 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> for line in fhand: line.tranc <|reserved_special_token_1|> <|reserved_special_token_0|> fhand = open('romeo-full.txt') counts = dict() for line in fhand: line.tranc <|reserved_special_token_1|> import string fhand = open('romeo-full.txt') counts = dict() for line in fhand: line.tranc <|reserved_special_token_1|> import string fhand = open("romeo-full.txt") counts = dict() for line in fhand: line.tranc
flexible
{ "blob_id": "5493887e32dbe7ae27eca79d28da8488183b37a3", "index": 8792, "step-1": "<mask token>\n", "step-2": "<mask token>\nfor line in fhand:\n line.tranc\n", "step-3": "<mask token>\nfhand = open('romeo-full.txt')\ncounts = dict()\nfor line in fhand:\n line.tranc\n", "step-4": "import string\nfhand = open('romeo-full.txt')\ncounts = dict()\nfor line in fhand:\n line.tranc\n", "step-5": "import string\nfhand = open(\"romeo-full.txt\")\ncounts = dict()\nfor line in fhand:\n \n line.tranc", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
from django.conf import settings from django.http import HttpResponse, HttpResponseBadRequest, HttpResponseForbidden from django.views.decorators.csrf import csrf_exempt from linebot import LineBotApi, WebhookParser from linebot.exceptions import InvalidSignatureError, LineBotApiError from linebot.models import MessageEvent, TextMessage from module import func from urllib.parse import parse_qsl from func5api.models import users from django.shortcuts import render line_bot_api = LineBotApi(settings.LINE_CHANNEL_ACCESS_TOKEN) parser = WebhookParser(settings.LINE_CHANNEL_SECRET) @csrf_exempt def callback(request): if request.method == 'POST': signature = request.META['HTTP_X_LINE_SIGNATURE'] body = request.body.decode('utf-8') try: events = parser.parse(body, signature) except InvalidSignatureError: return HttpResponseForbidden() except LineBotApiError: return HttpResponseBadRequest() for event in events: if isinstance(event, MessageEvent): user_id = event.source.user_id #取得user_id if not(users.objects.filter(uid = user_id).exists()): #將user_id存入資料庫中 unit = users.objects.create(uid = user_id) unit.save() #將user_id上傳至資料庫 if isinstance(event.message, TextMessage): mtext = event.message.text if mtext == '@修繕申請': func.sendFix(event, user_id) elif mtext =='@修繕查詢': func.fix_inquire(event, user_id) elif mtext == 'admin_mode': func.judge(event, mtext, user_id) elif mtext[:6] == '123456' and len(mtext) > 6: #all func.judge(event, mtext, user_id) elif mtext[:2] == '++' and len(mtext) > 2: #specify func.judge(event, mtext, user_id) elif mtext[:2] == '##' and len(mtext) > 2: func.manageForm(event, mtext, user_id) elif mtext[:3] == '!!!' and len(mtext) > 3: func.personData(event, mtext, user_id) return HttpResponse() else: return HttpResponseBadRequest() def listall(request): user = users.objects.all().order_by('name') return render(request, "listall.html", locals())
normal
{ "blob_id": "19f202c32e1cf9f7ab2663827f1f98080f70b83e", "index": 8313, "step-1": "<mask token>\n\n\n@csrf_exempt\ndef callback(request):\n if request.method == 'POST':\n signature = request.META['HTTP_X_LINE_SIGNATURE']\n body = request.body.decode('utf-8')\n try:\n events = parser.parse(body, signature)\n except InvalidSignatureError:\n return HttpResponseForbidden()\n except LineBotApiError:\n return HttpResponseBadRequest()\n for event in events:\n if isinstance(event, MessageEvent):\n user_id = event.source.user_id\n if not users.objects.filter(uid=user_id).exists():\n unit = users.objects.create(uid=user_id)\n unit.save()\n if isinstance(event.message, TextMessage):\n mtext = event.message.text\n if mtext == '@修繕申請':\n func.sendFix(event, user_id)\n elif mtext == '@修繕查詢':\n func.fix_inquire(event, user_id)\n elif mtext == 'admin_mode':\n func.judge(event, mtext, user_id)\n elif mtext[:6] == '123456' and len(mtext) > 6:\n func.judge(event, mtext, user_id)\n elif mtext[:2] == '++' and len(mtext) > 2:\n func.judge(event, mtext, user_id)\n elif mtext[:2] == '##' and len(mtext) > 2:\n func.manageForm(event, mtext, user_id)\n elif mtext[:3] == '!!!' 
and len(mtext) > 3:\n func.personData(event, mtext, user_id)\n return HttpResponse()\n else:\n return HttpResponseBadRequest()\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\n@csrf_exempt\ndef callback(request):\n if request.method == 'POST':\n signature = request.META['HTTP_X_LINE_SIGNATURE']\n body = request.body.decode('utf-8')\n try:\n events = parser.parse(body, signature)\n except InvalidSignatureError:\n return HttpResponseForbidden()\n except LineBotApiError:\n return HttpResponseBadRequest()\n for event in events:\n if isinstance(event, MessageEvent):\n user_id = event.source.user_id\n if not users.objects.filter(uid=user_id).exists():\n unit = users.objects.create(uid=user_id)\n unit.save()\n if isinstance(event.message, TextMessage):\n mtext = event.message.text\n if mtext == '@修繕申請':\n func.sendFix(event, user_id)\n elif mtext == '@修繕查詢':\n func.fix_inquire(event, user_id)\n elif mtext == 'admin_mode':\n func.judge(event, mtext, user_id)\n elif mtext[:6] == '123456' and len(mtext) > 6:\n func.judge(event, mtext, user_id)\n elif mtext[:2] == '++' and len(mtext) > 2:\n func.judge(event, mtext, user_id)\n elif mtext[:2] == '##' and len(mtext) > 2:\n func.manageForm(event, mtext, user_id)\n elif mtext[:3] == '!!!' 
and len(mtext) > 3:\n func.personData(event, mtext, user_id)\n return HttpResponse()\n else:\n return HttpResponseBadRequest()\n\n\ndef listall(request):\n user = users.objects.all().order_by('name')\n return render(request, 'listall.html', locals())\n", "step-3": "<mask token>\nline_bot_api = LineBotApi(settings.LINE_CHANNEL_ACCESS_TOKEN)\nparser = WebhookParser(settings.LINE_CHANNEL_SECRET)\n\n\n@csrf_exempt\ndef callback(request):\n if request.method == 'POST':\n signature = request.META['HTTP_X_LINE_SIGNATURE']\n body = request.body.decode('utf-8')\n try:\n events = parser.parse(body, signature)\n except InvalidSignatureError:\n return HttpResponseForbidden()\n except LineBotApiError:\n return HttpResponseBadRequest()\n for event in events:\n if isinstance(event, MessageEvent):\n user_id = event.source.user_id\n if not users.objects.filter(uid=user_id).exists():\n unit = users.objects.create(uid=user_id)\n unit.save()\n if isinstance(event.message, TextMessage):\n mtext = event.message.text\n if mtext == '@修繕申請':\n func.sendFix(event, user_id)\n elif mtext == '@修繕查詢':\n func.fix_inquire(event, user_id)\n elif mtext == 'admin_mode':\n func.judge(event, mtext, user_id)\n elif mtext[:6] == '123456' and len(mtext) > 6:\n func.judge(event, mtext, user_id)\n elif mtext[:2] == '++' and len(mtext) > 2:\n func.judge(event, mtext, user_id)\n elif mtext[:2] == '##' and len(mtext) > 2:\n func.manageForm(event, mtext, user_id)\n elif mtext[:3] == '!!!' 
and len(mtext) > 3:\n func.personData(event, mtext, user_id)\n return HttpResponse()\n else:\n return HttpResponseBadRequest()\n\n\ndef listall(request):\n user = users.objects.all().order_by('name')\n return render(request, 'listall.html', locals())\n", "step-4": "from django.conf import settings\nfrom django.http import HttpResponse, HttpResponseBadRequest, HttpResponseForbidden\nfrom django.views.decorators.csrf import csrf_exempt\nfrom linebot import LineBotApi, WebhookParser\nfrom linebot.exceptions import InvalidSignatureError, LineBotApiError\nfrom linebot.models import MessageEvent, TextMessage\nfrom module import func\nfrom urllib.parse import parse_qsl\nfrom func5api.models import users\nfrom django.shortcuts import render\nline_bot_api = LineBotApi(settings.LINE_CHANNEL_ACCESS_TOKEN)\nparser = WebhookParser(settings.LINE_CHANNEL_SECRET)\n\n\n@csrf_exempt\ndef callback(request):\n if request.method == 'POST':\n signature = request.META['HTTP_X_LINE_SIGNATURE']\n body = request.body.decode('utf-8')\n try:\n events = parser.parse(body, signature)\n except InvalidSignatureError:\n return HttpResponseForbidden()\n except LineBotApiError:\n return HttpResponseBadRequest()\n for event in events:\n if isinstance(event, MessageEvent):\n user_id = event.source.user_id\n if not users.objects.filter(uid=user_id).exists():\n unit = users.objects.create(uid=user_id)\n unit.save()\n if isinstance(event.message, TextMessage):\n mtext = event.message.text\n if mtext == '@修繕申請':\n func.sendFix(event, user_id)\n elif mtext == '@修繕查詢':\n func.fix_inquire(event, user_id)\n elif mtext == 'admin_mode':\n func.judge(event, mtext, user_id)\n elif mtext[:6] == '123456' and len(mtext) > 6:\n func.judge(event, mtext, user_id)\n elif mtext[:2] == '++' and len(mtext) > 2:\n func.judge(event, mtext, user_id)\n elif mtext[:2] == '##' and len(mtext) > 2:\n func.manageForm(event, mtext, user_id)\n elif mtext[:3] == '!!!' 
and len(mtext) > 3:\n func.personData(event, mtext, user_id)\n return HttpResponse()\n else:\n return HttpResponseBadRequest()\n\n\ndef listall(request):\n user = users.objects.all().order_by('name')\n return render(request, 'listall.html', locals())\n", "step-5": "from django.conf import settings\r\nfrom django.http import HttpResponse, HttpResponseBadRequest, HttpResponseForbidden\r\nfrom django.views.decorators.csrf import csrf_exempt\r\n\r\nfrom linebot import LineBotApi, WebhookParser\r\nfrom linebot.exceptions import InvalidSignatureError, LineBotApiError\r\nfrom linebot.models import MessageEvent, TextMessage\r\nfrom module import func\r\nfrom urllib.parse import parse_qsl\r\nfrom func5api.models import users\r\nfrom django.shortcuts import render\r\n\r\nline_bot_api = LineBotApi(settings.LINE_CHANNEL_ACCESS_TOKEN)\r\nparser = WebhookParser(settings.LINE_CHANNEL_SECRET)\r\n\r\n@csrf_exempt\r\ndef callback(request):\r\n if request.method == 'POST':\r\n signature = request.META['HTTP_X_LINE_SIGNATURE']\r\n body = request.body.decode('utf-8')\r\n try:\r\n events = parser.parse(body, signature)\r\n except InvalidSignatureError:\r\n return HttpResponseForbidden()\r\n except LineBotApiError:\r\n return HttpResponseBadRequest()\r\n\r\n for event in events:\r\n if isinstance(event, MessageEvent):\r\n user_id = event.source.user_id #取得user_id\r\n if not(users.objects.filter(uid = user_id).exists()): #將user_id存入資料庫中\r\n unit = users.objects.create(uid = user_id)\r\n unit.save() #將user_id上傳至資料庫\r\n if isinstance(event.message, TextMessage):\r\n mtext = event.message.text\r\n if mtext == '@修繕申請':\r\n func.sendFix(event, user_id)\r\n elif mtext =='@修繕查詢':\r\n func.fix_inquire(event, user_id)\r\n elif mtext == 'admin_mode':\r\n func.judge(event, mtext, user_id)\r\n elif mtext[:6] == '123456' and len(mtext) > 6: #all\r\n func.judge(event, mtext, user_id)\r\n elif mtext[:2] == '++' and len(mtext) > 2: #specify\r\n func.judge(event, mtext, user_id)\r\n elif mtext[:2] == '##' 
and len(mtext) > 2:\r\n func.manageForm(event, mtext, user_id)\r\n elif mtext[:3] == '!!!' and len(mtext) > 3:\r\n func.personData(event, mtext, user_id)\r\n \r\n return HttpResponse()\r\n\r\n else:\r\n return HttpResponseBadRequest()\r\n \r\ndef listall(request):\r\n user = users.objects.all().order_by('name')\r\n return render(request, \"listall.html\", locals())\r\n", "step-ids": [ 1, 2, 3, 4, 5 ] }
[ 1, 2, 3, 4, 5 ]
# coding: utf-8 # ## Estimating Travel Time # # # The objective of this document is proposing a prediction model for estimating the travel time of two # specified locations at a given departure time. The main idea here is predicting the velocity of the trip. Given the distance between starting and ending point of the trip, it is possible to easily compute the Travel Time. # According to the given data, different features including the time of the day, day of the week, month, travel distance, and distance to the center of the city (New York) are used. # Different prediction models (Linear, GLM and Deep Neural Network) are compared, and the GLM is used for genrating the final results. # ## Preparation # Import required libraries # In[136]: import numpy as np import pandas as pd from geopy.distance import vincenty from datetime import datetime from datetime import timedelta from datetime import time import statsmodels.api as sm from sklearn.datasets import load_boston from sklearn.model_selection import train_test_split from sklearn.cross_validation import KFold from sklearn.preprocessing import MinMaxScaler from sklearn.linear_model import LinearRegression from sklearn.metrics import mean_absolute_error, mean_squared_error import matplotlib import matplotlib.pyplot as plt import seaborn as sns from keras.models import Sequential from keras.layers import Dense, Dropout, Activation, Flatten from keras.layers.normalization import BatchNormalization get_ipython().magic('matplotlib inline') # ## Reading data # In[169]: df_train = pd.read_csv('train.csv',index_col= 'row_id') df_test = pd.read_csv('test.csv',index_col= 'row_id') df_train.head() # ## Feature engineering # # It is clear that the travel time of trip depends on the starting and ending point. In other words, the most uncertain component in the prediction of travel time is the velocity of the trip. Given the velocity and the distance, it is easy to compute the duration of the travel. 
# # Also, I observed all travels in both train and test dataset are happening around New York City. Therefore, the main component in determining the velocity of is the city traffic. We know that traffic is a time-dependent phenomenon which depends on the time of the day, the day of the week, and month of the year. In addition, the traffic is usually heavier in Manhattan (downtown of the city) in comparing to the other point of the city. Therefore, if the starting or ending point of the travel is close to the Manhattan we expect higher traffic comparing to the other neighborhoods. In visualization section, I provide enough evidence from the data set to support the aforementioned claims. # # According to this observation the following features are computted by using the raw data and added to the dataframe. # # * Distance between starting and ending computted by vincenty formula # * The time of the day of travel (in sec far from the midnight) # * The day of the week (Monday, Tuesday, etc). For this categorical data, six dummy variables are added to datafram # * The month of the travel to capture seasnolity effect. # * The sequare of distance # * The velocity is used as the predication variable. 
# # In[156]: def distance(row): source = (row['start_lat'], row['start_lng']) dest = ( row['end_lat'], row['end_lng']) return vincenty(source,dest).miles Manhattan = (40.7831, -73.9712) def pickup_to_MH(row): '''find the distance between pick up point and Manhattan center''' source = (row['start_lat'], row['start_lng']) return vincenty(source,Manhattan).miles def dropoff_to_MH(row): '''find the distance between dropoff point and Manhattan center''' dest = ( row['end_lat'], row['end_lng']) return vincenty(dest,Manhattan).miles def day_of_week(ep): return datetime.fromtimestamp(ep).strftime("%A") def month(ep): return datetime.fromtimestamp(ep).month def time_of_day(ep): ref = datetime(2015, 1, 1, 0, 0, 0) sec = (datetime.fromtimestamp(ep)- ref).seconds return min(sec, 86400- sec) def year(ep): return datetime.fromtimestamp(ep).year def add_features(df_train_s): # Add day of the week and the dummy variable DD = df_train_s['start_timestamp'].map(day_of_week) df_train_s['day'] = DD DD = pd.get_dummies( DD,prefix='day', drop_first=True) df_train_s = pd.concat([df_train_s, DD],axis =1 ) # Month, time of the dat, df_train_s df_train_s['month'] = df_train_s['start_timestamp'].map(month) df_train_s['time_of_day'] = df_train_s['start_timestamp'].map(time_of_day) # distance between start and end of the trip df_train_s['distance'] = df_train_s.apply(lambda x :distance(x), axis=1 ) df_train_s['distance2'] = df_train_s['distance']**2 # distance between start, end, and center of Manhatan df_train_s['pickup_MH'] = df_train_s.apply(pickup_to_MH, axis=1 ) df_train_s['dropoff_MH'] = df_train_s.apply(dropoff_to_MH, axis=1 ) return df_train_s # Now, we can easily add all of the above features to both traing and test data set. Due to time limtation and calculation power I only used 10% of the traing data. 
# In[24]: np.random.seed(42) df_train_s = df_train.sample(frac=0.01, replace=False) df_train_s = add_features(df_train_s) df_train_s['velocity'] = np.array(df_train_s['distance']/(df_train_s['duration']/3600)) # In[25]: df_train_s.head() # In[170]: # adding the feature to test set. df_test = add_features(df_test) # ## Removing Outlires # The following functions are used to compute these features. Considering the speed limit and the fact the usual trafic in New York, it is reseanable to assume that always the speed show not exceed 90 mph. Therefore, I remove the points with more than this number as the outlires. Also, I removed the data with less than .5 mph. Specificlly, there exists many samples with zero distance between starting and ending point which might happen becouse GPS problem. # In[41]: df_train_s = df_train_s[df_train_s['velocity']<90] df_train_s = df_train_s[df_train_s['velocity']>.5] # ## Data Visulazation # # First we look at the starting and ending point of the trips which happens in New York. # # # # In[30]: fig, axes = plt.subplots(nrows=1, ncols=2, figsize=(12, 6)) ax = df_train_s.plot.scatter( 'start_lat','start_lng', ax = axes[0], title='Start point of travel') ax.set(xlabel="latitude", ylabel='longitude') ax = df_train_s.plot.scatter('end_lng','end_lat', ax = axes[1], title='Destination of the travel') ax.set(xlabel="latitude", ylabel='longitude') plt.show() # Here are some statitcs about the volacity, distance of each trip and its duration. Also, we looked at the density function of the volacity. A log-normal or Gamma distribution are approprate candiatdes for this distribution. 
# In[42]: df_train_s[['distance', 'duration','velocity']].describe() # In[43]: df_train_s['velocity'].hist(bins=1000,normed=True) # ### Corrolation matrix # In[44]: corr = df_train_s.corr() # generate a mask for the lower triangle mask = np.zeros_like(corr, dtype=np.bool) mask[np.triu_indices_from(mask)] = True # set up the matplotlib figure f, ax = plt.subplots(figsize=(18, 18)) # generate a custom diverging colormap cmap = sns.diverging_palette(220, 10, as_cmap=True) # draw the heatmap with the mask and correct aspect ratio sns.heatmap(corr, mask=mask, cmap=cmap, vmax=.3, square=True, linewidths=.5, cbar_kws={"shrink": .5}, ax=ax) plt.show() # In[53]: df_train_s.plot.scatter( 'distance','velocity') # In[48]: ### Seanility and time Effect on Velocity gr= df_train_s[['velocity','month']].groupby(by='month') gr.mean().plot.bar(yerr=gr.std()) # ## Data preprocessing # # Let's split our data to train and test set in fraction of $\frac{4}{1}$ to facilate comparing the results. # This test set is differenet from the given test set. # In[105]: cl = list(set(df_train_s.keys())-{'velocity','duration','day'}) X = np.array(df_train_s[cl]) X1 = np.insert(X, 0, 1, axis=1) y = np.array(df_train_s['velocity']) X_train, X_test, y_train, y_test = train_test_split(X1, y, test_size=0.2, random_state=42) dist_train = X_train[:,1] dist_test = X_test[:,1] # In[106]: list(enumerate(cl)) dist_train.mean() # ## Linear Model # In[204]: model_sk = LinearRegression() model_sk.fit(X_train, y_train) plt.figure(figsize=(12, 8)) plt.bar(np.arange(model_sk.coef_.shape[0]) - 0.4, model_sk.coef_) plt.xticks(np.arange(model_sk.coef_.shape[0]), cl, rotation='vertical') plt.xlim([-1, model_sk.coef_.shape[0]]) plt.title("Linear model coefficients") plt.show() # The folling chart also provide better understading. Excepet X12 (dummy for sunday) all the other variables are significant; the p-value is zero and null-hypothesis is rejected. 
# In[205]: linear_model = sm.OLS(y_train, X_train) linear_results = linear_model.fit() print(linear_results.summary()) # ## Generalized Linear Model # I tried GLM with gamma fammaly. # In[206]: gamma_model = sm.GLM( y_train, X_train,family=sm.families.Gamma()) gamma_results = gamma_model.fit() print(gamma_results.summary()) # ## Deep Neural Network (DNN) # # Here, I am useing a DNN as a prediction model. I am using the Keras package to train the network. Network includes 3 layers. Also, between each two layer a dropout layer is add. RELU and softmax are used as the activation functions. Here, I define the model. # # I normilized the data the input data to imporve the performance. # In[195]: DNN_model = Sequential() DNN_model.add(Dense(100,input_dim=X_train.shape[1],init='uniform',activation='relu')) DNN_model.add(Dropout(0.5)) DNN_model.add(Dense(50,init='uniform',activation='softmax')) DNN_model.add(Dropout(0.5)) DNN_model.add(Dense(100,init='uniform',activation='relu')) DNN_model.add(Dropout(0.5)) DNN_model.add(Dense(1,init='uniform',activation='relu')) DNN_model.summary() # ### Fitting the DNN # In[196]: mn = X1.mean(axis=0) #model.compile(loss='mean_absolute_error',optimizer='adam',metrics='[accuracy]') DNN_model.compile(loss='mean_absolute_error',optimizer='adam') history = DNN_model.fit(X_train/mn,y_train, validation_data=(X_test/mn, y_test), epochs =100, batch_size=100, verbose=2) # In[197]: plt.figure(figsize=(10, 8)) plt.title("Dense model training", fontsize=12) plt.plot(history.history["loss"], label="Train") plt.plot(history.history["val_loss"], label="Test") plt.grid("on") plt.xlabel("Epoch", fontsize=12) plt.ylabel("loss", fontsize=12) plt.legend(loc="upper right") # ## Evalution # # In this part, I compare the propsed models and choose the best one. I compare the results based on mean absolute # error of predicted versus actual durations, and also mean absolute percentage error which is the percantge of the error. 
Note that here we compare based on duration as asked in the question and not the velocity. # # In[207]: preds_test, preds_train = {}, {} #Linear Model preds_test['linear'] = linear_results.predict(X_test) preds_train['linear'] = linear_results.predict(X_train) #GLM (Gamma Model) preds_test['GLM'] = gamma_results.predict(X_test) preds_train['GLM'] = gamma_results.predict(X_train) #Deep Learning preds_test['DL'] = np.squeeze(DNN_model.predict(X_test/mn)) preds_train['DL'] = np.squeeze(DNN_model.predict(X_train/mn)) # The functions are used for evalution # In[84]: def mean_absolute_error(dist,y_true, y_pred ): """ Args: dist(ndarray) : distance between pick up and drop off y_true(ndarray) : true velocity y_pred(ndarray) : the prediction value of velocity """ err = np.abs(dist/y_true - dist/y_pred) err = err[np.isfinite(err)] return np.mean(err) *3600 def mean_absolute_percentage_error(dist,y_true, y_pred ): """ Args: dist(ndarray) : distance between pick up and drop off y_true(ndarray) : true velocity y_pred(ndarray) : the prediction value of velocity """ err = np.abs(y_true/y_pred - 1) err = err[np.isfinite(err)] return np.mean(err)*100 def evalute(dist,y_true,prediction): MAE, MAPE= {}, {} for kys, y_pred in prediction.items(): MAE[kys] = mean_absolute_error(dist,y_true, y_pred ) MAPE[kys] = mean_absolute_percentage_error(dist,y_true, y_pred ) return MAE, MAPE # In[209]: MAE_train, MAPE_train = evalute(dist_train,y_train, preds_train) MAE_test, MAPE_test = evalute(dist_test,y_test, preds_test) pd.DataFrame([MAE_test,MAE_train, MAPE_test, MAPE_train], index= ['MAE_test', 'MAE_train', 'MAPE_test', 'MAPE_train'] ).transpose() # In[201]: dist_train.mean() # ## Generate Prediction for Test Set # # By comparing the three models (linear, GLM, DNN), I choose GLM for generating the predication for the given test set. 
# In[212]: XX = np.array(df_test[cl]) XX = np.insert(XX, 0, 1, axis=1) dist_x = XX[:,1] #DNN_TD = dist_x/np.squeeze(DNN_model.predict(XX/mn))*3600 GLM_TD = dist_x/gamma_results.predict(XX)*3600 df_ans= pd.DataFrame(GLM_TD, columns =['duration']) df_ans.index.name = 'row_id' df_ans.to_csv('answer.csv') df_ans= pd.DataFrame(TD, columns =['duration']) # ## Extention and Further Idea # Here, we only use the vincenty, but by conteccting to google API and fidning the real distance between start and end point the preditor defenitlly can be improved. Also, here I only used 10% of data points becouse of the limitation on runnig the DNN. By using GPU or running over the cloud we can use all the samples. # # # #
normal
{ "blob_id": "c1bb7b579e6b251ddce41384aef1243e411c5d0e", "index": 1018, "step-1": "<mask token>\n\n\ndef distance(row):\n source = row['start_lat'], row['start_lng']\n dest = row['end_lat'], row['end_lng']\n return vincenty(source, dest).miles\n\n\n<mask token>\n\n\ndef dropoff_to_MH(row):\n \"\"\"find the distance between dropoff point and Manhattan center\"\"\"\n dest = row['end_lat'], row['end_lng']\n return vincenty(dest, Manhattan).miles\n\n\ndef day_of_week(ep):\n return datetime.fromtimestamp(ep).strftime('%A')\n\n\n<mask token>\n\n\ndef time_of_day(ep):\n ref = datetime(2015, 1, 1, 0, 0, 0)\n sec = (datetime.fromtimestamp(ep) - ref).seconds\n return min(sec, 86400 - sec)\n\n\ndef year(ep):\n return datetime.fromtimestamp(ep).year\n\n\ndef add_features(df_train_s):\n DD = df_train_s['start_timestamp'].map(day_of_week)\n df_train_s['day'] = DD\n DD = pd.get_dummies(DD, prefix='day', drop_first=True)\n df_train_s = pd.concat([df_train_s, DD], axis=1)\n df_train_s['month'] = df_train_s['start_timestamp'].map(month)\n df_train_s['time_of_day'] = df_train_s['start_timestamp'].map(time_of_day)\n df_train_s['distance'] = df_train_s.apply(lambda x: distance(x), axis=1)\n df_train_s['distance2'] = df_train_s['distance'] ** 2\n df_train_s['pickup_MH'] = df_train_s.apply(pickup_to_MH, axis=1)\n df_train_s['dropoff_MH'] = df_train_s.apply(dropoff_to_MH, axis=1)\n return df_train_s\n\n\n<mask token>\n\n\ndef mean_absolute_percentage_error(dist, y_true, y_pred):\n \"\"\"\n Args: \n dist(ndarray) : distance between pick up and drop off \n y_true(ndarray) : true velocity\n y_pred(ndarray) : the prediction value of velocity\n\n \"\"\"\n err = np.abs(y_true / y_pred - 1)\n err = err[np.isfinite(err)]\n return np.mean(err) * 100\n\n\ndef evalute(dist, y_true, prediction):\n MAE, MAPE = {}, {}\n for kys, y_pred in prediction.items():\n MAE[kys] = mean_absolute_error(dist, y_true, y_pred)\n MAPE[kys] = mean_absolute_percentage_error(dist, y_true, y_pred)\n return MAE, 
MAPE\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\ndef distance(row):\n source = row['start_lat'], row['start_lng']\n dest = row['end_lat'], row['end_lng']\n return vincenty(source, dest).miles\n\n\n<mask token>\n\n\ndef dropoff_to_MH(row):\n \"\"\"find the distance between dropoff point and Manhattan center\"\"\"\n dest = row['end_lat'], row['end_lng']\n return vincenty(dest, Manhattan).miles\n\n\ndef day_of_week(ep):\n return datetime.fromtimestamp(ep).strftime('%A')\n\n\ndef month(ep):\n return datetime.fromtimestamp(ep).month\n\n\ndef time_of_day(ep):\n ref = datetime(2015, 1, 1, 0, 0, 0)\n sec = (datetime.fromtimestamp(ep) - ref).seconds\n return min(sec, 86400 - sec)\n\n\ndef year(ep):\n return datetime.fromtimestamp(ep).year\n\n\ndef add_features(df_train_s):\n DD = df_train_s['start_timestamp'].map(day_of_week)\n df_train_s['day'] = DD\n DD = pd.get_dummies(DD, prefix='day', drop_first=True)\n df_train_s = pd.concat([df_train_s, DD], axis=1)\n df_train_s['month'] = df_train_s['start_timestamp'].map(month)\n df_train_s['time_of_day'] = df_train_s['start_timestamp'].map(time_of_day)\n df_train_s['distance'] = df_train_s.apply(lambda x: distance(x), axis=1)\n df_train_s['distance2'] = df_train_s['distance'] ** 2\n df_train_s['pickup_MH'] = df_train_s.apply(pickup_to_MH, axis=1)\n df_train_s['dropoff_MH'] = df_train_s.apply(dropoff_to_MH, axis=1)\n return df_train_s\n\n\n<mask token>\n\n\ndef mean_absolute_percentage_error(dist, y_true, y_pred):\n \"\"\"\n Args: \n dist(ndarray) : distance between pick up and drop off \n y_true(ndarray) : true velocity\n y_pred(ndarray) : the prediction value of velocity\n\n \"\"\"\n err = np.abs(y_true / y_pred - 1)\n err = err[np.isfinite(err)]\n return np.mean(err) * 100\n\n\ndef evalute(dist, y_true, prediction):\n MAE, MAPE = {}, {}\n for kys, y_pred in prediction.items():\n MAE[kys] = mean_absolute_error(dist, y_true, y_pred)\n MAPE[kys] = mean_absolute_percentage_error(dist, y_true, y_pred)\n return MAE, 
MAPE\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\ndef distance(row):\n source = row['start_lat'], row['start_lng']\n dest = row['end_lat'], row['end_lng']\n return vincenty(source, dest).miles\n\n\n<mask token>\n\n\ndef pickup_to_MH(row):\n \"\"\"find the distance between pick up point and Manhattan center\"\"\"\n source = row['start_lat'], row['start_lng']\n return vincenty(source, Manhattan).miles\n\n\ndef dropoff_to_MH(row):\n \"\"\"find the distance between dropoff point and Manhattan center\"\"\"\n dest = row['end_lat'], row['end_lng']\n return vincenty(dest, Manhattan).miles\n\n\ndef day_of_week(ep):\n return datetime.fromtimestamp(ep).strftime('%A')\n\n\ndef month(ep):\n return datetime.fromtimestamp(ep).month\n\n\ndef time_of_day(ep):\n ref = datetime(2015, 1, 1, 0, 0, 0)\n sec = (datetime.fromtimestamp(ep) - ref).seconds\n return min(sec, 86400 - sec)\n\n\ndef year(ep):\n return datetime.fromtimestamp(ep).year\n\n\ndef add_features(df_train_s):\n DD = df_train_s['start_timestamp'].map(day_of_week)\n df_train_s['day'] = DD\n DD = pd.get_dummies(DD, prefix='day', drop_first=True)\n df_train_s = pd.concat([df_train_s, DD], axis=1)\n df_train_s['month'] = df_train_s['start_timestamp'].map(month)\n df_train_s['time_of_day'] = df_train_s['start_timestamp'].map(time_of_day)\n df_train_s['distance'] = df_train_s.apply(lambda x: distance(x), axis=1)\n df_train_s['distance2'] = df_train_s['distance'] ** 2\n df_train_s['pickup_MH'] = df_train_s.apply(pickup_to_MH, axis=1)\n df_train_s['dropoff_MH'] = df_train_s.apply(dropoff_to_MH, axis=1)\n return df_train_s\n\n\n<mask token>\n\n\ndef mean_absolute_error(dist, y_true, y_pred):\n \"\"\"\n Args: \n dist(ndarray) : distance between pick up and drop off \n y_true(ndarray) : true velocity\n y_pred(ndarray) : the prediction value of velocity\n\n \"\"\"\n err = np.abs(dist / y_true - dist / y_pred)\n err = err[np.isfinite(err)]\n return np.mean(err) * 3600\n\n\ndef mean_absolute_percentage_error(dist, y_true, 
y_pred):\n \"\"\"\n Args: \n dist(ndarray) : distance between pick up and drop off \n y_true(ndarray) : true velocity\n y_pred(ndarray) : the prediction value of velocity\n\n \"\"\"\n err = np.abs(y_true / y_pred - 1)\n err = err[np.isfinite(err)]\n return np.mean(err) * 100\n\n\ndef evalute(dist, y_true, prediction):\n MAE, MAPE = {}, {}\n for kys, y_pred in prediction.items():\n MAE[kys] = mean_absolute_error(dist, y_true, y_pred)\n MAPE[kys] = mean_absolute_percentage_error(dist, y_true, y_pred)\n return MAE, MAPE\n\n\n<mask token>\n", "step-4": "<mask token>\nget_ipython().magic('matplotlib inline')\n<mask token>\ndf_train.head()\n\n\ndef distance(row):\n source = row['start_lat'], row['start_lng']\n dest = row['end_lat'], row['end_lng']\n return vincenty(source, dest).miles\n\n\n<mask token>\n\n\ndef pickup_to_MH(row):\n \"\"\"find the distance between pick up point and Manhattan center\"\"\"\n source = row['start_lat'], row['start_lng']\n return vincenty(source, Manhattan).miles\n\n\ndef dropoff_to_MH(row):\n \"\"\"find the distance between dropoff point and Manhattan center\"\"\"\n dest = row['end_lat'], row['end_lng']\n return vincenty(dest, Manhattan).miles\n\n\ndef day_of_week(ep):\n return datetime.fromtimestamp(ep).strftime('%A')\n\n\ndef month(ep):\n return datetime.fromtimestamp(ep).month\n\n\ndef time_of_day(ep):\n ref = datetime(2015, 1, 1, 0, 0, 0)\n sec = (datetime.fromtimestamp(ep) - ref).seconds\n return min(sec, 86400 - sec)\n\n\ndef year(ep):\n return datetime.fromtimestamp(ep).year\n\n\ndef add_features(df_train_s):\n DD = df_train_s['start_timestamp'].map(day_of_week)\n df_train_s['day'] = DD\n DD = pd.get_dummies(DD, prefix='day', drop_first=True)\n df_train_s = pd.concat([df_train_s, DD], axis=1)\n df_train_s['month'] = df_train_s['start_timestamp'].map(month)\n df_train_s['time_of_day'] = df_train_s['start_timestamp'].map(time_of_day)\n df_train_s['distance'] = df_train_s.apply(lambda x: distance(x), axis=1)\n df_train_s['distance2'] = 
df_train_s['distance'] ** 2\n df_train_s['pickup_MH'] = df_train_s.apply(pickup_to_MH, axis=1)\n df_train_s['dropoff_MH'] = df_train_s.apply(dropoff_to_MH, axis=1)\n return df_train_s\n\n\nnp.random.seed(42)\n<mask token>\ndf_train_s.head()\n<mask token>\nax.set(xlabel='latitude', ylabel='longitude')\n<mask token>\nax.set(xlabel='latitude', ylabel='longitude')\nplt.show()\ndf_train_s[['distance', 'duration', 'velocity']].describe()\ndf_train_s['velocity'].hist(bins=1000, normed=True)\n<mask token>\nsns.heatmap(corr, mask=mask, cmap=cmap, vmax=0.3, square=True, linewidths=\n 0.5, cbar_kws={'shrink': 0.5}, ax=ax)\nplt.show()\ndf_train_s.plot.scatter('distance', 'velocity')\n<mask token>\ngr.mean().plot.bar(yerr=gr.std())\n<mask token>\nlist(enumerate(cl))\ndist_train.mean()\n<mask token>\nmodel_sk.fit(X_train, y_train)\nplt.figure(figsize=(12, 8))\nplt.bar(np.arange(model_sk.coef_.shape[0]) - 0.4, model_sk.coef_)\nplt.xticks(np.arange(model_sk.coef_.shape[0]), cl, rotation='vertical')\nplt.xlim([-1, model_sk.coef_.shape[0]])\nplt.title('Linear model coefficients')\nplt.show()\n<mask token>\nprint(linear_results.summary())\n<mask token>\nprint(gamma_results.summary())\n<mask token>\nDNN_model.add(Dense(100, input_dim=X_train.shape[1], init='uniform',\n activation='relu'))\nDNN_model.add(Dropout(0.5))\nDNN_model.add(Dense(50, init='uniform', activation='softmax'))\nDNN_model.add(Dropout(0.5))\nDNN_model.add(Dense(100, init='uniform', activation='relu'))\nDNN_model.add(Dropout(0.5))\nDNN_model.add(Dense(1, init='uniform', activation='relu'))\nDNN_model.summary()\n<mask token>\nDNN_model.compile(loss='mean_absolute_error', optimizer='adam')\n<mask token>\nplt.figure(figsize=(10, 8))\nplt.title('Dense model training', fontsize=12)\nplt.plot(history.history['loss'], label='Train')\nplt.plot(history.history['val_loss'], label='Test')\nplt.grid('on')\nplt.xlabel('Epoch', fontsize=12)\nplt.ylabel('loss', fontsize=12)\nplt.legend(loc='upper right')\n<mask token>\n\n\ndef 
mean_absolute_error(dist, y_true, y_pred):\n \"\"\"\n Args: \n dist(ndarray) : distance between pick up and drop off \n y_true(ndarray) : true velocity\n y_pred(ndarray) : the prediction value of velocity\n\n \"\"\"\n err = np.abs(dist / y_true - dist / y_pred)\n err = err[np.isfinite(err)]\n return np.mean(err) * 3600\n\n\ndef mean_absolute_percentage_error(dist, y_true, y_pred):\n \"\"\"\n Args: \n dist(ndarray) : distance between pick up and drop off \n y_true(ndarray) : true velocity\n y_pred(ndarray) : the prediction value of velocity\n\n \"\"\"\n err = np.abs(y_true / y_pred - 1)\n err = err[np.isfinite(err)]\n return np.mean(err) * 100\n\n\ndef evalute(dist, y_true, prediction):\n MAE, MAPE = {}, {}\n for kys, y_pred in prediction.items():\n MAE[kys] = mean_absolute_error(dist, y_true, y_pred)\n MAPE[kys] = mean_absolute_percentage_error(dist, y_true, y_pred)\n return MAE, MAPE\n\n\n<mask token>\npd.DataFrame([MAE_test, MAE_train, MAPE_test, MAPE_train], index=[\n 'MAE_test', 'MAE_train', 'MAPE_test', 'MAPE_train']).transpose()\ndist_train.mean()\n<mask token>\ndf_ans.to_csv('answer.csv')\n<mask token>\n", "step-5": "\n# coding: utf-8\n\n# ## Estimating Travel Time\n# \n# \n# The objective of this document is proposing a prediction model for estimating the travel time of two\n# specified locations at a given departure time. The main idea here is predicting the velocity of the trip. Given the distance between starting and ending point of the trip, it is possible to easily compute the Travel Time. 
\n# According to the given data, different features including the time of the day, day of the week, month, travel distance, and distance to the center of the city (New York) are used.\n# Different prediction models (Linear, GLM and Deep Neural Network) are compared, and the GLM is used for genrating the final results.\n\n# ## Preparation\n# Import required libraries\n\n# In[136]:\n\nimport numpy as np\nimport pandas as pd\nfrom geopy.distance import vincenty\nfrom datetime import datetime\nfrom datetime import timedelta\nfrom datetime import time\n\nimport statsmodels.api as sm\n \n\n\nfrom sklearn.datasets import load_boston\nfrom sklearn.model_selection import train_test_split\nfrom sklearn.cross_validation import KFold\nfrom sklearn.preprocessing import MinMaxScaler\nfrom sklearn.linear_model import LinearRegression\nfrom sklearn.metrics import mean_absolute_error, mean_squared_error\n\n\nimport matplotlib\nimport matplotlib.pyplot as plt\nimport seaborn as sns\n\n\n\nfrom keras.models import Sequential\nfrom keras.layers import Dense, Dropout, Activation, Flatten\nfrom keras.layers.normalization import BatchNormalization\n\n\nget_ipython().magic('matplotlib inline')\n\n\n# ## Reading data\n\n# In[169]:\n\ndf_train = pd.read_csv('train.csv',index_col= 'row_id')\ndf_test = pd.read_csv('test.csv',index_col= 'row_id')\ndf_train.head()\n\n\n# ## Feature engineering\n# \n# It is clear that the travel time of trip depends on the starting and ending point. In other words, the most uncertain component in the prediction of travel time is the velocity of the trip. Given the velocity and the distance, it is easy to compute the duration of the travel. \n# \n# Also, I observed all travels in both train and test dataset are happening around New York City. Therefore, the main component in determining the velocity of is the city traffic. We know that traffic is a time-dependent phenomenon which depends on the time of the day, the day of the week, and month of the year. 
In addition, the traffic is usually heavier in Manhattan (downtown of the city) in comparing to the other point of the city. Therefore, if the starting or ending point of the travel is close to the Manhattan we expect higher traffic comparing to the other neighborhoods. In visualization section, I provide enough evidence from the data set to support the aforementioned claims. \n# \n# According to this observation the following features are computted by using the raw data and added to the dataframe.\n# \n# * Distance between starting and ending computted by vincenty formula\n# * The time of the day of travel (in sec far from the midnight) \n# * The day of the week (Monday, Tuesday, etc). For this categorical data, six dummy variables are added to datafram\n# * The month of the travel to capture seasnolity effect.\n# * The sequare of distance\n# * The velocity is used as the predication variable.\n# \n\n# In[156]:\n\ndef distance(row):\n source = (row['start_lat'], row['start_lng'])\n dest = ( row['end_lat'], row['end_lng'])\n return vincenty(source,dest).miles\n\n\nManhattan = (40.7831, -73.9712)\ndef pickup_to_MH(row):\n '''find the distance between pick up point and Manhattan center'''\n source = (row['start_lat'], row['start_lng'])\n return vincenty(source,Manhattan).miles\n\ndef dropoff_to_MH(row):\n '''find the distance between dropoff point and Manhattan center'''\n dest = ( row['end_lat'], row['end_lng'])\n return vincenty(dest,Manhattan).miles\n\ndef day_of_week(ep):\n return datetime.fromtimestamp(ep).strftime(\"%A\")\n\n\ndef month(ep):\n return datetime.fromtimestamp(ep).month\n\ndef time_of_day(ep):\n ref = datetime(2015, 1, 1, 0, 0, 0)\n sec = (datetime.fromtimestamp(ep)- ref).seconds\n return min(sec, 86400- sec)\n \ndef year(ep):\n return datetime.fromtimestamp(ep).year\n\ndef add_features(df_train_s):\n \n # Add day of the week and the dummy variable\n DD = df_train_s['start_timestamp'].map(day_of_week)\n df_train_s['day'] = DD\n \n DD = 
pd.get_dummies( DD,prefix='day', drop_first=True)\n df_train_s = pd.concat([df_train_s, DD],axis =1 )\n\n # Month, time of the dat, df_train_s\n df_train_s['month'] = df_train_s['start_timestamp'].map(month)\n df_train_s['time_of_day'] = df_train_s['start_timestamp'].map(time_of_day)\n \n # distance between start and end of the trip\n df_train_s['distance'] = df_train_s.apply(lambda x :distance(x), axis=1 )\n df_train_s['distance2'] = df_train_s['distance']**2\n\n # distance between start, end, and center of Manhatan \n df_train_s['pickup_MH'] = df_train_s.apply(pickup_to_MH, axis=1 )\n df_train_s['dropoff_MH'] = df_train_s.apply(dropoff_to_MH, axis=1 )\n return df_train_s\n\n\n# Now, we can easily add all of the above features to both traing and test data set. Due to time limtation and calculation power I only used 10% of the traing data.\n\n# In[24]:\n\nnp.random.seed(42)\ndf_train_s = df_train.sample(frac=0.01, replace=False)\ndf_train_s = add_features(df_train_s)\ndf_train_s['velocity'] = np.array(df_train_s['distance']/(df_train_s['duration']/3600))\n\n\n# In[25]:\n\ndf_train_s.head()\n\n\n# In[170]:\n\n# adding the feature to test set.\ndf_test = add_features(df_test)\n\n\n# ## Removing Outlires\n# The following functions are used to compute these features. Considering the speed limit and the fact the usual trafic in New York, it is reseanable to assume that always the speed show not exceed 90 mph. Therefore, I remove the points with more than this number as the outlires. Also, I removed the data with less than .5 mph. 
Specificlly, there exists many samples with zero distance between starting and ending point which might happen becouse GPS problem.\n\n# In[41]:\n\ndf_train_s = df_train_s[df_train_s['velocity']<90]\ndf_train_s = df_train_s[df_train_s['velocity']>.5]\n\n\n# ## Data Visulazation\n# \n# First we look at the starting and ending point of the trips which happens in New York.\n# \n# \n# \n\n# In[30]:\n\nfig, axes = plt.subplots(nrows=1, ncols=2, figsize=(12, 6))\n\nax = df_train_s.plot.scatter( 'start_lat','start_lng',\n ax = axes[0],\n title='Start point of travel')\nax.set(xlabel=\"latitude\", ylabel='longitude')\nax = df_train_s.plot.scatter('end_lng','end_lat',\n ax = axes[1],\n title='Destination of the travel')\nax.set(xlabel=\"latitude\", ylabel='longitude')\nplt.show()\n\n\n# Here are some statitcs about the volacity, distance of each trip and its duration. Also, we looked at the density function of the volacity. A log-normal or Gamma distribution are approprate candiatdes for this distribution.\n\n# In[42]:\n\ndf_train_s[['distance', 'duration','velocity']].describe()\n\n\n# In[43]:\n\ndf_train_s['velocity'].hist(bins=1000,normed=True)\n\n\n# ### Corrolation matrix\n\n# In[44]:\n\ncorr = df_train_s.corr()\n\n# generate a mask for the lower triangle\nmask = np.zeros_like(corr, dtype=np.bool)\nmask[np.triu_indices_from(mask)] = True\n\n# set up the matplotlib figure\nf, ax = plt.subplots(figsize=(18, 18))\n\n# generate a custom diverging colormap\ncmap = sns.diverging_palette(220, 10, as_cmap=True)\n\n# draw the heatmap with the mask and correct aspect ratio\nsns.heatmap(corr, mask=mask, cmap=cmap, vmax=.3,\n square=True, \n linewidths=.5, cbar_kws={\"shrink\": .5}, ax=ax)\n\nplt.show()\n\n\n# In[53]:\n\ndf_train_s.plot.scatter( 'distance','velocity')\n\n\n# In[48]:\n\n### Seanility and time Effect on Velocity\ngr= df_train_s[['velocity','month']].groupby(by='month')\ngr.mean().plot.bar(yerr=gr.std())\n\n\n# ## Data preprocessing\n# \n# Let's split our data to 
train and test set in fraction of $\\frac{4}{1}$ to facilate comparing the results. \n# This test set is differenet from the given test set.\n\n# In[105]:\n\ncl = list(set(df_train_s.keys())-{'velocity','duration','day'})\nX = np.array(df_train_s[cl])\nX1 = np.insert(X, 0, 1, axis=1)\ny = np.array(df_train_s['velocity'])\n\n\nX_train, X_test, y_train, y_test = train_test_split(X1, y, test_size=0.2, random_state=42)\n\ndist_train = X_train[:,1]\ndist_test = X_test[:,1]\n\n\n# In[106]:\n\nlist(enumerate(cl))\ndist_train.mean()\n\n\n# ## Linear Model \n\n# In[204]:\n\nmodel_sk = LinearRegression()\nmodel_sk.fit(X_train, y_train)\n\nplt.figure(figsize=(12, 8))\nplt.bar(np.arange(model_sk.coef_.shape[0]) - 0.4, model_sk.coef_)\nplt.xticks(np.arange(model_sk.coef_.shape[0]), cl, rotation='vertical')\nplt.xlim([-1, model_sk.coef_.shape[0]])\nplt.title(\"Linear model coefficients\")\nplt.show()\n\n\n# The folling chart also provide better understading. Excepet X12 (dummy for sunday) all the other variables are significant; the p-value is zero and null-hypothesis is rejected.\n\n# In[205]:\n\nlinear_model = sm.OLS(y_train, X_train)\nlinear_results = linear_model.fit()\nprint(linear_results.summary())\n\n\n# ## Generalized Linear Model\n# I tried GLM with gamma fammaly. \n\n# In[206]:\n\ngamma_model = sm.GLM( y_train, X_train,family=sm.families.Gamma())\ngamma_results = gamma_model.fit()\nprint(gamma_results.summary())\n\n\n# ## Deep Neural Network (DNN)\n# \n# Here, I am useing a DNN as a prediction model. I am using the Keras package to train the network. Network includes 3 layers. Also, between each two layer a dropout layer is add. RELU and softmax are used as the activation functions. Here, I define the model. \n# \n# I normilized the data the input data to imporve the performance. 
\n\n# In[195]:\n\nDNN_model = Sequential()\nDNN_model.add(Dense(100,input_dim=X_train.shape[1],init='uniform',activation='relu'))\nDNN_model.add(Dropout(0.5))\nDNN_model.add(Dense(50,init='uniform',activation='softmax'))\nDNN_model.add(Dropout(0.5))\nDNN_model.add(Dense(100,init='uniform',activation='relu'))\nDNN_model.add(Dropout(0.5))\nDNN_model.add(Dense(1,init='uniform',activation='relu'))\n\nDNN_model.summary()\n\n\n# ### Fitting the DNN\n\n# In[196]:\n\nmn = X1.mean(axis=0)\n#model.compile(loss='mean_absolute_error',optimizer='adam',metrics='[accuracy]')\nDNN_model.compile(loss='mean_absolute_error',optimizer='adam')\nhistory = DNN_model.fit(X_train/mn,y_train, \n validation_data=(X_test/mn, y_test),\n epochs =100,\n batch_size=100,\n verbose=2)\n\n\n\n# In[197]:\n\nplt.figure(figsize=(10, 8))\nplt.title(\"Dense model training\", fontsize=12)\nplt.plot(history.history[\"loss\"], label=\"Train\")\nplt.plot(history.history[\"val_loss\"], label=\"Test\")\nplt.grid(\"on\")\nplt.xlabel(\"Epoch\", fontsize=12)\nplt.ylabel(\"loss\", fontsize=12)\nplt.legend(loc=\"upper right\")\n\n\n# ## Evalution\n# \n# In this part, I compare the propsed models and choose the best one. I compare the results based on mean absolute\n# error of predicted versus actual durations, and also mean absolute percentage error which is the percantge of the error. Note that here we compare based on duration as asked in the question and not the velocity. 
\n# \n\n# In[207]:\n\npreds_test, preds_train = {}, {}\n\n#Linear Model\npreds_test['linear'] = linear_results.predict(X_test)\npreds_train['linear'] = linear_results.predict(X_train)\n\n#GLM (Gamma Model)\n\npreds_test['GLM'] = gamma_results.predict(X_test)\npreds_train['GLM'] = gamma_results.predict(X_train)\n\n#Deep Learning\npreds_test['DL'] = np.squeeze(DNN_model.predict(X_test/mn))\npreds_train['DL'] = np.squeeze(DNN_model.predict(X_train/mn))\n\n\n\n\n# The functions are used for evalution\n\n# In[84]:\n\ndef mean_absolute_error(dist,y_true, y_pred ): \n \"\"\"\n Args: \n dist(ndarray) : distance between pick up and drop off \n y_true(ndarray) : true velocity\n y_pred(ndarray) : the prediction value of velocity\n\n \"\"\"\n err = np.abs(dist/y_true - dist/y_pred)\n err = err[np.isfinite(err)]\n return np.mean(err) *3600\n\n\ndef mean_absolute_percentage_error(dist,y_true, y_pred ): \n \"\"\"\n Args: \n dist(ndarray) : distance between pick up and drop off \n y_true(ndarray) : true velocity\n y_pred(ndarray) : the prediction value of velocity\n\n \"\"\"\n err = np.abs(y_true/y_pred - 1)\n err = err[np.isfinite(err)]\n return np.mean(err)*100\n\n\ndef evalute(dist,y_true,prediction):\n MAE, MAPE= {}, {}\n for kys, y_pred in prediction.items():\n MAE[kys] = mean_absolute_error(dist,y_true, y_pred )\n MAPE[kys] = mean_absolute_percentage_error(dist,y_true, y_pred )\n \n \n return MAE, MAPE\n\n\n# In[209]:\n\nMAE_train, MAPE_train = evalute(dist_train,y_train, preds_train)\nMAE_test, MAPE_test = evalute(dist_test,y_test, preds_test)\n\n\npd.DataFrame([MAE_test,MAE_train, MAPE_test, MAPE_train], \n index= ['MAE_test', 'MAE_train', 'MAPE_test', 'MAPE_train'] ).transpose()\n\n\n# In[201]:\n\ndist_train.mean()\n\n\n# ## Generate Prediction for Test Set\n# \n# By comparing the three models (linear, GLM, DNN), I choose GLM for generating the predication for the given test set. 
\n\n# In[212]:\n\nXX = np.array(df_test[cl])\nXX = np.insert(XX, 0, 1, axis=1)\n\ndist_x = XX[:,1]\n#DNN_TD = dist_x/np.squeeze(DNN_model.predict(XX/mn))*3600\nGLM_TD = dist_x/gamma_results.predict(XX)*3600\ndf_ans= pd.DataFrame(GLM_TD, columns =['duration'])\n\n\ndf_ans.index.name = 'row_id'\ndf_ans.to_csv('answer.csv')\ndf_ans= pd.DataFrame(TD, columns =['duration'])\n\n\n# ## Extention and Further Idea\n# Here, we only use the vincenty, but by conteccting to google API and fidning the real distance between start and end point the preditor defenitlly can be improved. Also, here I only used 10% of data points becouse of the limitation on runnig the DNN. By using GPU or running over the cloud we can use all the samples. \n# \n# \n# \n# \n", "step-ids": [ 8, 9, 11, 12, 15 ] }
[ 8, 9, 11, 12, 15 ]
"""Module containing class `Station`.""" from zoneinfo import ZoneInfo import datetime from vesper.util.named import Named class Station(Named): """Recording station.""" def __init__( self, name, long_name, time_zone_name, latitude=None, longitude=None, elevation=None): super().__init__(name) self._long_name = long_name self._time_zone = ZoneInfo(time_zone_name) self._latitude = latitude self._longitude = longitude self._elevation = elevation @property def long_name(self): return self._long_name @property def time_zone(self): return self._time_zone @property def latitude(self): return self._latitude @property def longitude(self): return self._longitude @property def elevation(self): return self._elevation def get_night(self, time): """ Gets the station-local night that includes the specified time. :Parameters: time : `datetime` the time whose night is to be gotten. The time may be either naive or aware. If the time is naive, it is assumed to be in the station's time zone. :Returns: the station-local night that includes the specified time, a `date`. The station-local night of a time is the starting date of the local 24-hour period starting at noon that contains the time. """ if time.tzinfo is not None: # time is aware # convert time to station time zone time = time.astimezone(self.time_zone) if time.hour < 12: time -= datetime.timedelta(hours=12) return time.date()
normal
{ "blob_id": "ad09880b9e06a129b9623be2a086ebcc8dc55c2c", "index": 9079, "step-1": "<mask token>\n\n\nclass Station(Named):\n <mask token>\n\n def __init__(self, name, long_name, time_zone_name, latitude=None,\n longitude=None, elevation=None):\n super().__init__(name)\n self._long_name = long_name\n self._time_zone = ZoneInfo(time_zone_name)\n self._latitude = latitude\n self._longitude = longitude\n self._elevation = elevation\n <mask token>\n\n @property\n def time_zone(self):\n return self._time_zone\n\n @property\n def latitude(self):\n return self._latitude\n\n @property\n def longitude(self):\n return self._longitude\n\n @property\n def elevation(self):\n return self._elevation\n <mask token>\n", "step-2": "<mask token>\n\n\nclass Station(Named):\n <mask token>\n\n def __init__(self, name, long_name, time_zone_name, latitude=None,\n longitude=None, elevation=None):\n super().__init__(name)\n self._long_name = long_name\n self._time_zone = ZoneInfo(time_zone_name)\n self._latitude = latitude\n self._longitude = longitude\n self._elevation = elevation\n <mask token>\n\n @property\n def time_zone(self):\n return self._time_zone\n\n @property\n def latitude(self):\n return self._latitude\n\n @property\n def longitude(self):\n return self._longitude\n\n @property\n def elevation(self):\n return self._elevation\n\n def get_night(self, time):\n \"\"\"\n Gets the station-local night that includes the specified time.\n \n :Parameters:\n time : `datetime`\n the time whose night is to be gotten.\n \n The time may be either naive or aware. 
If the time\n is naive, it is assumed to be in the station's\n time zone.\n \n :Returns:\n the station-local night that includes the specified time, a `date`.\n \n The station-local night of a time is the starting date of the\n local 24-hour period starting at noon that contains the time.\n \"\"\"\n if time.tzinfo is not None:\n time = time.astimezone(self.time_zone)\n if time.hour < 12:\n time -= datetime.timedelta(hours=12)\n return time.date()\n", "step-3": "<mask token>\n\n\nclass Station(Named):\n \"\"\"Recording station.\"\"\"\n\n def __init__(self, name, long_name, time_zone_name, latitude=None,\n longitude=None, elevation=None):\n super().__init__(name)\n self._long_name = long_name\n self._time_zone = ZoneInfo(time_zone_name)\n self._latitude = latitude\n self._longitude = longitude\n self._elevation = elevation\n\n @property\n def long_name(self):\n return self._long_name\n\n @property\n def time_zone(self):\n return self._time_zone\n\n @property\n def latitude(self):\n return self._latitude\n\n @property\n def longitude(self):\n return self._longitude\n\n @property\n def elevation(self):\n return self._elevation\n\n def get_night(self, time):\n \"\"\"\n Gets the station-local night that includes the specified time.\n \n :Parameters:\n time : `datetime`\n the time whose night is to be gotten.\n \n The time may be either naive or aware. 
If the time\n is naive, it is assumed to be in the station's\n time zone.\n \n :Returns:\n the station-local night that includes the specified time, a `date`.\n \n The station-local night of a time is the starting date of the\n local 24-hour period starting at noon that contains the time.\n \"\"\"\n if time.tzinfo is not None:\n time = time.astimezone(self.time_zone)\n if time.hour < 12:\n time -= datetime.timedelta(hours=12)\n return time.date()\n", "step-4": "<mask token>\nfrom zoneinfo import ZoneInfo\nimport datetime\nfrom vesper.util.named import Named\n\n\nclass Station(Named):\n \"\"\"Recording station.\"\"\"\n\n def __init__(self, name, long_name, time_zone_name, latitude=None,\n longitude=None, elevation=None):\n super().__init__(name)\n self._long_name = long_name\n self._time_zone = ZoneInfo(time_zone_name)\n self._latitude = latitude\n self._longitude = longitude\n self._elevation = elevation\n\n @property\n def long_name(self):\n return self._long_name\n\n @property\n def time_zone(self):\n return self._time_zone\n\n @property\n def latitude(self):\n return self._latitude\n\n @property\n def longitude(self):\n return self._longitude\n\n @property\n def elevation(self):\n return self._elevation\n\n def get_night(self, time):\n \"\"\"\n Gets the station-local night that includes the specified time.\n \n :Parameters:\n time : `datetime`\n the time whose night is to be gotten.\n \n The time may be either naive or aware. 
If the time\n is naive, it is assumed to be in the station's\n time zone.\n \n :Returns:\n the station-local night that includes the specified time, a `date`.\n \n The station-local night of a time is the starting date of the\n local 24-hour period starting at noon that contains the time.\n \"\"\"\n if time.tzinfo is not None:\n time = time.astimezone(self.time_zone)\n if time.hour < 12:\n time -= datetime.timedelta(hours=12)\n return time.date()\n", "step-5": "\"\"\"Module containing class `Station`.\"\"\"\n\n\nfrom zoneinfo import ZoneInfo\nimport datetime\n\nfrom vesper.util.named import Named\n\n\nclass Station(Named):\n \n \"\"\"Recording station.\"\"\"\n \n \n def __init__(\n self, name, long_name, time_zone_name,\n latitude=None, longitude=None, elevation=None):\n \n super().__init__(name)\n self._long_name = long_name\n self._time_zone = ZoneInfo(time_zone_name)\n self._latitude = latitude\n self._longitude = longitude\n self._elevation = elevation\n \n \n @property\n def long_name(self):\n return self._long_name\n \n \n @property\n def time_zone(self):\n return self._time_zone\n \n \n @property\n def latitude(self):\n return self._latitude\n \n \n @property\n def longitude(self):\n return self._longitude\n \n \n @property\n def elevation(self):\n return self._elevation\n \n \n def get_night(self, time):\n \n \"\"\"\n Gets the station-local night that includes the specified time.\n \n :Parameters:\n time : `datetime`\n the time whose night is to be gotten.\n \n The time may be either naive or aware. 
If the time\n is naive, it is assumed to be in the station's\n time zone.\n \n :Returns:\n the station-local night that includes the specified time, a `date`.\n \n The station-local night of a time is the starting date of the\n local 24-hour period starting at noon that contains the time.\n \"\"\"\n \n if time.tzinfo is not None:\n # time is aware\n \n # convert time to station time zone\n time = time.astimezone(self.time_zone)\n \n if time.hour < 12:\n time -= datetime.timedelta(hours=12)\n \n return time.date()\n", "step-ids": [ 6, 7, 9, 10, 11 ] }
[ 6, 7, 9, 10, 11 ]
#!/usr/bin/python3 """City Module""" from models.base_model import BaseModel class City(BaseModel): """City Class Public class attributes: state_d: type string name: type string """ state_id = "" name = ""
normal
{ "blob_id": "3f2c1a83ae0dfdba202038a209b90162ccddee36", "index": 6115, "step-1": "<mask token>\n\n\nclass City(BaseModel):\n <mask token>\n <mask token>\n <mask token>\n", "step-2": "<mask token>\n\n\nclass City(BaseModel):\n <mask token>\n state_id = ''\n name = ''\n", "step-3": "<mask token>\n\n\nclass City(BaseModel):\n \"\"\"City Class\n Public class attributes:\n state_d: type string\n name: type string\n \"\"\"\n state_id = ''\n name = ''\n", "step-4": "<mask token>\nfrom models.base_model import BaseModel\n\n\nclass City(BaseModel):\n \"\"\"City Class\n Public class attributes:\n state_d: type string\n name: type string\n \"\"\"\n state_id = ''\n name = ''\n", "step-5": "#!/usr/bin/python3\n\"\"\"City Module\"\"\"\nfrom models.base_model import BaseModel\n\n\nclass City(BaseModel):\n \"\"\"City Class\n Public class attributes:\n state_d: type string\n name: type string\n \"\"\"\n state_id = \"\"\n name = \"\"\n", "step-ids": [ 1, 2, 3, 4, 5 ] }
[ 1, 2, 3, 4, 5 ]
import numpy import yfinance as yf import pandas as pd import path import math pd.options.mode.chained_assignment = None # default='warn' all_tickers = ['2020.OL', 'ABG.OL', 'ADE.OL', 'AFG.OL', 'AKAST.OL', 'AKER.OL', 'AKBM.OL', 'AKRBP.OL', 'AKH.OL', 'AKSO.OL', 'AKVA.OL', 'AMSC.OL', 'AQUA.OL', 'ARCH.OL', 'AZT.OL', 'ARCUS.OL', 'AFK.OL', 'ARR.OL', 'ASTK.OL', 'ATEA.OL', 'ASA.OL', 'AURG.OL', 'AUSS.OL', 'AGAS.OL', 'AWDR.OL', 'ACR.OL', 'B2H.OL', 'BAKKA.OL', 'BELCO.OL', 'BGBIO.OL', 'BEWI.OL', 'BONHR.OL', 'BOR.OL', 'BORR.OL', 'BRG.OL', 'BOUV.OL', 'BWE.OL', 'BWLPG.OL', 'BWO.OL', 'BMA.OL', 'CADLR.OL', 'CARA.OL', 'CONTX.OL', 'CRAYN.OL', 'DLTX.OL', 'DNB.OL', 'DNO.OL', 'DOF.OL', 'EIOF.OL', 'EMGS.OL', 'ELK.OL', 'ENDUR.OL', 'ENSU.OL', 'ENTRA.OL', 'EQNR.OL', 'EPR.OL', 'FJORD.OL', 'FKRFT.OL', 'FLNG.OL', 'FRO.OL', 'FROY.OL', 'GIG.OL', 'RISH.OL', 'GJF.OL', 'GOGL.OL', 'GOD.OL', 'GSF.OL', 'GYL.OL', 'HAFNI.OL', 'HAVI.OL', 'HYARD.OL', 'HEX.OL', 'HBC.OL', 'HSPG.OL', 'IDEX.OL', 'INFRO.OL', 'INSR.OL', 'IOX.OL', 'ITERA.OL', 'JIN.OL', 'JAREN.OL', 'KAHOT.OL', 'KID.OL', 'KIT.OL', 'KMCP.OL', 'KOMP.OL', 'KOA.OL', 'KOG.OL', 'LSG.OL', 'LINK.OL', 'MGN.OL', 'MSEIS.OL', 'MEDI.OL', 'MELG.OL', 'MOWI.OL', 'MPCC.OL', 'MULTI.OL', 'NAPA.OL', 'NAVA.OL', 'NKR.OL', 'NEL.OL', 'NEXT.OL', 'NORBT.OL', 'NANOV.OL', 'NOD.OL', 'NHY.OL', 'NSKOG.OL', 'NODL.OL', 'NOL.OL', 'NRS.OL', 'NAS.OL', 'NOR.OL', 'NOFI.OL', 'NPRO.OL', 'NRC.OL', 'NTS.OL', 'OCY.OL', 'OTS.OL', 'ODL.OL', 'ODF.OL', 'ODFB.OL', 'OKEA.OL', 'OET.OL', 'OLT.OL', 'ORK.OL', 'OTEC.OL', 'PEN.OL', 'PARB.OL', 'PCIB.OL', 'PSE.OL', 'PEXIP.OL', 'PGS.OL', 'PHO.OL', 'PLCS.OL', 'POL.OL', 'PLT.OL', 'PRS.OL', 'PROT.OL', 'QFR.OL', 'QEC.OL', 'RAKP.OL', 'REACH.OL', 'RECSI.OL', 'SAGA.OL', 'SALM.OL', 'SACAM.OL', 'SADG.OL', 'SASNO.OL', 'SATS.OL', 'SBANK.OL', 'SCANA.OL', 'SCATC.OL', 'SCHA.OL', 'SCHB.OL', 'SDSD.OL', 'SBX.OL', 'SDRL.OL', 'SSG.OL', 'SBO.OL', 'SHLF.OL', 'SIOFF.OL', 'SKUE.OL', 'SOGN.OL', 'SOLON.OL', 'SOFF.OL', 'MING.OL', 'SRBNK.OL', 'SOON.OL', 'MORG.OL', 'SOR.OL', 
'SVEG.OL', 'SPOG.OL', 'SNOR.OL', 'SPOL.OL', 'HELG.OL', 'NONG.OL', 'RING.OL', 'SOAG.OL', 'SNI.OL', 'STB.OL', 'STRO.OL', 'SUBC.OL', 'TRVX.OL', 'TECH.OL', 'TEL.OL', 'TGS.OL', 'TIETO.OL', 'TOM.OL', 'TOTG.OL', 'TRE.OL', 'ULTI.OL', 'VEI.OL', 'VISTN.OL', 'VOLUE.OL', 'VVL.OL', 'VOW.OL', 'WAWI.OL', 'WSTEP.OL', 'WWI.OL', 'WWIB.OL', 'WILS.OL', 'XXL.OL', 'YAR.OL', 'ZAL.OL'] #all_tickers = ['EQNR.OL', 'NHY.OL'] input_tickers = ['^VIX', 'BZ=F', '^TNX', 'NOK=X'] # + Volume, + 50/200 moving avg #small_cap_tickers = ['FKRFT.OL', 'PROT.OL'] def calculate_returns(ticker_data): returns_list = list() previous_ticker_day = None for ticker_day in ticker_data.itertuples(): if previous_ticker_day == None: returns_list.append( (ticker_day.Close - ticker_day.Open)/ticker_day.Open) else: # 'Adj Close' column will be named _5 by namedTuples returns_list.append( (ticker_day._5 - previous_ticker_day._5)/previous_ticker_day._5) previous_ticker_day = ticker_day return returns_list def add_moving_price_avg(ticker_data, days): counter = 0 moving_avg_price_list = list() unique_dates = list(ticker_data.index.unique()) for ticker_day in ticker_data.itertuples(): if(counter >= days): start = counter-days sub_dates = unique_dates[start : counter] sub_ticker_data = ticker_data[ticker_data.index.isin(sub_dates)] avg_price = sub_ticker_data['Adj Close'].mean() moving_avg_price_list.append(avg_price) counter +=1 else: moving_avg_price_list.append(numpy.nan) counter +=1 return moving_avg_price_list def add_year_column(df_ticker_data): data_frame_output = pd.DataFrame() dates = list(df_ticker_data.index.unique()) dates.sort() for date in dates: sub_date_data = df_ticker_data[df_ticker_data.index == date] sub_date_data['Year'] = date.year data_frame_output = pd.concat([data_frame_output, sub_date_data], ignore_index=False) return data_frame_output def scrape_ticker_data(cap_tickers): df_ticker_data = list() for ticker in cap_tickers: ticker_data = yf.download(ticker, group_by="Ticker", period='max') # add 
ticker column because the dataframe doesn't contain a column with the ticker ticker_data['Ticker'] = ticker # calculate and add returns column ticker_data['Returns'] = calculate_returns(ticker_data) # add moving avg ticker_data['avg_50'] = add_moving_price_avg(ticker_data, 50) ticker_data['avg_200'] = add_moving_price_avg(ticker_data, 200) df_ticker_data.append(ticker_data) df_concat = pd.concat(df_ticker_data) print("Adding Year column") return add_year_column(df_concat) def scrape_extra_input_data(input_tickers): df_ticker_data = list() for ticker in input_tickers: ticker_data = yf.download(ticker, group_by="Ticker", period='max') # add ticker column because the dataframe doesn't contain a column with the ticker ticker_data['Ticker'] = ticker df_ticker_data.append(ticker_data) df_concat = pd.concat(df_ticker_data) return df_concat def generate_finalized_input_data(input_data): input_finalized = pd.DataFrame() tickers = list(input_data.Ticker.unique()) for ticker in tickers: ticker_data = input_data[input_data.Ticker == ticker] ticker_name = str(ticker_data.iloc[0,-1]) print(ticker_name) ticker_data[ticker_name] = ticker_data['Adj Close'] ticker_data = ticker_data[[ticker_name]] if(input_finalized.empty): input_finalized = ticker_data else: input_finalized = pd.concat([input_finalized, ticker_data], axis=1) return input_finalized def merge_market_cap(df_output): # Reading csv-file using a relative path, based on the folder structure of the github project file_path = path.Path(__file__).parent / "../static/marketCapAllShares.csv" with file_path.open() as dataset_file: df_static_market_cap_per_year = pd.read_csv(dataset_file, delimiter=";") return pd.merge(df_output, df_static_market_cap_per_year, how='left', on=['Ticker', 'Year']).set_index(df_output.index) print("Scraping historic data for all tickers with size " + str(len(all_tickers))) df_scraped_data = scrape_ticker_data(all_tickers) print("Finished scraping data for all cap tickers") print("Scraping input data 
with size " + str(len(input_tickers))) df_scraped_input = scrape_extra_input_data(input_tickers) print("Concatinating adjusted close for all input data") df_finalized_input = generate_finalized_input_data(df_scraped_input) print("Merging market cap into existing dataframe") df_merged_with_market_cap = merge_market_cap(df_scraped_data) print("Merging existing dataframe with input data") df_finalized = pd.merge(df_merged_with_market_cap, df_finalized_input, how='left', left_index=True, right_index=True).set_index(df_merged_with_market_cap.index) # save to csv df_finalized.to_csv('scrapedData.csv')
normal
{ "blob_id": "22ffda3b2d84218af22bad7835689ec3d4959ab2", "index": 3660, "step-1": "<mask token>\n\n\ndef calculate_returns(ticker_data):\n returns_list = list()\n previous_ticker_day = None\n for ticker_day in ticker_data.itertuples():\n if previous_ticker_day == None:\n returns_list.append((ticker_day.Close - ticker_day.Open) /\n ticker_day.Open)\n else:\n returns_list.append((ticker_day._5 - previous_ticker_day._5) /\n previous_ticker_day._5)\n previous_ticker_day = ticker_day\n return returns_list\n\n\ndef add_moving_price_avg(ticker_data, days):\n counter = 0\n moving_avg_price_list = list()\n unique_dates = list(ticker_data.index.unique())\n for ticker_day in ticker_data.itertuples():\n if counter >= days:\n start = counter - days\n sub_dates = unique_dates[start:counter]\n sub_ticker_data = ticker_data[ticker_data.index.isin(sub_dates)]\n avg_price = sub_ticker_data['Adj Close'].mean()\n moving_avg_price_list.append(avg_price)\n counter += 1\n else:\n moving_avg_price_list.append(numpy.nan)\n counter += 1\n return moving_avg_price_list\n\n\ndef add_year_column(df_ticker_data):\n data_frame_output = pd.DataFrame()\n dates = list(df_ticker_data.index.unique())\n dates.sort()\n for date in dates:\n sub_date_data = df_ticker_data[df_ticker_data.index == date]\n sub_date_data['Year'] = date.year\n data_frame_output = pd.concat([data_frame_output, sub_date_data],\n ignore_index=False)\n return data_frame_output\n\n\ndef scrape_ticker_data(cap_tickers):\n df_ticker_data = list()\n for ticker in cap_tickers:\n ticker_data = yf.download(ticker, group_by='Ticker', period='max')\n ticker_data['Ticker'] = ticker\n ticker_data['Returns'] = calculate_returns(ticker_data)\n ticker_data['avg_50'] = add_moving_price_avg(ticker_data, 50)\n ticker_data['avg_200'] = add_moving_price_avg(ticker_data, 200)\n df_ticker_data.append(ticker_data)\n df_concat = pd.concat(df_ticker_data)\n print('Adding Year column')\n return add_year_column(df_concat)\n\n\ndef 
scrape_extra_input_data(input_tickers):\n df_ticker_data = list()\n for ticker in input_tickers:\n ticker_data = yf.download(ticker, group_by='Ticker', period='max')\n ticker_data['Ticker'] = ticker\n df_ticker_data.append(ticker_data)\n df_concat = pd.concat(df_ticker_data)\n return df_concat\n\n\ndef generate_finalized_input_data(input_data):\n input_finalized = pd.DataFrame()\n tickers = list(input_data.Ticker.unique())\n for ticker in tickers:\n ticker_data = input_data[input_data.Ticker == ticker]\n ticker_name = str(ticker_data.iloc[0, -1])\n print(ticker_name)\n ticker_data[ticker_name] = ticker_data['Adj Close']\n ticker_data = ticker_data[[ticker_name]]\n if input_finalized.empty:\n input_finalized = ticker_data\n else:\n input_finalized = pd.concat([input_finalized, ticker_data], axis=1)\n return input_finalized\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\ndef calculate_returns(ticker_data):\n returns_list = list()\n previous_ticker_day = None\n for ticker_day in ticker_data.itertuples():\n if previous_ticker_day == None:\n returns_list.append((ticker_day.Close - ticker_day.Open) /\n ticker_day.Open)\n else:\n returns_list.append((ticker_day._5 - previous_ticker_day._5) /\n previous_ticker_day._5)\n previous_ticker_day = ticker_day\n return returns_list\n\n\ndef add_moving_price_avg(ticker_data, days):\n counter = 0\n moving_avg_price_list = list()\n unique_dates = list(ticker_data.index.unique())\n for ticker_day in ticker_data.itertuples():\n if counter >= days:\n start = counter - days\n sub_dates = unique_dates[start:counter]\n sub_ticker_data = ticker_data[ticker_data.index.isin(sub_dates)]\n avg_price = sub_ticker_data['Adj Close'].mean()\n moving_avg_price_list.append(avg_price)\n counter += 1\n else:\n moving_avg_price_list.append(numpy.nan)\n counter += 1\n return moving_avg_price_list\n\n\ndef add_year_column(df_ticker_data):\n data_frame_output = pd.DataFrame()\n dates = list(df_ticker_data.index.unique())\n dates.sort()\n for date in 
dates:\n sub_date_data = df_ticker_data[df_ticker_data.index == date]\n sub_date_data['Year'] = date.year\n data_frame_output = pd.concat([data_frame_output, sub_date_data],\n ignore_index=False)\n return data_frame_output\n\n\ndef scrape_ticker_data(cap_tickers):\n df_ticker_data = list()\n for ticker in cap_tickers:\n ticker_data = yf.download(ticker, group_by='Ticker', period='max')\n ticker_data['Ticker'] = ticker\n ticker_data['Returns'] = calculate_returns(ticker_data)\n ticker_data['avg_50'] = add_moving_price_avg(ticker_data, 50)\n ticker_data['avg_200'] = add_moving_price_avg(ticker_data, 200)\n df_ticker_data.append(ticker_data)\n df_concat = pd.concat(df_ticker_data)\n print('Adding Year column')\n return add_year_column(df_concat)\n\n\ndef scrape_extra_input_data(input_tickers):\n df_ticker_data = list()\n for ticker in input_tickers:\n ticker_data = yf.download(ticker, group_by='Ticker', period='max')\n ticker_data['Ticker'] = ticker\n df_ticker_data.append(ticker_data)\n df_concat = pd.concat(df_ticker_data)\n return df_concat\n\n\ndef generate_finalized_input_data(input_data):\n input_finalized = pd.DataFrame()\n tickers = list(input_data.Ticker.unique())\n for ticker in tickers:\n ticker_data = input_data[input_data.Ticker == ticker]\n ticker_name = str(ticker_data.iloc[0, -1])\n print(ticker_name)\n ticker_data[ticker_name] = ticker_data['Adj Close']\n ticker_data = ticker_data[[ticker_name]]\n if input_finalized.empty:\n input_finalized = ticker_data\n else:\n input_finalized = pd.concat([input_finalized, ticker_data], axis=1)\n return input_finalized\n\n\ndef merge_market_cap(df_output):\n file_path = path.Path(__file__).parent / '../static/marketCapAllShares.csv'\n with file_path.open() as dataset_file:\n df_static_market_cap_per_year = pd.read_csv(dataset_file, delimiter=';'\n )\n return pd.merge(df_output, df_static_market_cap_per_year, how='left',\n on=['Ticker', 'Year']).set_index(df_output.index)\n\n\nprint('Scraping historic data for all 
tickers with size ' + str(len(\n all_tickers)))\n<mask token>\nprint('Finished scraping data for all cap tickers')\nprint('Scraping input data with size ' + str(len(input_tickers)))\n<mask token>\nprint('Concatinating adjusted close for all input data')\n<mask token>\nprint('Merging market cap into existing dataframe')\n<mask token>\nprint('Merging existing dataframe with input data')\n<mask token>\ndf_finalized.to_csv('scrapedData.csv')\n", "step-3": "<mask token>\npd.options.mode.chained_assignment = None\nall_tickers = ['2020.OL', 'ABG.OL', 'ADE.OL', 'AFG.OL', 'AKAST.OL',\n 'AKER.OL', 'AKBM.OL', 'AKRBP.OL', 'AKH.OL', 'AKSO.OL', 'AKVA.OL',\n 'AMSC.OL', 'AQUA.OL', 'ARCH.OL', 'AZT.OL', 'ARCUS.OL', 'AFK.OL',\n 'ARR.OL', 'ASTK.OL', 'ATEA.OL', 'ASA.OL', 'AURG.OL', 'AUSS.OL',\n 'AGAS.OL', 'AWDR.OL', 'ACR.OL', 'B2H.OL', 'BAKKA.OL', 'BELCO.OL',\n 'BGBIO.OL', 'BEWI.OL', 'BONHR.OL', 'BOR.OL', 'BORR.OL', 'BRG.OL',\n 'BOUV.OL', 'BWE.OL', 'BWLPG.OL', 'BWO.OL', 'BMA.OL', 'CADLR.OL',\n 'CARA.OL', 'CONTX.OL', 'CRAYN.OL', 'DLTX.OL', 'DNB.OL', 'DNO.OL',\n 'DOF.OL', 'EIOF.OL', 'EMGS.OL', 'ELK.OL', 'ENDUR.OL', 'ENSU.OL',\n 'ENTRA.OL', 'EQNR.OL', 'EPR.OL', 'FJORD.OL', 'FKRFT.OL', 'FLNG.OL',\n 'FRO.OL', 'FROY.OL', 'GIG.OL', 'RISH.OL', 'GJF.OL', 'GOGL.OL', 'GOD.OL',\n 'GSF.OL', 'GYL.OL', 'HAFNI.OL', 'HAVI.OL', 'HYARD.OL', 'HEX.OL',\n 'HBC.OL', 'HSPG.OL', 'IDEX.OL', 'INFRO.OL', 'INSR.OL', 'IOX.OL',\n 'ITERA.OL', 'JIN.OL', 'JAREN.OL', 'KAHOT.OL', 'KID.OL', 'KIT.OL',\n 'KMCP.OL', 'KOMP.OL', 'KOA.OL', 'KOG.OL', 'LSG.OL', 'LINK.OL', 'MGN.OL',\n 'MSEIS.OL', 'MEDI.OL', 'MELG.OL', 'MOWI.OL', 'MPCC.OL', 'MULTI.OL',\n 'NAPA.OL', 'NAVA.OL', 'NKR.OL', 'NEL.OL', 'NEXT.OL', 'NORBT.OL',\n 'NANOV.OL', 'NOD.OL', 'NHY.OL', 'NSKOG.OL', 'NODL.OL', 'NOL.OL',\n 'NRS.OL', 'NAS.OL', 'NOR.OL', 'NOFI.OL', 'NPRO.OL', 'NRC.OL', 'NTS.OL',\n 'OCY.OL', 'OTS.OL', 'ODL.OL', 'ODF.OL', 'ODFB.OL', 'OKEA.OL', 'OET.OL',\n 'OLT.OL', 'ORK.OL', 'OTEC.OL', 'PEN.OL', 'PARB.OL', 'PCIB.OL', 'PSE.OL',\n 'PEXIP.OL', 'PGS.OL', 
'PHO.OL', 'PLCS.OL', 'POL.OL', 'PLT.OL', 'PRS.OL',\n 'PROT.OL', 'QFR.OL', 'QEC.OL', 'RAKP.OL', 'REACH.OL', 'RECSI.OL',\n 'SAGA.OL', 'SALM.OL', 'SACAM.OL', 'SADG.OL', 'SASNO.OL', 'SATS.OL',\n 'SBANK.OL', 'SCANA.OL', 'SCATC.OL', 'SCHA.OL', 'SCHB.OL', 'SDSD.OL',\n 'SBX.OL', 'SDRL.OL', 'SSG.OL', 'SBO.OL', 'SHLF.OL', 'SIOFF.OL',\n 'SKUE.OL', 'SOGN.OL', 'SOLON.OL', 'SOFF.OL', 'MING.OL', 'SRBNK.OL',\n 'SOON.OL', 'MORG.OL', 'SOR.OL', 'SVEG.OL', 'SPOG.OL', 'SNOR.OL',\n 'SPOL.OL', 'HELG.OL', 'NONG.OL', 'RING.OL', 'SOAG.OL', 'SNI.OL',\n 'STB.OL', 'STRO.OL', 'SUBC.OL', 'TRVX.OL', 'TECH.OL', 'TEL.OL',\n 'TGS.OL', 'TIETO.OL', 'TOM.OL', 'TOTG.OL', 'TRE.OL', 'ULTI.OL',\n 'VEI.OL', 'VISTN.OL', 'VOLUE.OL', 'VVL.OL', 'VOW.OL', 'WAWI.OL',\n 'WSTEP.OL', 'WWI.OL', 'WWIB.OL', 'WILS.OL', 'XXL.OL', 'YAR.OL', 'ZAL.OL']\ninput_tickers = ['^VIX', 'BZ=F', '^TNX', 'NOK=X']\n\n\ndef calculate_returns(ticker_data):\n returns_list = list()\n previous_ticker_day = None\n for ticker_day in ticker_data.itertuples():\n if previous_ticker_day == None:\n returns_list.append((ticker_day.Close - ticker_day.Open) /\n ticker_day.Open)\n else:\n returns_list.append((ticker_day._5 - previous_ticker_day._5) /\n previous_ticker_day._5)\n previous_ticker_day = ticker_day\n return returns_list\n\n\ndef add_moving_price_avg(ticker_data, days):\n counter = 0\n moving_avg_price_list = list()\n unique_dates = list(ticker_data.index.unique())\n for ticker_day in ticker_data.itertuples():\n if counter >= days:\n start = counter - days\n sub_dates = unique_dates[start:counter]\n sub_ticker_data = ticker_data[ticker_data.index.isin(sub_dates)]\n avg_price = sub_ticker_data['Adj Close'].mean()\n moving_avg_price_list.append(avg_price)\n counter += 1\n else:\n moving_avg_price_list.append(numpy.nan)\n counter += 1\n return moving_avg_price_list\n\n\ndef add_year_column(df_ticker_data):\n data_frame_output = pd.DataFrame()\n dates = list(df_ticker_data.index.unique())\n dates.sort()\n for date in dates:\n sub_date_data = 
df_ticker_data[df_ticker_data.index == date]\n sub_date_data['Year'] = date.year\n data_frame_output = pd.concat([data_frame_output, sub_date_data],\n ignore_index=False)\n return data_frame_output\n\n\ndef scrape_ticker_data(cap_tickers):\n df_ticker_data = list()\n for ticker in cap_tickers:\n ticker_data = yf.download(ticker, group_by='Ticker', period='max')\n ticker_data['Ticker'] = ticker\n ticker_data['Returns'] = calculate_returns(ticker_data)\n ticker_data['avg_50'] = add_moving_price_avg(ticker_data, 50)\n ticker_data['avg_200'] = add_moving_price_avg(ticker_data, 200)\n df_ticker_data.append(ticker_data)\n df_concat = pd.concat(df_ticker_data)\n print('Adding Year column')\n return add_year_column(df_concat)\n\n\ndef scrape_extra_input_data(input_tickers):\n df_ticker_data = list()\n for ticker in input_tickers:\n ticker_data = yf.download(ticker, group_by='Ticker', period='max')\n ticker_data['Ticker'] = ticker\n df_ticker_data.append(ticker_data)\n df_concat = pd.concat(df_ticker_data)\n return df_concat\n\n\ndef generate_finalized_input_data(input_data):\n input_finalized = pd.DataFrame()\n tickers = list(input_data.Ticker.unique())\n for ticker in tickers:\n ticker_data = input_data[input_data.Ticker == ticker]\n ticker_name = str(ticker_data.iloc[0, -1])\n print(ticker_name)\n ticker_data[ticker_name] = ticker_data['Adj Close']\n ticker_data = ticker_data[[ticker_name]]\n if input_finalized.empty:\n input_finalized = ticker_data\n else:\n input_finalized = pd.concat([input_finalized, ticker_data], axis=1)\n return input_finalized\n\n\ndef merge_market_cap(df_output):\n file_path = path.Path(__file__).parent / '../static/marketCapAllShares.csv'\n with file_path.open() as dataset_file:\n df_static_market_cap_per_year = pd.read_csv(dataset_file, delimiter=';'\n )\n return pd.merge(df_output, df_static_market_cap_per_year, how='left',\n on=['Ticker', 'Year']).set_index(df_output.index)\n\n\nprint('Scraping historic data for all tickers with size ' + 
str(len(\n all_tickers)))\ndf_scraped_data = scrape_ticker_data(all_tickers)\nprint('Finished scraping data for all cap tickers')\nprint('Scraping input data with size ' + str(len(input_tickers)))\ndf_scraped_input = scrape_extra_input_data(input_tickers)\nprint('Concatinating adjusted close for all input data')\ndf_finalized_input = generate_finalized_input_data(df_scraped_input)\nprint('Merging market cap into existing dataframe')\ndf_merged_with_market_cap = merge_market_cap(df_scraped_data)\nprint('Merging existing dataframe with input data')\ndf_finalized = pd.merge(df_merged_with_market_cap, df_finalized_input, how=\n 'left', left_index=True, right_index=True).set_index(\n df_merged_with_market_cap.index)\ndf_finalized.to_csv('scrapedData.csv')\n", "step-4": "import numpy\nimport yfinance as yf\nimport pandas as pd\nimport path\nimport math\npd.options.mode.chained_assignment = None\nall_tickers = ['2020.OL', 'ABG.OL', 'ADE.OL', 'AFG.OL', 'AKAST.OL',\n 'AKER.OL', 'AKBM.OL', 'AKRBP.OL', 'AKH.OL', 'AKSO.OL', 'AKVA.OL',\n 'AMSC.OL', 'AQUA.OL', 'ARCH.OL', 'AZT.OL', 'ARCUS.OL', 'AFK.OL',\n 'ARR.OL', 'ASTK.OL', 'ATEA.OL', 'ASA.OL', 'AURG.OL', 'AUSS.OL',\n 'AGAS.OL', 'AWDR.OL', 'ACR.OL', 'B2H.OL', 'BAKKA.OL', 'BELCO.OL',\n 'BGBIO.OL', 'BEWI.OL', 'BONHR.OL', 'BOR.OL', 'BORR.OL', 'BRG.OL',\n 'BOUV.OL', 'BWE.OL', 'BWLPG.OL', 'BWO.OL', 'BMA.OL', 'CADLR.OL',\n 'CARA.OL', 'CONTX.OL', 'CRAYN.OL', 'DLTX.OL', 'DNB.OL', 'DNO.OL',\n 'DOF.OL', 'EIOF.OL', 'EMGS.OL', 'ELK.OL', 'ENDUR.OL', 'ENSU.OL',\n 'ENTRA.OL', 'EQNR.OL', 'EPR.OL', 'FJORD.OL', 'FKRFT.OL', 'FLNG.OL',\n 'FRO.OL', 'FROY.OL', 'GIG.OL', 'RISH.OL', 'GJF.OL', 'GOGL.OL', 'GOD.OL',\n 'GSF.OL', 'GYL.OL', 'HAFNI.OL', 'HAVI.OL', 'HYARD.OL', 'HEX.OL',\n 'HBC.OL', 'HSPG.OL', 'IDEX.OL', 'INFRO.OL', 'INSR.OL', 'IOX.OL',\n 'ITERA.OL', 'JIN.OL', 'JAREN.OL', 'KAHOT.OL', 'KID.OL', 'KIT.OL',\n 'KMCP.OL', 'KOMP.OL', 'KOA.OL', 'KOG.OL', 'LSG.OL', 'LINK.OL', 'MGN.OL',\n 'MSEIS.OL', 'MEDI.OL', 'MELG.OL', 'MOWI.OL', 'MPCC.OL', 
'MULTI.OL',\n 'NAPA.OL', 'NAVA.OL', 'NKR.OL', 'NEL.OL', 'NEXT.OL', 'NORBT.OL',\n 'NANOV.OL', 'NOD.OL', 'NHY.OL', 'NSKOG.OL', 'NODL.OL', 'NOL.OL',\n 'NRS.OL', 'NAS.OL', 'NOR.OL', 'NOFI.OL', 'NPRO.OL', 'NRC.OL', 'NTS.OL',\n 'OCY.OL', 'OTS.OL', 'ODL.OL', 'ODF.OL', 'ODFB.OL', 'OKEA.OL', 'OET.OL',\n 'OLT.OL', 'ORK.OL', 'OTEC.OL', 'PEN.OL', 'PARB.OL', 'PCIB.OL', 'PSE.OL',\n 'PEXIP.OL', 'PGS.OL', 'PHO.OL', 'PLCS.OL', 'POL.OL', 'PLT.OL', 'PRS.OL',\n 'PROT.OL', 'QFR.OL', 'QEC.OL', 'RAKP.OL', 'REACH.OL', 'RECSI.OL',\n 'SAGA.OL', 'SALM.OL', 'SACAM.OL', 'SADG.OL', 'SASNO.OL', 'SATS.OL',\n 'SBANK.OL', 'SCANA.OL', 'SCATC.OL', 'SCHA.OL', 'SCHB.OL', 'SDSD.OL',\n 'SBX.OL', 'SDRL.OL', 'SSG.OL', 'SBO.OL', 'SHLF.OL', 'SIOFF.OL',\n 'SKUE.OL', 'SOGN.OL', 'SOLON.OL', 'SOFF.OL', 'MING.OL', 'SRBNK.OL',\n 'SOON.OL', 'MORG.OL', 'SOR.OL', 'SVEG.OL', 'SPOG.OL', 'SNOR.OL',\n 'SPOL.OL', 'HELG.OL', 'NONG.OL', 'RING.OL', 'SOAG.OL', 'SNI.OL',\n 'STB.OL', 'STRO.OL', 'SUBC.OL', 'TRVX.OL', 'TECH.OL', 'TEL.OL',\n 'TGS.OL', 'TIETO.OL', 'TOM.OL', 'TOTG.OL', 'TRE.OL', 'ULTI.OL',\n 'VEI.OL', 'VISTN.OL', 'VOLUE.OL', 'VVL.OL', 'VOW.OL', 'WAWI.OL',\n 'WSTEP.OL', 'WWI.OL', 'WWIB.OL', 'WILS.OL', 'XXL.OL', 'YAR.OL', 'ZAL.OL']\ninput_tickers = ['^VIX', 'BZ=F', '^TNX', 'NOK=X']\n\n\ndef calculate_returns(ticker_data):\n returns_list = list()\n previous_ticker_day = None\n for ticker_day in ticker_data.itertuples():\n if previous_ticker_day == None:\n returns_list.append((ticker_day.Close - ticker_day.Open) /\n ticker_day.Open)\n else:\n returns_list.append((ticker_day._5 - previous_ticker_day._5) /\n previous_ticker_day._5)\n previous_ticker_day = ticker_day\n return returns_list\n\n\ndef add_moving_price_avg(ticker_data, days):\n counter = 0\n moving_avg_price_list = list()\n unique_dates = list(ticker_data.index.unique())\n for ticker_day in ticker_data.itertuples():\n if counter >= days:\n start = counter - days\n sub_dates = unique_dates[start:counter]\n sub_ticker_data = 
ticker_data[ticker_data.index.isin(sub_dates)]\n avg_price = sub_ticker_data['Adj Close'].mean()\n moving_avg_price_list.append(avg_price)\n counter += 1\n else:\n moving_avg_price_list.append(numpy.nan)\n counter += 1\n return moving_avg_price_list\n\n\ndef add_year_column(df_ticker_data):\n data_frame_output = pd.DataFrame()\n dates = list(df_ticker_data.index.unique())\n dates.sort()\n for date in dates:\n sub_date_data = df_ticker_data[df_ticker_data.index == date]\n sub_date_data['Year'] = date.year\n data_frame_output = pd.concat([data_frame_output, sub_date_data],\n ignore_index=False)\n return data_frame_output\n\n\ndef scrape_ticker_data(cap_tickers):\n df_ticker_data = list()\n for ticker in cap_tickers:\n ticker_data = yf.download(ticker, group_by='Ticker', period='max')\n ticker_data['Ticker'] = ticker\n ticker_data['Returns'] = calculate_returns(ticker_data)\n ticker_data['avg_50'] = add_moving_price_avg(ticker_data, 50)\n ticker_data['avg_200'] = add_moving_price_avg(ticker_data, 200)\n df_ticker_data.append(ticker_data)\n df_concat = pd.concat(df_ticker_data)\n print('Adding Year column')\n return add_year_column(df_concat)\n\n\ndef scrape_extra_input_data(input_tickers):\n df_ticker_data = list()\n for ticker in input_tickers:\n ticker_data = yf.download(ticker, group_by='Ticker', period='max')\n ticker_data['Ticker'] = ticker\n df_ticker_data.append(ticker_data)\n df_concat = pd.concat(df_ticker_data)\n return df_concat\n\n\ndef generate_finalized_input_data(input_data):\n input_finalized = pd.DataFrame()\n tickers = list(input_data.Ticker.unique())\n for ticker in tickers:\n ticker_data = input_data[input_data.Ticker == ticker]\n ticker_name = str(ticker_data.iloc[0, -1])\n print(ticker_name)\n ticker_data[ticker_name] = ticker_data['Adj Close']\n ticker_data = ticker_data[[ticker_name]]\n if input_finalized.empty:\n input_finalized = ticker_data\n else:\n input_finalized = pd.concat([input_finalized, ticker_data], axis=1)\n return 
input_finalized\n\n\ndef merge_market_cap(df_output):\n file_path = path.Path(__file__).parent / '../static/marketCapAllShares.csv'\n with file_path.open() as dataset_file:\n df_static_market_cap_per_year = pd.read_csv(dataset_file, delimiter=';'\n )\n return pd.merge(df_output, df_static_market_cap_per_year, how='left',\n on=['Ticker', 'Year']).set_index(df_output.index)\n\n\nprint('Scraping historic data for all tickers with size ' + str(len(\n all_tickers)))\ndf_scraped_data = scrape_ticker_data(all_tickers)\nprint('Finished scraping data for all cap tickers')\nprint('Scraping input data with size ' + str(len(input_tickers)))\ndf_scraped_input = scrape_extra_input_data(input_tickers)\nprint('Concatinating adjusted close for all input data')\ndf_finalized_input = generate_finalized_input_data(df_scraped_input)\nprint('Merging market cap into existing dataframe')\ndf_merged_with_market_cap = merge_market_cap(df_scraped_data)\nprint('Merging existing dataframe with input data')\ndf_finalized = pd.merge(df_merged_with_market_cap, df_finalized_input, how=\n 'left', left_index=True, right_index=True).set_index(\n df_merged_with_market_cap.index)\ndf_finalized.to_csv('scrapedData.csv')\n", "step-5": "import numpy\nimport yfinance as yf\nimport pandas as pd\nimport path\nimport math\npd.options.mode.chained_assignment = None # default='warn'\n\nall_tickers = ['2020.OL',\n 'ABG.OL',\n 'ADE.OL',\n 'AFG.OL',\n 'AKAST.OL',\n 'AKER.OL',\n 'AKBM.OL',\n 'AKRBP.OL',\n 'AKH.OL',\n 'AKSO.OL',\n 'AKVA.OL',\n 'AMSC.OL',\n 'AQUA.OL',\n 'ARCH.OL',\n 'AZT.OL',\n 'ARCUS.OL',\n 'AFK.OL',\n 'ARR.OL',\n 'ASTK.OL',\n 'ATEA.OL',\n 'ASA.OL',\n 'AURG.OL',\n 'AUSS.OL',\n 'AGAS.OL',\n 'AWDR.OL',\n 'ACR.OL',\n 'B2H.OL',\n 'BAKKA.OL',\n 'BELCO.OL',\n 'BGBIO.OL',\n 'BEWI.OL',\n 'BONHR.OL',\n 'BOR.OL',\n 'BORR.OL',\n 'BRG.OL',\n 'BOUV.OL',\n 'BWE.OL',\n 'BWLPG.OL',\n 'BWO.OL',\n 'BMA.OL',\n 'CADLR.OL',\n 'CARA.OL',\n 'CONTX.OL',\n 'CRAYN.OL',\n 'DLTX.OL',\n 'DNB.OL',\n 'DNO.OL',\n 'DOF.OL',\n 
'EIOF.OL',\n 'EMGS.OL',\n 'ELK.OL',\n 'ENDUR.OL',\n 'ENSU.OL',\n 'ENTRA.OL',\n 'EQNR.OL',\n 'EPR.OL',\n 'FJORD.OL',\n 'FKRFT.OL',\n 'FLNG.OL',\n 'FRO.OL',\n 'FROY.OL',\n 'GIG.OL',\n 'RISH.OL',\n 'GJF.OL',\n 'GOGL.OL',\n 'GOD.OL',\n 'GSF.OL',\n 'GYL.OL',\n 'HAFNI.OL',\n 'HAVI.OL',\n 'HYARD.OL',\n 'HEX.OL',\n 'HBC.OL',\n 'HSPG.OL',\n 'IDEX.OL',\n 'INFRO.OL',\n 'INSR.OL',\n 'IOX.OL',\n 'ITERA.OL',\n 'JIN.OL',\n 'JAREN.OL',\n 'KAHOT.OL',\n 'KID.OL',\n 'KIT.OL',\n 'KMCP.OL',\n 'KOMP.OL',\n 'KOA.OL',\n 'KOG.OL',\n 'LSG.OL',\n 'LINK.OL',\n 'MGN.OL',\n 'MSEIS.OL',\n 'MEDI.OL',\n 'MELG.OL',\n 'MOWI.OL',\n 'MPCC.OL',\n 'MULTI.OL',\n 'NAPA.OL',\n 'NAVA.OL',\n 'NKR.OL',\n 'NEL.OL',\n 'NEXT.OL',\n 'NORBT.OL',\n 'NANOV.OL',\n 'NOD.OL',\n 'NHY.OL',\n 'NSKOG.OL',\n 'NODL.OL',\n 'NOL.OL',\n 'NRS.OL',\n 'NAS.OL',\n 'NOR.OL',\n 'NOFI.OL',\n 'NPRO.OL',\n 'NRC.OL',\n 'NTS.OL',\n 'OCY.OL',\n 'OTS.OL',\n 'ODL.OL',\n 'ODF.OL',\n 'ODFB.OL',\n 'OKEA.OL',\n 'OET.OL',\n 'OLT.OL',\n 'ORK.OL',\n 'OTEC.OL',\n 'PEN.OL',\n 'PARB.OL',\n 'PCIB.OL',\n 'PSE.OL',\n 'PEXIP.OL',\n 'PGS.OL',\n 'PHO.OL',\n 'PLCS.OL',\n 'POL.OL',\n 'PLT.OL',\n 'PRS.OL',\n 'PROT.OL',\n 'QFR.OL',\n 'QEC.OL',\n 'RAKP.OL',\n 'REACH.OL',\n 'RECSI.OL',\n 'SAGA.OL',\n 'SALM.OL',\n 'SACAM.OL',\n 'SADG.OL',\n 'SASNO.OL',\n 'SATS.OL',\n 'SBANK.OL',\n 'SCANA.OL',\n 'SCATC.OL',\n 'SCHA.OL',\n 'SCHB.OL',\n 'SDSD.OL',\n 'SBX.OL',\n 'SDRL.OL',\n 'SSG.OL',\n 'SBO.OL',\n 'SHLF.OL',\n 'SIOFF.OL',\n 'SKUE.OL',\n 'SOGN.OL',\n 'SOLON.OL',\n 'SOFF.OL',\n 'MING.OL',\n 'SRBNK.OL',\n 'SOON.OL',\n 'MORG.OL',\n 'SOR.OL',\n 'SVEG.OL',\n 'SPOG.OL',\n 'SNOR.OL',\n 'SPOL.OL',\n 'HELG.OL',\n 'NONG.OL',\n 'RING.OL',\n 'SOAG.OL',\n 'SNI.OL',\n 'STB.OL',\n 'STRO.OL',\n 'SUBC.OL',\n 'TRVX.OL',\n 'TECH.OL',\n 'TEL.OL',\n 'TGS.OL',\n 'TIETO.OL',\n 'TOM.OL',\n 'TOTG.OL',\n 'TRE.OL',\n 'ULTI.OL',\n 'VEI.OL',\n 'VISTN.OL',\n 'VOLUE.OL',\n 'VVL.OL',\n 'VOW.OL',\n 'WAWI.OL',\n 'WSTEP.OL',\n 'WWI.OL',\n 'WWIB.OL',\n 'WILS.OL',\n 'XXL.OL',\n 'YAR.OL',\n 
'ZAL.OL']\n\n#all_tickers = ['EQNR.OL', 'NHY.OL']\ninput_tickers = ['^VIX', 'BZ=F', '^TNX', 'NOK=X'] # + Volume, + 50/200 moving avg\n#small_cap_tickers = ['FKRFT.OL', 'PROT.OL']\n\ndef calculate_returns(ticker_data):\n returns_list = list()\n previous_ticker_day = None\n for ticker_day in ticker_data.itertuples():\n if previous_ticker_day == None:\n returns_list.append(\n (ticker_day.Close - ticker_day.Open)/ticker_day.Open)\n else:\n # 'Adj Close' column will be named _5 by namedTuples\n returns_list.append(\n (ticker_day._5 - previous_ticker_day._5)/previous_ticker_day._5)\n previous_ticker_day = ticker_day\n return returns_list\n\ndef add_moving_price_avg(ticker_data, days):\n counter = 0\n moving_avg_price_list = list()\n unique_dates = list(ticker_data.index.unique())\n for ticker_day in ticker_data.itertuples():\n if(counter >= days):\n start = counter-days\n sub_dates = unique_dates[start : counter]\n sub_ticker_data = ticker_data[ticker_data.index.isin(sub_dates)]\n avg_price = sub_ticker_data['Adj Close'].mean()\n moving_avg_price_list.append(avg_price)\n counter +=1\n else:\n moving_avg_price_list.append(numpy.nan)\n counter +=1\n return moving_avg_price_list\n\ndef add_year_column(df_ticker_data):\n data_frame_output = pd.DataFrame()\n dates = list(df_ticker_data.index.unique())\n dates.sort()\n for date in dates:\n sub_date_data = df_ticker_data[df_ticker_data.index == date]\n sub_date_data['Year'] = date.year\n data_frame_output = pd.concat([data_frame_output, sub_date_data], ignore_index=False)\n return data_frame_output\n\ndef scrape_ticker_data(cap_tickers):\n df_ticker_data = list()\n for ticker in cap_tickers:\n ticker_data = yf.download(ticker, group_by=\"Ticker\", period='max')\n # add ticker column because the dataframe doesn't contain a column with the ticker\n ticker_data['Ticker'] = ticker\n # calculate and add returns column\n ticker_data['Returns'] = calculate_returns(ticker_data)\n # add moving avg\n ticker_data['avg_50'] = 
add_moving_price_avg(ticker_data, 50)\n ticker_data['avg_200'] = add_moving_price_avg(ticker_data, 200)\n df_ticker_data.append(ticker_data)\n df_concat = pd.concat(df_ticker_data)\n print(\"Adding Year column\")\n return add_year_column(df_concat)\n\ndef scrape_extra_input_data(input_tickers):\n df_ticker_data = list()\n for ticker in input_tickers:\n ticker_data = yf.download(ticker, group_by=\"Ticker\", period='max')\n # add ticker column because the dataframe doesn't contain a column with the ticker\n ticker_data['Ticker'] = ticker\n df_ticker_data.append(ticker_data)\n df_concat = pd.concat(df_ticker_data)\n return df_concat\n\ndef generate_finalized_input_data(input_data):\n input_finalized = pd.DataFrame()\n tickers = list(input_data.Ticker.unique())\n for ticker in tickers:\n ticker_data = input_data[input_data.Ticker == ticker]\n ticker_name = str(ticker_data.iloc[0,-1])\n print(ticker_name)\n ticker_data[ticker_name] = ticker_data['Adj Close']\n ticker_data = ticker_data[[ticker_name]]\n if(input_finalized.empty):\n input_finalized = ticker_data\n else:\n input_finalized = pd.concat([input_finalized, ticker_data], axis=1)\n return input_finalized\n\n\ndef merge_market_cap(df_output):\n # Reading csv-file using a relative path, based on the folder structure of the github project\n file_path = path.Path(__file__).parent / \"../static/marketCapAllShares.csv\"\n with file_path.open() as dataset_file:\n df_static_market_cap_per_year = pd.read_csv(dataset_file, delimiter=\";\")\n return pd.merge(df_output, df_static_market_cap_per_year, how='left', on=['Ticker', 'Year']).set_index(df_output.index)\n\nprint(\"Scraping historic data for all tickers with size \" + str(len(all_tickers)))\ndf_scraped_data = scrape_ticker_data(all_tickers)\nprint(\"Finished scraping data for all cap tickers\")\n\nprint(\"Scraping input data with size \" + str(len(input_tickers)))\ndf_scraped_input = scrape_extra_input_data(input_tickers)\nprint(\"Concatinating adjusted close for all 
input data\")\ndf_finalized_input = generate_finalized_input_data(df_scraped_input)\n\nprint(\"Merging market cap into existing dataframe\")\ndf_merged_with_market_cap = merge_market_cap(df_scraped_data)\n\nprint(\"Merging existing dataframe with input data\")\ndf_finalized = pd.merge(df_merged_with_market_cap, df_finalized_input, how='left', left_index=True, right_index=True).set_index(df_merged_with_market_cap.index)\n\n# save to csv\ndf_finalized.to_csv('scrapedData.csv')", "step-ids": [ 6, 8, 9, 10, 11 ] }
[ 6, 8, 9, 10, 11 ]
<|reserved_special_token_0|> class DehazeNet(nn.Module): <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class DehazeNet(nn.Module): def __init__(self, input=16, groups=4): super(DehazeNet, self).__init__() self.conv1 = nn.Conv2d(in_channels=3, out_channels=16, kernel_size=5) self.relu1 = nn.ReLU() self.conv2 = nn.Conv2d(in_channels=4, out_channels=16, kernel_size= 3, padding=1) self.relu2 = nn.ReLU() self.conv3 = nn.Conv2d(in_channels=4, out_channels=16, kernel_size= 5, padding=2) self.relu3 = nn.ReLU() self.conv4 = nn.Conv2d(in_channels=4, out_channels=16, kernel_size= 7, padding=3) self.relu4 = nn.ReLU() self.maxpool = nn.MaxPool2d(kernel_size=7, stride=1) self.conv5 = nn.Conv2d(in_channels=48, out_channels=1, kernel_size=6) <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class DehazeNet(nn.Module): def __init__(self, input=16, groups=4): super(DehazeNet, self).__init__() self.conv1 = nn.Conv2d(in_channels=3, out_channels=16, kernel_size=5) self.relu1 = nn.ReLU() self.conv2 = nn.Conv2d(in_channels=4, out_channels=16, kernel_size= 3, padding=1) self.relu2 = nn.ReLU() self.conv3 = nn.Conv2d(in_channels=4, out_channels=16, kernel_size= 5, padding=2) self.relu3 = nn.ReLU() self.conv4 = nn.Conv2d(in_channels=4, out_channels=16, kernel_size= 7, padding=3) self.relu4 = nn.ReLU() self.maxpool = nn.MaxPool2d(kernel_size=7, stride=1) self.conv5 = nn.Conv2d(in_channels=48, out_channels=1, kernel_size=6) def forward(self, x): out = self.conv1(x) out = self.relu1(out) max_1 = torch.max(out[:, 0:4, :, :], out[:, 4:8, :, :]) max_2 = torch.max(out[:, 8:12, :, :], out[:, 12:16, :, :]) out = torch.max(max_1, max_2) out1 = self.conv2(out) out1 = self.relu2(out1) out2 = self.conv3(out) out2 = self.relu3(out2) out3 = self.conv4(out) out3 = self.relu4(out3) y = torch.cat((out1, out2, out3), dim=1) y = self.maxpool(y) y = self.conv5(y) y = torch.max(y, torch.zeros(y.shape[0], 
y.shape[1], y.shape[2], y. shape[3]).cuda()) y = torch.min(y, torch.ones(y.shape[0], y.shape[1], y.shape[2], y. shape[3]).cuda()) return y <|reserved_special_token_1|> import torch import torch.nn as nn class DehazeNet(nn.Module): def __init__(self, input=16, groups=4): super(DehazeNet, self).__init__() self.conv1 = nn.Conv2d(in_channels=3, out_channels=16, kernel_size=5) self.relu1 = nn.ReLU() self.conv2 = nn.Conv2d(in_channels=4, out_channels=16, kernel_size= 3, padding=1) self.relu2 = nn.ReLU() self.conv3 = nn.Conv2d(in_channels=4, out_channels=16, kernel_size= 5, padding=2) self.relu3 = nn.ReLU() self.conv4 = nn.Conv2d(in_channels=4, out_channels=16, kernel_size= 7, padding=3) self.relu4 = nn.ReLU() self.maxpool = nn.MaxPool2d(kernel_size=7, stride=1) self.conv5 = nn.Conv2d(in_channels=48, out_channels=1, kernel_size=6) def forward(self, x): out = self.conv1(x) out = self.relu1(out) max_1 = torch.max(out[:, 0:4, :, :], out[:, 4:8, :, :]) max_2 = torch.max(out[:, 8:12, :, :], out[:, 12:16, :, :]) out = torch.max(max_1, max_2) out1 = self.conv2(out) out1 = self.relu2(out1) out2 = self.conv3(out) out2 = self.relu3(out2) out3 = self.conv4(out) out3 = self.relu4(out3) y = torch.cat((out1, out2, out3), dim=1) y = self.maxpool(y) y = self.conv5(y) y = torch.max(y, torch.zeros(y.shape[0], y.shape[1], y.shape[2], y. shape[3]).cuda()) y = torch.min(y, torch.ones(y.shape[0], y.shape[1], y.shape[2], y. 
shape[3]).cuda()) return y <|reserved_special_token_1|> import torch import torch.nn as nn class DehazeNet(nn.Module): def __init__(self, input=16, groups=4): super(DehazeNet, self).__init__() self.conv1 = nn.Conv2d(in_channels=3, out_channels=16, kernel_size=5) self.relu1 = nn.ReLU() self.conv2 = nn.Conv2d(in_channels=4, out_channels=16, kernel_size=3, padding=1) self.relu2 = nn.ReLU() self.conv3 = nn.Conv2d(in_channels=4, out_channels=16, kernel_size=5, padding=2) self.relu3 = nn.ReLU() self.conv4 = nn.Conv2d(in_channels=4, out_channels=16, kernel_size=7, padding=3) self.relu4 = nn.ReLU() self.maxpool = nn.MaxPool2d(kernel_size=7, stride=1) self.conv5 = nn.Conv2d(in_channels=48, out_channels=1, kernel_size=6) def forward(self, x): #feature extraction out = self.conv1(x) out = self.relu1(out) #maxout max_1 = torch.max(out[:,0:4,:,:],out[:,4:8,:,:]) max_2 = torch.max(out[:,8:12,:,:],out[:,12:16,:,:]) out = torch.max(max_1,max_2) #multi-scale Mapping out1 = self.conv2(out) out1 = self.relu2(out1) out2 = self.conv3(out) out2 = self.relu3(out2) out3 = self.conv4(out) out3 = self.relu4(out3) y = torch.cat((out1,out2,out3), dim=1) #Local Extremum y = self.maxpool(y) #non-linear Regression y = self.conv5(y) y = torch.max(y, torch.zeros(y.shape[0],y.shape[1],y.shape[2],y.shape[3]).cuda()) y = torch.min(y, torch.ones(y.shape[0],y.shape[1],y.shape[2],y.shape[3]).cuda()) return y
flexible
{ "blob_id": "a8cf8d0965cb877d50cee403fbc30f27484f4f36", "index": 8201, "step-1": "<mask token>\n\n\nclass DehazeNet(nn.Module):\n <mask token>\n <mask token>\n", "step-2": "<mask token>\n\n\nclass DehazeNet(nn.Module):\n\n def __init__(self, input=16, groups=4):\n super(DehazeNet, self).__init__()\n self.conv1 = nn.Conv2d(in_channels=3, out_channels=16, kernel_size=5)\n self.relu1 = nn.ReLU()\n self.conv2 = nn.Conv2d(in_channels=4, out_channels=16, kernel_size=\n 3, padding=1)\n self.relu2 = nn.ReLU()\n self.conv3 = nn.Conv2d(in_channels=4, out_channels=16, kernel_size=\n 5, padding=2)\n self.relu3 = nn.ReLU()\n self.conv4 = nn.Conv2d(in_channels=4, out_channels=16, kernel_size=\n 7, padding=3)\n self.relu4 = nn.ReLU()\n self.maxpool = nn.MaxPool2d(kernel_size=7, stride=1)\n self.conv5 = nn.Conv2d(in_channels=48, out_channels=1, kernel_size=6)\n <mask token>\n", "step-3": "<mask token>\n\n\nclass DehazeNet(nn.Module):\n\n def __init__(self, input=16, groups=4):\n super(DehazeNet, self).__init__()\n self.conv1 = nn.Conv2d(in_channels=3, out_channels=16, kernel_size=5)\n self.relu1 = nn.ReLU()\n self.conv2 = nn.Conv2d(in_channels=4, out_channels=16, kernel_size=\n 3, padding=1)\n self.relu2 = nn.ReLU()\n self.conv3 = nn.Conv2d(in_channels=4, out_channels=16, kernel_size=\n 5, padding=2)\n self.relu3 = nn.ReLU()\n self.conv4 = nn.Conv2d(in_channels=4, out_channels=16, kernel_size=\n 7, padding=3)\n self.relu4 = nn.ReLU()\n self.maxpool = nn.MaxPool2d(kernel_size=7, stride=1)\n self.conv5 = nn.Conv2d(in_channels=48, out_channels=1, kernel_size=6)\n\n def forward(self, x):\n out = self.conv1(x)\n out = self.relu1(out)\n max_1 = torch.max(out[:, 0:4, :, :], out[:, 4:8, :, :])\n max_2 = torch.max(out[:, 8:12, :, :], out[:, 12:16, :, :])\n out = torch.max(max_1, max_2)\n out1 = self.conv2(out)\n out1 = self.relu2(out1)\n out2 = self.conv3(out)\n out2 = self.relu3(out2)\n out3 = self.conv4(out)\n out3 = self.relu4(out3)\n y = torch.cat((out1, out2, out3), dim=1)\n y = 
self.maxpool(y)\n y = self.conv5(y)\n y = torch.max(y, torch.zeros(y.shape[0], y.shape[1], y.shape[2], y.\n shape[3]).cuda())\n y = torch.min(y, torch.ones(y.shape[0], y.shape[1], y.shape[2], y.\n shape[3]).cuda())\n return y\n", "step-4": "import torch\nimport torch.nn as nn\n\n\nclass DehazeNet(nn.Module):\n\n def __init__(self, input=16, groups=4):\n super(DehazeNet, self).__init__()\n self.conv1 = nn.Conv2d(in_channels=3, out_channels=16, kernel_size=5)\n self.relu1 = nn.ReLU()\n self.conv2 = nn.Conv2d(in_channels=4, out_channels=16, kernel_size=\n 3, padding=1)\n self.relu2 = nn.ReLU()\n self.conv3 = nn.Conv2d(in_channels=4, out_channels=16, kernel_size=\n 5, padding=2)\n self.relu3 = nn.ReLU()\n self.conv4 = nn.Conv2d(in_channels=4, out_channels=16, kernel_size=\n 7, padding=3)\n self.relu4 = nn.ReLU()\n self.maxpool = nn.MaxPool2d(kernel_size=7, stride=1)\n self.conv5 = nn.Conv2d(in_channels=48, out_channels=1, kernel_size=6)\n\n def forward(self, x):\n out = self.conv1(x)\n out = self.relu1(out)\n max_1 = torch.max(out[:, 0:4, :, :], out[:, 4:8, :, :])\n max_2 = torch.max(out[:, 8:12, :, :], out[:, 12:16, :, :])\n out = torch.max(max_1, max_2)\n out1 = self.conv2(out)\n out1 = self.relu2(out1)\n out2 = self.conv3(out)\n out2 = self.relu3(out2)\n out3 = self.conv4(out)\n out3 = self.relu4(out3)\n y = torch.cat((out1, out2, out3), dim=1)\n y = self.maxpool(y)\n y = self.conv5(y)\n y = torch.max(y, torch.zeros(y.shape[0], y.shape[1], y.shape[2], y.\n shape[3]).cuda())\n y = torch.min(y, torch.ones(y.shape[0], y.shape[1], y.shape[2], y.\n shape[3]).cuda())\n return y\n", "step-5": "import torch\nimport torch.nn as nn\n\nclass DehazeNet(nn.Module):\n def __init__(self, input=16, groups=4):\n super(DehazeNet, self).__init__()\n\n self.conv1 = nn.Conv2d(in_channels=3, out_channels=16, kernel_size=5)\n self.relu1 = nn.ReLU()\n\n self.conv2 = nn.Conv2d(in_channels=4, out_channels=16, kernel_size=3, padding=1)\n self.relu2 = nn.ReLU()\n self.conv3 = 
nn.Conv2d(in_channels=4, out_channels=16, kernel_size=5, padding=2)\n self.relu3 = nn.ReLU()\n self.conv4 = nn.Conv2d(in_channels=4, out_channels=16, kernel_size=7, padding=3)\n self.relu4 = nn.ReLU()\n self.maxpool = nn.MaxPool2d(kernel_size=7, stride=1)\n self.conv5 = nn.Conv2d(in_channels=48, out_channels=1, kernel_size=6)\n \n \n def forward(self, x):\n #feature extraction\n out = self.conv1(x)\n out = self.relu1(out)\n #maxout\n max_1 = torch.max(out[:,0:4,:,:],out[:,4:8,:,:])\n max_2 = torch.max(out[:,8:12,:,:],out[:,12:16,:,:])\n out = torch.max(max_1,max_2)\n\n #multi-scale Mapping\n out1 = self.conv2(out)\n out1 = self.relu2(out1)\n out2 = self.conv3(out)\n out2 = self.relu3(out2)\n out3 = self.conv4(out)\n out3 = self.relu4(out3)\n y = torch.cat((out1,out2,out3), dim=1)\n #Local Extremum\n y = self.maxpool(y)\n #non-linear Regression\n y = self.conv5(y)\n y = torch.max(y, torch.zeros(y.shape[0],y.shape[1],y.shape[2],y.shape[3]).cuda())\n y = torch.min(y, torch.ones(y.shape[0],y.shape[1],y.shape[2],y.shape[3]).cuda())\n return y", "step-ids": [ 1, 2, 3, 4, 5 ] }
[ 1, 2, 3, 4, 5 ]
import numpy as np import time import uuid from datetime import datetime log_host = "agent1" class State: def __init__(self, path, iterations): self.path = path self.iterations = iterations def run(self): assert 0, "run not implemented" class BruteForceAttackState(State): def run(self): os_val = np.random.choice(['Windows7', 'Windows10', 'Ubuntu16', 'MacOS10']) addr_val = np.random.choice(['127.0.0.6', '127.0.0.7', '127.0.0.13', '127.0.0.42']) for i in range(self.iterations): timestamp = datetime.now() log_id = uuid.uuid4() message = "Unsuccessful login attempt" os = os_val log_type = "Informational" host = log_host log_machine = addr_val log = str(timestamp)+"|"+str(log_id)+"|"+message+"|"+os+"|"+log_type+"|"+host+"|"+log_machine print(log) f = open(self.path, "a") f.write(log + "\n") f.close() time.sleep(0.2) class NoAlarmState(State): def run(self): for i in range(self.iterations): os_val = np.random.choice(['Windows7', 'Windows10', 'Ubuntu16', 'MacOS10']) addr_val = np.random.choice(['127.0.0.6', '127.0.0.7', '127.0.0.13', '127.0.0.42']) timestamp = datetime.now() log_id = uuid.uuid4() message = "Unsuccessful login attempt" os = os_val log_type = "Informational" host = log_host log_machine = addr_val log = str(timestamp)+"|"+str(log_id)+"|"+message+"|"+os+"|"+log_type+"|"+host+"|"+log_machine print(log) f = open(self.path, "a") f.write(log + "\n") f.close() time.sleep(1.5)
normal
{ "blob_id": "cf3b4e2c76091f95d24e8a987a63ece46503d6e8", "index": 3459, "step-1": "<mask token>\n\n\nclass BruteForceAttackState(State):\n\n def run(self):\n os_val = np.random.choice(['Windows7', 'Windows10', 'Ubuntu16',\n 'MacOS10'])\n addr_val = np.random.choice(['127.0.0.6', '127.0.0.7', '127.0.0.13',\n '127.0.0.42'])\n for i in range(self.iterations):\n timestamp = datetime.now()\n log_id = uuid.uuid4()\n message = 'Unsuccessful login attempt'\n os = os_val\n log_type = 'Informational'\n host = log_host\n log_machine = addr_val\n log = (str(timestamp) + '|' + str(log_id) + '|' + message + '|' +\n os + '|' + log_type + '|' + host + '|' + log_machine)\n print(log)\n f = open(self.path, 'a')\n f.write(log + '\\n')\n f.close()\n time.sleep(0.2)\n\n\nclass NoAlarmState(State):\n\n def run(self):\n for i in range(self.iterations):\n os_val = np.random.choice(['Windows7', 'Windows10', 'Ubuntu16',\n 'MacOS10'])\n addr_val = np.random.choice(['127.0.0.6', '127.0.0.7',\n '127.0.0.13', '127.0.0.42'])\n timestamp = datetime.now()\n log_id = uuid.uuid4()\n message = 'Unsuccessful login attempt'\n os = os_val\n log_type = 'Informational'\n host = log_host\n log_machine = addr_val\n log = (str(timestamp) + '|' + str(log_id) + '|' + message + '|' +\n os + '|' + log_type + '|' + host + '|' + log_machine)\n print(log)\n f = open(self.path, 'a')\n f.write(log + '\\n')\n f.close()\n time.sleep(1.5)\n", "step-2": "<mask token>\n\n\nclass State:\n\n def __init__(self, path, iterations):\n self.path = path\n self.iterations = iterations\n\n def run(self):\n assert 0, 'run not implemented'\n\n\nclass BruteForceAttackState(State):\n\n def run(self):\n os_val = np.random.choice(['Windows7', 'Windows10', 'Ubuntu16',\n 'MacOS10'])\n addr_val = np.random.choice(['127.0.0.6', '127.0.0.7', '127.0.0.13',\n '127.0.0.42'])\n for i in range(self.iterations):\n timestamp = datetime.now()\n log_id = uuid.uuid4()\n message = 'Unsuccessful login attempt'\n os = os_val\n log_type = 'Informational'\n 
host = log_host\n log_machine = addr_val\n log = (str(timestamp) + '|' + str(log_id) + '|' + message + '|' +\n os + '|' + log_type + '|' + host + '|' + log_machine)\n print(log)\n f = open(self.path, 'a')\n f.write(log + '\\n')\n f.close()\n time.sleep(0.2)\n\n\nclass NoAlarmState(State):\n\n def run(self):\n for i in range(self.iterations):\n os_val = np.random.choice(['Windows7', 'Windows10', 'Ubuntu16',\n 'MacOS10'])\n addr_val = np.random.choice(['127.0.0.6', '127.0.0.7',\n '127.0.0.13', '127.0.0.42'])\n timestamp = datetime.now()\n log_id = uuid.uuid4()\n message = 'Unsuccessful login attempt'\n os = os_val\n log_type = 'Informational'\n host = log_host\n log_machine = addr_val\n log = (str(timestamp) + '|' + str(log_id) + '|' + message + '|' +\n os + '|' + log_type + '|' + host + '|' + log_machine)\n print(log)\n f = open(self.path, 'a')\n f.write(log + '\\n')\n f.close()\n time.sleep(1.5)\n", "step-3": "<mask token>\nlog_host = 'agent1'\n\n\nclass State:\n\n def __init__(self, path, iterations):\n self.path = path\n self.iterations = iterations\n\n def run(self):\n assert 0, 'run not implemented'\n\n\nclass BruteForceAttackState(State):\n\n def run(self):\n os_val = np.random.choice(['Windows7', 'Windows10', 'Ubuntu16',\n 'MacOS10'])\n addr_val = np.random.choice(['127.0.0.6', '127.0.0.7', '127.0.0.13',\n '127.0.0.42'])\n for i in range(self.iterations):\n timestamp = datetime.now()\n log_id = uuid.uuid4()\n message = 'Unsuccessful login attempt'\n os = os_val\n log_type = 'Informational'\n host = log_host\n log_machine = addr_val\n log = (str(timestamp) + '|' + str(log_id) + '|' + message + '|' +\n os + '|' + log_type + '|' + host + '|' + log_machine)\n print(log)\n f = open(self.path, 'a')\n f.write(log + '\\n')\n f.close()\n time.sleep(0.2)\n\n\nclass NoAlarmState(State):\n\n def run(self):\n for i in range(self.iterations):\n os_val = np.random.choice(['Windows7', 'Windows10', 'Ubuntu16',\n 'MacOS10'])\n addr_val = np.random.choice(['127.0.0.6', 
'127.0.0.7',\n '127.0.0.13', '127.0.0.42'])\n timestamp = datetime.now()\n log_id = uuid.uuid4()\n message = 'Unsuccessful login attempt'\n os = os_val\n log_type = 'Informational'\n host = log_host\n log_machine = addr_val\n log = (str(timestamp) + '|' + str(log_id) + '|' + message + '|' +\n os + '|' + log_type + '|' + host + '|' + log_machine)\n print(log)\n f = open(self.path, 'a')\n f.write(log + '\\n')\n f.close()\n time.sleep(1.5)\n", "step-4": "import numpy as np\nimport time\nimport uuid\nfrom datetime import datetime\nlog_host = 'agent1'\n\n\nclass State:\n\n def __init__(self, path, iterations):\n self.path = path\n self.iterations = iterations\n\n def run(self):\n assert 0, 'run not implemented'\n\n\nclass BruteForceAttackState(State):\n\n def run(self):\n os_val = np.random.choice(['Windows7', 'Windows10', 'Ubuntu16',\n 'MacOS10'])\n addr_val = np.random.choice(['127.0.0.6', '127.0.0.7', '127.0.0.13',\n '127.0.0.42'])\n for i in range(self.iterations):\n timestamp = datetime.now()\n log_id = uuid.uuid4()\n message = 'Unsuccessful login attempt'\n os = os_val\n log_type = 'Informational'\n host = log_host\n log_machine = addr_val\n log = (str(timestamp) + '|' + str(log_id) + '|' + message + '|' +\n os + '|' + log_type + '|' + host + '|' + log_machine)\n print(log)\n f = open(self.path, 'a')\n f.write(log + '\\n')\n f.close()\n time.sleep(0.2)\n\n\nclass NoAlarmState(State):\n\n def run(self):\n for i in range(self.iterations):\n os_val = np.random.choice(['Windows7', 'Windows10', 'Ubuntu16',\n 'MacOS10'])\n addr_val = np.random.choice(['127.0.0.6', '127.0.0.7',\n '127.0.0.13', '127.0.0.42'])\n timestamp = datetime.now()\n log_id = uuid.uuid4()\n message = 'Unsuccessful login attempt'\n os = os_val\n log_type = 'Informational'\n host = log_host\n log_machine = addr_val\n log = (str(timestamp) + '|' + str(log_id) + '|' + message + '|' +\n os + '|' + log_type + '|' + host + '|' + log_machine)\n print(log)\n f = open(self.path, 'a')\n f.write(log + '\\n')\n 
f.close()\n time.sleep(1.5)\n", "step-5": "import numpy as np\nimport time\nimport uuid\nfrom datetime import datetime\n\n\nlog_host = \"agent1\"\n\n\nclass State:\n def __init__(self, path, iterations):\n self.path = path\n self.iterations = iterations\n\n def run(self):\n assert 0, \"run not implemented\"\n\n\nclass BruteForceAttackState(State):\n def run(self):\n os_val = np.random.choice(['Windows7', 'Windows10', 'Ubuntu16', 'MacOS10'])\n addr_val = np.random.choice(['127.0.0.6', '127.0.0.7', '127.0.0.13', '127.0.0.42'])\n for i in range(self.iterations):\n timestamp = datetime.now()\n log_id = uuid.uuid4()\n message = \"Unsuccessful login attempt\"\n os = os_val\n log_type = \"Informational\"\n host = log_host\n log_machine = addr_val\n\n log = str(timestamp)+\"|\"+str(log_id)+\"|\"+message+\"|\"+os+\"|\"+log_type+\"|\"+host+\"|\"+log_machine\n print(log)\n\n f = open(self.path, \"a\")\n f.write(log + \"\\n\")\n f.close()\n time.sleep(0.2)\n\n\nclass NoAlarmState(State):\n def run(self):\n for i in range(self.iterations):\n os_val = np.random.choice(['Windows7', 'Windows10', 'Ubuntu16', 'MacOS10'])\n addr_val = np.random.choice(['127.0.0.6', '127.0.0.7', '127.0.0.13', '127.0.0.42'])\n timestamp = datetime.now()\n log_id = uuid.uuid4()\n message = \"Unsuccessful login attempt\"\n os = os_val\n log_type = \"Informational\"\n host = log_host\n log_machine = addr_val\n\n log = str(timestamp)+\"|\"+str(log_id)+\"|\"+message+\"|\"+os+\"|\"+log_type+\"|\"+host+\"|\"+log_machine\n print(log)\n\n f = open(self.path, \"a\")\n f.write(log + \"\\n\")\n f.close()\n time.sleep(1.5)\n", "step-ids": [ 4, 7, 8, 9, 10 ] }
[ 4, 7, 8, 9, 10 ]
# coding: UTF-8 import fileinput import io from locale import str import os __author__ = 'lidong' def getDirList( p ): p = p.replace( "/","\\") if p[ -1] != "\\": p = p+"\\" a = os.listdir( p ) for x in a: if(os.path.isfile( p + x )): a, b = os.path.splitext( p + x ) if(0<b.find("bak")): print (p + x) os.remove( p + x) elif(os.path.isdir( p + x )): #.svn if(0<( p + x ).find(".svn")): for (p,d,f) in os.walk( p + x): if p.find('.svn')>0: print (p + x) os.popen('rd /s /q %s'%p) else : getDirList(p + x) def createFile( f ): if(os.path.isfile(f)): a_file = io.open( f, encoding='utf-8') print(a_file.readline()) else : return while 1==1: print ( getDirList( "D:\project" ) )
normal
{ "blob_id": "e553da92b1bb5dfaa0fb7c702f5be4f66201c75b", "index": 8843, "step-1": "<mask token>\n\n\ndef getDirList(p):\n p = p.replace('/', '\\\\')\n if p[-1] != '\\\\':\n p = p + '\\\\'\n a = os.listdir(p)\n for x in a:\n if os.path.isfile(p + x):\n a, b = os.path.splitext(p + x)\n if 0 < b.find('bak'):\n print(p + x)\n os.remove(p + x)\n elif os.path.isdir(p + x):\n if 0 < (p + x).find('.svn'):\n for p, d, f in os.walk(p + x):\n if p.find('.svn') > 0:\n print(p + x)\n os.popen('rd /s /q %s' % p)\n else:\n getDirList(p + x)\n\n\ndef createFile(f):\n if os.path.isfile(f):\n a_file = io.open(f, encoding='utf-8')\n print(a_file.readline())\n else:\n return\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\ndef getDirList(p):\n p = p.replace('/', '\\\\')\n if p[-1] != '\\\\':\n p = p + '\\\\'\n a = os.listdir(p)\n for x in a:\n if os.path.isfile(p + x):\n a, b = os.path.splitext(p + x)\n if 0 < b.find('bak'):\n print(p + x)\n os.remove(p + x)\n elif os.path.isdir(p + x):\n if 0 < (p + x).find('.svn'):\n for p, d, f in os.walk(p + x):\n if p.find('.svn') > 0:\n print(p + x)\n os.popen('rd /s /q %s' % p)\n else:\n getDirList(p + x)\n\n\ndef createFile(f):\n if os.path.isfile(f):\n a_file = io.open(f, encoding='utf-8')\n print(a_file.readline())\n else:\n return\n\n\nwhile 1 == 1:\n print(getDirList('D:\\\\project'))\n", "step-3": "<mask token>\n__author__ = 'lidong'\n\n\ndef getDirList(p):\n p = p.replace('/', '\\\\')\n if p[-1] != '\\\\':\n p = p + '\\\\'\n a = os.listdir(p)\n for x in a:\n if os.path.isfile(p + x):\n a, b = os.path.splitext(p + x)\n if 0 < b.find('bak'):\n print(p + x)\n os.remove(p + x)\n elif os.path.isdir(p + x):\n if 0 < (p + x).find('.svn'):\n for p, d, f in os.walk(p + x):\n if p.find('.svn') > 0:\n print(p + x)\n os.popen('rd /s /q %s' % p)\n else:\n getDirList(p + x)\n\n\ndef createFile(f):\n if os.path.isfile(f):\n a_file = io.open(f, encoding='utf-8')\n print(a_file.readline())\n else:\n return\n\n\nwhile 1 == 1:\n 
print(getDirList('D:\\\\project'))\n", "step-4": "import fileinput\nimport io\nfrom locale import str\nimport os\n__author__ = 'lidong'\n\n\ndef getDirList(p):\n p = p.replace('/', '\\\\')\n if p[-1] != '\\\\':\n p = p + '\\\\'\n a = os.listdir(p)\n for x in a:\n if os.path.isfile(p + x):\n a, b = os.path.splitext(p + x)\n if 0 < b.find('bak'):\n print(p + x)\n os.remove(p + x)\n elif os.path.isdir(p + x):\n if 0 < (p + x).find('.svn'):\n for p, d, f in os.walk(p + x):\n if p.find('.svn') > 0:\n print(p + x)\n os.popen('rd /s /q %s' % p)\n else:\n getDirList(p + x)\n\n\ndef createFile(f):\n if os.path.isfile(f):\n a_file = io.open(f, encoding='utf-8')\n print(a_file.readline())\n else:\n return\n\n\nwhile 1 == 1:\n print(getDirList('D:\\\\project'))\n", "step-5": "# coding: UTF-8\nimport fileinput\nimport io\nfrom locale import str\nimport os\n\n__author__ = 'lidong'\n\n\ndef getDirList( p ):\n p = p.replace( \"/\",\"\\\\\")\n if p[ -1] != \"\\\\\":\n p = p+\"\\\\\"\n a = os.listdir( p )\n for x in a:\n if(os.path.isfile( p + x )):\n a, b = os.path.splitext( p + x )\n if(0<b.find(\"bak\")):\n print (p + x)\n os.remove( p + x)\n elif(os.path.isdir( p + x )): #.svn\n if(0<( p + x ).find(\".svn\")):\n for (p,d,f) in os.walk( p + x):\n if p.find('.svn')>0:\n print (p + x)\n os.popen('rd /s /q %s'%p)\n else :\n getDirList(p + x)\n\ndef createFile( f ):\n if(os.path.isfile(f)):\n a_file = io.open( f, encoding='utf-8')\n print(a_file.readline())\n else :\n\n return\n\nwhile 1==1:\n print ( getDirList( \"D:\\project\" ) )\n", "step-ids": [ 2, 3, 4, 5, 6 ] }
[ 2, 3, 4, 5, 6 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> def kind(): data = {} with open('dataset.json', 'r') as read_file: data = json.load(read_file) return data['kind'] <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> def kind(): data = {} with open('dataset.json', 'r') as read_file: data = json.load(read_file) return data['kind'] def items(): data = {} with open('dataset.json', 'r') as read_file: data = json.load(read_file) return data['items'] <|reserved_special_token_1|> import urllib.request import json def kind(): data = {} with open('dataset.json', 'r') as read_file: data = json.load(read_file) return data['kind'] def items(): data = {} with open('dataset.json', 'r') as read_file: data = json.load(read_file) return data['items'] <|reserved_special_token_1|> import urllib.request import json def kind(): data={} with open("dataset.json", "r") as read_file: data = json.load(read_file) return data["kind"] def items(): data={} with open("dataset.json", "r") as read_file: data = json.load(read_file) return data["items"] #Can add a bunch of other things after refering to data
flexible
{ "blob_id": "630480e9458491a26ea9060bd36541a0d5805a11", "index": 647, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\ndef kind():\n data = {}\n with open('dataset.json', 'r') as read_file:\n data = json.load(read_file)\n return data['kind']\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\ndef kind():\n data = {}\n with open('dataset.json', 'r') as read_file:\n data = json.load(read_file)\n return data['kind']\n\n\ndef items():\n data = {}\n with open('dataset.json', 'r') as read_file:\n data = json.load(read_file)\n return data['items']\n", "step-4": "import urllib.request\nimport json\n\n\ndef kind():\n data = {}\n with open('dataset.json', 'r') as read_file:\n data = json.load(read_file)\n return data['kind']\n\n\ndef items():\n data = {}\n with open('dataset.json', 'r') as read_file:\n data = json.load(read_file)\n return data['items']\n", "step-5": "import urllib.request\nimport json\n\ndef kind():\n data={}\n with open(\"dataset.json\", \"r\") as read_file:\n data = json.load(read_file)\n return data[\"kind\"]\n\ndef items():\n data={}\n with open(\"dataset.json\", \"r\") as read_file:\n data = json.load(read_file)\n return data[\"items\"]\n\n#Can add a bunch of other things after refering to data\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> def mais_populoso(dic): p = 0 sp = 0 for t, i in dic.items(): for m in dic[t].values(): p += m if p > sp: sp = p x = t return x
flexible
{ "blob_id": "2cbce618d1ec617d1c7dc0e9792b6a49361ec5a4", "index": 13, "step-1": "<mask token>\n", "step-2": "def mais_populoso(dic):\n p = 0\n sp = 0\n for t, i in dic.items():\n for m in dic[t].values():\n p += m\n if p > sp:\n sp = p\n x = t\n return x\n", "step-3": null, "step-4": null, "step-5": null, "step-ids": [ 0, 1 ] }
[ 0, 1 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> print("Usa el punto '.' para los decimales") for contador in range(1, numalumnos + 1): print(f'\nDatos del alumno número {contador} de {numalumnos}:') teorica = float(input('- Introduce la nota de la parte teórica: ')) practica = float(input('- Introduce la nota de la parte practica: ')) nota = teorica * 60 / 100 + practica * 40 / 100 print(f'La nota final del alumno número {contador} es {nota:.2f}.\n') print('Ya se han calculado todas las notas.') <|reserved_special_token_1|> numalumnos = int(input('Introduce el número total de alumnos:\n')) print("Usa el punto '.' para los decimales") for contador in range(1, numalumnos + 1): print(f'\nDatos del alumno número {contador} de {numalumnos}:') teorica = float(input('- Introduce la nota de la parte teórica: ')) practica = float(input('- Introduce la nota de la parte practica: ')) nota = teorica * 60 / 100 + practica * 40 / 100 print(f'La nota final del alumno número {contador} es {nota:.2f}.\n') print('Ya se han calculado todas las notas.') <|reserved_special_token_1|> # Ejercicio 28 - Hoja VI (5) - Indicar la nota ponderada según el criterio dado # (parte teórica 60%, práctica 40%) de cada uno de un número determinado de alumnos numalumnos=int(input("Introduce el número total de alumnos:\n")) print("Usa el punto '.' para los decimales") for contador in range(1,numalumnos+1): print(f"\nDatos del alumno número {contador} de {numalumnos}:") teorica=float(input("- Introduce la nota de la parte teórica: ")) practica=float(input("- Introduce la nota de la parte practica: ")) nota=(teorica*60/100)+(practica*40/100) print(f"La nota final del alumno número {contador} es {nota:.2f}.\n") print("Ya se han calculado todas las notas.")
flexible
{ "blob_id": "f2056ff46ce6e38c3b6ca553bbdec7f59d60b198", "index": 1417, "step-1": "<mask token>\n", "step-2": "<mask token>\nprint(\"Usa el punto '.' para los decimales\")\nfor contador in range(1, numalumnos + 1):\n print(f'\\nDatos del alumno número {contador} de {numalumnos}:')\n teorica = float(input('- Introduce la nota de la parte teórica: '))\n practica = float(input('- Introduce la nota de la parte practica: '))\n nota = teorica * 60 / 100 + practica * 40 / 100\n print(f'La nota final del alumno número {contador} es {nota:.2f}.\\n')\nprint('Ya se han calculado todas las notas.')\n", "step-3": "numalumnos = int(input('Introduce el número total de alumnos:\\n'))\nprint(\"Usa el punto '.' para los decimales\")\nfor contador in range(1, numalumnos + 1):\n print(f'\\nDatos del alumno número {contador} de {numalumnos}:')\n teorica = float(input('- Introduce la nota de la parte teórica: '))\n practica = float(input('- Introduce la nota de la parte practica: '))\n nota = teorica * 60 / 100 + practica * 40 / 100\n print(f'La nota final del alumno número {contador} es {nota:.2f}.\\n')\nprint('Ya se han calculado todas las notas.')\n", "step-4": "# Ejercicio 28 - Hoja VI (5) - Indicar la nota ponderada según el criterio dado\n# (parte teórica 60%, práctica 40%) de cada uno de un número determinado de alumnos\n\nnumalumnos=int(input(\"Introduce el número total de alumnos:\\n\"))\nprint(\"Usa el punto '.' para los decimales\")\nfor contador in range(1,numalumnos+1):\n print(f\"\\nDatos del alumno número {contador} de {numalumnos}:\")\n teorica=float(input(\"- Introduce la nota de la parte teórica: \"))\n practica=float(input(\"- Introduce la nota de la parte practica: \"))\n nota=(teorica*60/100)+(practica*40/100)\n print(f\"La nota final del alumno número {contador} es {nota:.2f}.\\n\")\nprint(\"Ya se han calculado todas las notas.\")\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
<|reserved_special_token_0|> class GeneralizedExtremeValueUncertaintyTestCase(UncertaintyTestCase): def test_random_variables(self): params = self.make_params_array() params['loc'] = 2 params['scale'] = 5 expected_median = 2 - 5 * np.log(np.log(2)) results = GEVU.random_variables(params, 10000) found_median = np.median(results) self.assertEqual(results.shape, (1, 10000)) self.assertTrue(0.95 * expected_median < found_median) self.assertTrue(found_median < 1.05 * expected_median) <|reserved_special_token_0|> def test_scale_validation(self): params = self.make_params_array() params['scale'] = -1 self.assertRaises(InvalidParamsError, GEVU.validate, params) def test_shape_validation(self): params = self.make_params_array() params['shape'] = 1 self.assertRaises(InvalidParamsError, GEVU.validate, params) <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class GeneralizedExtremeValueUncertaintyTestCase(UncertaintyTestCase): def test_random_variables(self): params = self.make_params_array() params['loc'] = 2 params['scale'] = 5 expected_median = 2 - 5 * np.log(np.log(2)) results = GEVU.random_variables(params, 10000) found_median = np.median(results) self.assertEqual(results.shape, (1, 10000)) self.assertTrue(0.95 * expected_median < found_median) self.assertTrue(found_median < 1.05 * expected_median) def test_loc_validation(self): params = self.make_params_array() params['loc'] = np.NaN self.assertRaises(InvalidParamsError, GEVU.validate, params) def test_scale_validation(self): params = self.make_params_array() params['scale'] = -1 self.assertRaises(InvalidParamsError, GEVU.validate, params) def test_shape_validation(self): params = self.make_params_array() params['shape'] = 1 self.assertRaises(InvalidParamsError, GEVU.validate, params) <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class GeneralizedExtremeValueUncertaintyTestCase(UncertaintyTestCase): def test_random_variables(self): params = 
self.make_params_array() params['loc'] = 2 params['scale'] = 5 expected_median = 2 - 5 * np.log(np.log(2)) results = GEVU.random_variables(params, 10000) found_median = np.median(results) self.assertEqual(results.shape, (1, 10000)) self.assertTrue(0.95 * expected_median < found_median) self.assertTrue(found_median < 1.05 * expected_median) def test_loc_validation(self): params = self.make_params_array() params['loc'] = np.NaN self.assertRaises(InvalidParamsError, GEVU.validate, params) def test_scale_validation(self): params = self.make_params_array() params['scale'] = -1 self.assertRaises(InvalidParamsError, GEVU.validate, params) def test_shape_validation(self): params = self.make_params_array() params['shape'] = 1 self.assertRaises(InvalidParamsError, GEVU.validate, params) def make_params_array(self, length=1): assert isinstance(length, int) params = np.zeros((length,), dtype=[('input', 'u4'), ('output', 'u4'), ('loc', 'f4'), ('negative', 'b1'), ('scale', 'f4'), ( 'shape', 'f4'), ('minimum', 'f4'), ('maximum', 'f4')]) params['minimum'] = params['maximum'] = np.NaN params['loc'] = params['scale'] = 1 return params <|reserved_special_token_1|> from stats_arrays.distributions import GeneralizedExtremeValueUncertainty as GEVU from stats_arrays.errors import InvalidParamsError from ..base import UncertaintyTestCase import numpy as np class GeneralizedExtremeValueUncertaintyTestCase(UncertaintyTestCase): def test_random_variables(self): params = self.make_params_array() params['loc'] = 2 params['scale'] = 5 expected_median = 2 - 5 * np.log(np.log(2)) results = GEVU.random_variables(params, 10000) found_median = np.median(results) self.assertEqual(results.shape, (1, 10000)) self.assertTrue(0.95 * expected_median < found_median) self.assertTrue(found_median < 1.05 * expected_median) def test_loc_validation(self): params = self.make_params_array() params['loc'] = np.NaN self.assertRaises(InvalidParamsError, GEVU.validate, params) def test_scale_validation(self): params 
= self.make_params_array() params['scale'] = -1 self.assertRaises(InvalidParamsError, GEVU.validate, params) def test_shape_validation(self): params = self.make_params_array() params['shape'] = 1 self.assertRaises(InvalidParamsError, GEVU.validate, params) def make_params_array(self, length=1): assert isinstance(length, int) params = np.zeros((length,), dtype=[('input', 'u4'), ('output', 'u4'), ('loc', 'f4'), ('negative', 'b1'), ('scale', 'f4'), ( 'shape', 'f4'), ('minimum', 'f4'), ('maximum', 'f4')]) params['minimum'] = params['maximum'] = np.NaN params['loc'] = params['scale'] = 1 return params <|reserved_special_token_1|> from stats_arrays.distributions import GeneralizedExtremeValueUncertainty as GEVU from stats_arrays.errors import InvalidParamsError from ..base import UncertaintyTestCase import numpy as np class GeneralizedExtremeValueUncertaintyTestCase(UncertaintyTestCase): def test_random_variables(self): params = self.make_params_array() params['loc'] = 2 params['scale'] = 5 # Formula for median (loc - scale * ln ln 2) expected_median = 2 - 5 * np.log(np.log(2)) results = GEVU.random_variables(params, 10000) found_median = np.median(results) self.assertEqual(results.shape, (1, 10000)) self.assertTrue(0.95 * expected_median < found_median) self.assertTrue(found_median < 1.05 * expected_median) def test_loc_validation(self): params = self.make_params_array() params['loc'] = np.NaN self.assertRaises( InvalidParamsError, GEVU.validate, params ) def test_scale_validation(self): params = self.make_params_array() params['scale'] = -1 self.assertRaises( InvalidParamsError, GEVU.validate, params ) def test_shape_validation(self): params = self.make_params_array() params['shape'] = 1 self.assertRaises( InvalidParamsError, GEVU.validate, params ) def make_params_array(self, length=1): assert isinstance(length, int) params = np.zeros((length,), dtype=[ ('input', 'u4'), ('output', 'u4'), ('loc', 'f4'), ('negative', 'b1'), ('scale', 'f4'), ('shape', 'f4'), ('minimum', 
'f4'), ('maximum', 'f4') ]) params['minimum'] = params['maximum'] = np.NaN params['loc'] = params['scale'] = 1 return params
flexible
{ "blob_id": "997c1c86848b59a3986a579d5b1b50313fdfdf44", "index": 8161, "step-1": "<mask token>\n\n\nclass GeneralizedExtremeValueUncertaintyTestCase(UncertaintyTestCase):\n\n def test_random_variables(self):\n params = self.make_params_array()\n params['loc'] = 2\n params['scale'] = 5\n expected_median = 2 - 5 * np.log(np.log(2))\n results = GEVU.random_variables(params, 10000)\n found_median = np.median(results)\n self.assertEqual(results.shape, (1, 10000))\n self.assertTrue(0.95 * expected_median < found_median)\n self.assertTrue(found_median < 1.05 * expected_median)\n <mask token>\n\n def test_scale_validation(self):\n params = self.make_params_array()\n params['scale'] = -1\n self.assertRaises(InvalidParamsError, GEVU.validate, params)\n\n def test_shape_validation(self):\n params = self.make_params_array()\n params['shape'] = 1\n self.assertRaises(InvalidParamsError, GEVU.validate, params)\n <mask token>\n", "step-2": "<mask token>\n\n\nclass GeneralizedExtremeValueUncertaintyTestCase(UncertaintyTestCase):\n\n def test_random_variables(self):\n params = self.make_params_array()\n params['loc'] = 2\n params['scale'] = 5\n expected_median = 2 - 5 * np.log(np.log(2))\n results = GEVU.random_variables(params, 10000)\n found_median = np.median(results)\n self.assertEqual(results.shape, (1, 10000))\n self.assertTrue(0.95 * expected_median < found_median)\n self.assertTrue(found_median < 1.05 * expected_median)\n\n def test_loc_validation(self):\n params = self.make_params_array()\n params['loc'] = np.NaN\n self.assertRaises(InvalidParamsError, GEVU.validate, params)\n\n def test_scale_validation(self):\n params = self.make_params_array()\n params['scale'] = -1\n self.assertRaises(InvalidParamsError, GEVU.validate, params)\n\n def test_shape_validation(self):\n params = self.make_params_array()\n params['shape'] = 1\n self.assertRaises(InvalidParamsError, GEVU.validate, params)\n <mask token>\n", "step-3": "<mask token>\n\n\nclass 
GeneralizedExtremeValueUncertaintyTestCase(UncertaintyTestCase):\n\n def test_random_variables(self):\n params = self.make_params_array()\n params['loc'] = 2\n params['scale'] = 5\n expected_median = 2 - 5 * np.log(np.log(2))\n results = GEVU.random_variables(params, 10000)\n found_median = np.median(results)\n self.assertEqual(results.shape, (1, 10000))\n self.assertTrue(0.95 * expected_median < found_median)\n self.assertTrue(found_median < 1.05 * expected_median)\n\n def test_loc_validation(self):\n params = self.make_params_array()\n params['loc'] = np.NaN\n self.assertRaises(InvalidParamsError, GEVU.validate, params)\n\n def test_scale_validation(self):\n params = self.make_params_array()\n params['scale'] = -1\n self.assertRaises(InvalidParamsError, GEVU.validate, params)\n\n def test_shape_validation(self):\n params = self.make_params_array()\n params['shape'] = 1\n self.assertRaises(InvalidParamsError, GEVU.validate, params)\n\n def make_params_array(self, length=1):\n assert isinstance(length, int)\n params = np.zeros((length,), dtype=[('input', 'u4'), ('output',\n 'u4'), ('loc', 'f4'), ('negative', 'b1'), ('scale', 'f4'), (\n 'shape', 'f4'), ('minimum', 'f4'), ('maximum', 'f4')])\n params['minimum'] = params['maximum'] = np.NaN\n params['loc'] = params['scale'] = 1\n return params\n", "step-4": "from stats_arrays.distributions import GeneralizedExtremeValueUncertainty as GEVU\nfrom stats_arrays.errors import InvalidParamsError\nfrom ..base import UncertaintyTestCase\nimport numpy as np\n\n\nclass GeneralizedExtremeValueUncertaintyTestCase(UncertaintyTestCase):\n\n def test_random_variables(self):\n params = self.make_params_array()\n params['loc'] = 2\n params['scale'] = 5\n expected_median = 2 - 5 * np.log(np.log(2))\n results = GEVU.random_variables(params, 10000)\n found_median = np.median(results)\n self.assertEqual(results.shape, (1, 10000))\n self.assertTrue(0.95 * expected_median < found_median)\n self.assertTrue(found_median < 1.05 * 
expected_median)\n\n def test_loc_validation(self):\n params = self.make_params_array()\n params['loc'] = np.NaN\n self.assertRaises(InvalidParamsError, GEVU.validate, params)\n\n def test_scale_validation(self):\n params = self.make_params_array()\n params['scale'] = -1\n self.assertRaises(InvalidParamsError, GEVU.validate, params)\n\n def test_shape_validation(self):\n params = self.make_params_array()\n params['shape'] = 1\n self.assertRaises(InvalidParamsError, GEVU.validate, params)\n\n def make_params_array(self, length=1):\n assert isinstance(length, int)\n params = np.zeros((length,), dtype=[('input', 'u4'), ('output',\n 'u4'), ('loc', 'f4'), ('negative', 'b1'), ('scale', 'f4'), (\n 'shape', 'f4'), ('minimum', 'f4'), ('maximum', 'f4')])\n params['minimum'] = params['maximum'] = np.NaN\n params['loc'] = params['scale'] = 1\n return params\n", "step-5": "from stats_arrays.distributions import GeneralizedExtremeValueUncertainty as GEVU\nfrom stats_arrays.errors import InvalidParamsError\nfrom ..base import UncertaintyTestCase\nimport numpy as np\n\n\nclass GeneralizedExtremeValueUncertaintyTestCase(UncertaintyTestCase):\n\n def test_random_variables(self):\n params = self.make_params_array()\n params['loc'] = 2\n params['scale'] = 5\n # Formula for median (loc - scale * ln ln 2)\n expected_median = 2 - 5 * np.log(np.log(2))\n results = GEVU.random_variables(params, 10000)\n found_median = np.median(results)\n self.assertEqual(results.shape, (1, 10000))\n self.assertTrue(0.95 * expected_median < found_median)\n self.assertTrue(found_median < 1.05 * expected_median)\n\n def test_loc_validation(self):\n params = self.make_params_array()\n params['loc'] = np.NaN\n self.assertRaises(\n InvalidParamsError,\n GEVU.validate,\n params\n )\n\n def test_scale_validation(self):\n params = self.make_params_array()\n params['scale'] = -1\n self.assertRaises(\n InvalidParamsError,\n GEVU.validate,\n params\n )\n\n def test_shape_validation(self):\n params = 
self.make_params_array()\n params['shape'] = 1\n self.assertRaises(\n InvalidParamsError,\n GEVU.validate,\n params\n )\n\n def make_params_array(self, length=1):\n assert isinstance(length, int)\n params = np.zeros((length,), dtype=[\n ('input', 'u4'),\n ('output', 'u4'),\n ('loc', 'f4'),\n ('negative', 'b1'),\n ('scale', 'f4'),\n ('shape', 'f4'),\n ('minimum', 'f4'),\n ('maximum', 'f4')\n ])\n params['minimum'] = params['maximum'] = np.NaN\n params['loc'] = params['scale'] = 1\n return params\n", "step-ids": [ 4, 5, 6, 7, 8 ] }
[ 4, 5, 6, 7, 8 ]
<|reserved_special_token_0|> class RegPropData: <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def __init__(self, csv_path): """ Initialize a region proposal data instance. Parameters ---------- csv_path : str Path to csv file containing proposal information. Note ---- It is assumed that the directory containing the proposals csv file has `properties_session.cv` file. This file should contain information about current session. """ fdops.check_if_file_exists(csv_path) self._df = pd.read_csv(csv_path) self.props = self._get_properties(csv_path) def _get_properties(self, csv_path): """ Creates a dictionary containing properties of proposal data. Parameters ---------- csv_path : str Path to csv file containing proposal information """ props = {} loc, fname, ext = fdops.get_loc_name_ext(csv_path) props['loc'] = loc props['name'] = fname props['ext'] = ext props['W'] = self._df['W'].unique().item() props['H'] = self._df['H'].unique().item() props['FPS'] = self._df['FPS'].unique().item() props['dur'] = self._df['dur'].unique().item() props['vname'] = self._get_video_name(fname) props['num_props'] = self._get_num_proposals() return props def write_proposals_to_video(self, vdir, frms_per_sec=1.0): """ Writes proposals to video. Parameters ---------- vdir : str Directory where we can find video. 
frms_per_sec : float, default 1 A value of 0.5 means that we will skip `FPS x 1/(frms_per_sec) = 60` frames """ vid_name = self.props['vname'] vfpath = fdops.get_files_with_kws(vdir, [vid_name, '.mp4']) if len(vfpath) > 1: raise Exception(f'More than one video found\n\t{vfpath}') vin = VidReader(vfpath[0]) ovid_path = f"{self.props['loc']}/{self.props['name']}.mp4" vw = skvideo.io.FFmpegWriter(ovid_path, outputdict={'-vcodec': 'libx264', '-r': '30'}) f0_start = 0 f0_end = vin.props['num_frames'] - 1 f0_skip = vin.props['frame_rate'] * (1 / frms_per_sec) f0s = list(range(f0_start, f0_end, int(f0_skip))) for f0 in tqdm(f0s): frm = vin.get_frame(f0, c='bgr') props = self._get_proposals_for_frame(f0) for p in props: if len(p) > 0: w0, h0, w, h = p frame = cv2.rectangle(frm, (w0, h0), (w0 + w, h0 + h), (0, 256, 0), 1) vw.writeFrame(cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)) vw.close() vin.release() import sys sys.exit() <|reserved_special_token_0|> def _get_video_name(self, fname): """ Returns video name by parsing csv file name Parameters ---------- fname : str Name of csv file having proposals """ csv_name_split = fname.split('_') thirty_fps_loc = csv_name_split.index('30fps') video_name = '_'.join(csv_name_split[0:thirty_fps_loc + 1]) return video_name <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class RegPropData: <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def __init__(self, csv_path): """ Initialize a region proposal data instance. Parameters ---------- csv_path : str Path to csv file containing proposal information. Note ---- It is assumed that the directory containing the proposals csv file has `properties_session.cv` file. This file should contain information about current session. 
""" fdops.check_if_file_exists(csv_path) self._df = pd.read_csv(csv_path) self.props = self._get_properties(csv_path) def _get_properties(self, csv_path): """ Creates a dictionary containing properties of proposal data. Parameters ---------- csv_path : str Path to csv file containing proposal information """ props = {} loc, fname, ext = fdops.get_loc_name_ext(csv_path) props['loc'] = loc props['name'] = fname props['ext'] = ext props['W'] = self._df['W'].unique().item() props['H'] = self._df['H'].unique().item() props['FPS'] = self._df['FPS'].unique().item() props['dur'] = self._df['dur'].unique().item() props['vname'] = self._get_video_name(fname) props['num_props'] = self._get_num_proposals() return props def write_proposals_to_video(self, vdir, frms_per_sec=1.0): """ Writes proposals to video. Parameters ---------- vdir : str Directory where we can find video. frms_per_sec : float, default 1 A value of 0.5 means that we will skip `FPS x 1/(frms_per_sec) = 60` frames """ vid_name = self.props['vname'] vfpath = fdops.get_files_with_kws(vdir, [vid_name, '.mp4']) if len(vfpath) > 1: raise Exception(f'More than one video found\n\t{vfpath}') vin = VidReader(vfpath[0]) ovid_path = f"{self.props['loc']}/{self.props['name']}.mp4" vw = skvideo.io.FFmpegWriter(ovid_path, outputdict={'-vcodec': 'libx264', '-r': '30'}) f0_start = 0 f0_end = vin.props['num_frames'] - 1 f0_skip = vin.props['frame_rate'] * (1 / frms_per_sec) f0s = list(range(f0_start, f0_end, int(f0_skip))) for f0 in tqdm(f0s): frm = vin.get_frame(f0, c='bgr') props = self._get_proposals_for_frame(f0) for p in props: if len(p) > 0: w0, h0, w, h = p frame = cv2.rectangle(frm, (w0, h0), (w0 + w, h0 + h), (0, 256, 0), 1) vw.writeFrame(cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)) vw.close() vin.release() import sys sys.exit() def _get_proposals_for_frame(self, fn): """ Returns a list of proposal regions Parameters ---------- fn : int Frame number """ tdf = self._df.copy() tdf['f1'] = tdf['f0'] + tdf['f'] - 1 df = tdf[fn 
>= tdf['f0']] df = df[fn <= df['f1']] if len(df) == 0: return [] if len(df) > 1: pdb.set_trace() raise Exception(f'USER_ERROR: proposals csv is fishy\n{df}') prop_list = df['props'].item().split(':') if len(prop_list) > 0: props = [] for p in prop_list: coords = p.split('-') if len(coords) == 4: props += [[int(x) for x in coords]] return props def _get_video_name(self, fname): """ Returns video name by parsing csv file name Parameters ---------- fname : str Name of csv file having proposals """ csv_name_split = fname.split('_') thirty_fps_loc = csv_name_split.index('30fps') video_name = '_'.join(csv_name_split[0:thirty_fps_loc + 1]) return video_name def _get_num_proposals(self): """ Returns number of proposals. """ total_props = self._df['nprops'].sum() return total_props <|reserved_special_token_1|> <|reserved_special_token_0|> class RegPropData: <|reserved_special_token_0|> _df = None props = None <|reserved_special_token_0|> def __init__(self, csv_path): """ Initialize a region proposal data instance. Parameters ---------- csv_path : str Path to csv file containing proposal information. Note ---- It is assumed that the directory containing the proposals csv file has `properties_session.cv` file. This file should contain information about current session. """ fdops.check_if_file_exists(csv_path) self._df = pd.read_csv(csv_path) self.props = self._get_properties(csv_path) def _get_properties(self, csv_path): """ Creates a dictionary containing properties of proposal data. 
Parameters ---------- csv_path : str Path to csv file containing proposal information """ props = {} loc, fname, ext = fdops.get_loc_name_ext(csv_path) props['loc'] = loc props['name'] = fname props['ext'] = ext props['W'] = self._df['W'].unique().item() props['H'] = self._df['H'].unique().item() props['FPS'] = self._df['FPS'].unique().item() props['dur'] = self._df['dur'].unique().item() props['vname'] = self._get_video_name(fname) props['num_props'] = self._get_num_proposals() return props def write_proposals_to_video(self, vdir, frms_per_sec=1.0): """ Writes proposals to video. Parameters ---------- vdir : str Directory where we can find video. frms_per_sec : float, default 1 A value of 0.5 means that we will skip `FPS x 1/(frms_per_sec) = 60` frames """ vid_name = self.props['vname'] vfpath = fdops.get_files_with_kws(vdir, [vid_name, '.mp4']) if len(vfpath) > 1: raise Exception(f'More than one video found\n\t{vfpath}') vin = VidReader(vfpath[0]) ovid_path = f"{self.props['loc']}/{self.props['name']}.mp4" vw = skvideo.io.FFmpegWriter(ovid_path, outputdict={'-vcodec': 'libx264', '-r': '30'}) f0_start = 0 f0_end = vin.props['num_frames'] - 1 f0_skip = vin.props['frame_rate'] * (1 / frms_per_sec) f0s = list(range(f0_start, f0_end, int(f0_skip))) for f0 in tqdm(f0s): frm = vin.get_frame(f0, c='bgr') props = self._get_proposals_for_frame(f0) for p in props: if len(p) > 0: w0, h0, w, h = p frame = cv2.rectangle(frm, (w0, h0), (w0 + w, h0 + h), (0, 256, 0), 1) vw.writeFrame(cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)) vw.close() vin.release() import sys sys.exit() def _get_proposals_for_frame(self, fn): """ Returns a list of proposal regions Parameters ---------- fn : int Frame number """ tdf = self._df.copy() tdf['f1'] = tdf['f0'] + tdf['f'] - 1 df = tdf[fn >= tdf['f0']] df = df[fn <= df['f1']] if len(df) == 0: return [] if len(df) > 1: pdb.set_trace() raise Exception(f'USER_ERROR: proposals csv is fishy\n{df}') prop_list = df['props'].item().split(':') if len(prop_list) > 
0: props = [] for p in prop_list: coords = p.split('-') if len(coords) == 4: props += [[int(x) for x in coords]] return props def _get_video_name(self, fname): """ Returns video name by parsing csv file name Parameters ---------- fname : str Name of csv file having proposals """ csv_name_split = fname.split('_') thirty_fps_loc = csv_name_split.index('30fps') video_name = '_'.join(csv_name_split[0:thirty_fps_loc + 1]) return video_name def _get_num_proposals(self): """ Returns number of proposals. """ total_props = self._df['nprops'].sum() return total_props <|reserved_special_token_1|> <|reserved_special_token_0|> class RegPropData: """ Processes region proposal data. """ _df = None props = None """Dictionary containing region proposal data properties """ def __init__(self, csv_path): """ Initialize a region proposal data instance. Parameters ---------- csv_path : str Path to csv file containing proposal information. Note ---- It is assumed that the directory containing the proposals csv file has `properties_session.cv` file. This file should contain information about current session. """ fdops.check_if_file_exists(csv_path) self._df = pd.read_csv(csv_path) self.props = self._get_properties(csv_path) def _get_properties(self, csv_path): """ Creates a dictionary containing properties of proposal data. Parameters ---------- csv_path : str Path to csv file containing proposal information """ props = {} loc, fname, ext = fdops.get_loc_name_ext(csv_path) props['loc'] = loc props['name'] = fname props['ext'] = ext props['W'] = self._df['W'].unique().item() props['H'] = self._df['H'].unique().item() props['FPS'] = self._df['FPS'].unique().item() props['dur'] = self._df['dur'].unique().item() props['vname'] = self._get_video_name(fname) props['num_props'] = self._get_num_proposals() return props def write_proposals_to_video(self, vdir, frms_per_sec=1.0): """ Writes proposals to video. Parameters ---------- vdir : str Directory where we can find video. 
frms_per_sec : float, default 1 A value of 0.5 means that we will skip `FPS x 1/(frms_per_sec) = 60` frames """ vid_name = self.props['vname'] vfpath = fdops.get_files_with_kws(vdir, [vid_name, '.mp4']) if len(vfpath) > 1: raise Exception(f'More than one video found\n\t{vfpath}') vin = VidReader(vfpath[0]) ovid_path = f"{self.props['loc']}/{self.props['name']}.mp4" vw = skvideo.io.FFmpegWriter(ovid_path, outputdict={'-vcodec': 'libx264', '-r': '30'}) f0_start = 0 f0_end = vin.props['num_frames'] - 1 f0_skip = vin.props['frame_rate'] * (1 / frms_per_sec) f0s = list(range(f0_start, f0_end, int(f0_skip))) for f0 in tqdm(f0s): frm = vin.get_frame(f0, c='bgr') props = self._get_proposals_for_frame(f0) for p in props: if len(p) > 0: w0, h0, w, h = p frame = cv2.rectangle(frm, (w0, h0), (w0 + w, h0 + h), (0, 256, 0), 1) vw.writeFrame(cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)) vw.close() vin.release() import sys sys.exit() def _get_proposals_for_frame(self, fn): """ Returns a list of proposal regions Parameters ---------- fn : int Frame number """ tdf = self._df.copy() tdf['f1'] = tdf['f0'] + tdf['f'] - 1 df = tdf[fn >= tdf['f0']] df = df[fn <= df['f1']] if len(df) == 0: return [] if len(df) > 1: pdb.set_trace() raise Exception(f'USER_ERROR: proposals csv is fishy\n{df}') prop_list = df['props'].item().split(':') if len(prop_list) > 0: props = [] for p in prop_list: coords = p.split('-') if len(coords) == 4: props += [[int(x) for x in coords]] return props def _get_video_name(self, fname): """ Returns video name by parsing csv file name Parameters ---------- fname : str Name of csv file having proposals """ csv_name_split = fname.split('_') thirty_fps_loc = csv_name_split.index('30fps') video_name = '_'.join(csv_name_split[0:thirty_fps_loc + 1]) return video_name def _get_num_proposals(self): """ Returns number of proposals. 
""" total_props = self._df['nprops'].sum() return total_props <|reserved_special_token_1|> import cv2 import pdb import skvideo import numpy as np import pandas as pd from tqdm import tqdm from harp import fdops from word2number import w2n from harp.vid import VidReader class RegPropData: """ Processes region proposal data. """ _df = None props = None """Dictionary containing region proposal data properties """ def __init__(self, csv_path): """ Initialize a region proposal data instance. Parameters ---------- csv_path : str Path to csv file containing proposal information. Note ---- It is assumed that the directory containing the proposals csv file has `properties_session.cv` file. This file should contain information about current session. """ # Checking files fdops.check_if_file_exists(csv_path) # loading proposal data as a data frame self._df = pd.read_csv(csv_path) # Dictionary containing proposal properties self.props = self._get_properties(csv_path) def _get_properties(self, csv_path): """ Creates a dictionary containing properties of proposal data. Parameters ---------- csv_path : str Path to csv file containing proposal information """ props = {} # File properties loc, fname, ext = fdops.get_loc_name_ext(csv_path) props['loc'] = loc props['name'] = fname props['ext'] = ext # Video properties props['W'] = self._df['W'].unique().item() props['H'] = self._df['H'].unique().item() props['FPS'] = self._df['FPS'].unique().item() props['dur'] = self._df['dur'].unique().item() props['vname'] = self._get_video_name(fname) # Proposal properties props['num_props'] = self._get_num_proposals() return props def write_proposals_to_video(self, vdir, frms_per_sec=1.0): """ Writes proposals to video. Parameters ---------- vdir : str Directory where we can find video. 
frms_per_sec : float, default 1 A value of 0.5 means that we will skip `FPS x 1/(frms_per_sec) = 60` frames """ # Input video vid_name = self.props['vname'] vfpath = fdops.get_files_with_kws(vdir, [vid_name, ".mp4"]) if len(vfpath) > 1: raise Exception(f"More than one video found\n\t{vfpath}") vin = VidReader(vfpath[0]) # Output video ovid_path = f"{self.props['loc']}/{self.props['name']}.mp4" vw = skvideo.io.FFmpegWriter( ovid_path, outputdict={'-vcodec': 'libx264','-r':'30'} ) # Calculate frame numbers(POC) that we will use. f0_start = 0 # starting frame poc f0_end = vin.props['num_frames'] - 1 # ending frame poc f0_skip = vin.props['frame_rate']*(1/frms_per_sec) f0s = list(range(f0_start, f0_end, int(f0_skip))) # Loop over each frame number and draw proposal regions # over them for f0 in tqdm(f0s): frm = vin.get_frame(f0, c='bgr') # Get proposals for frame f0 props = self._get_proposals_for_frame(f0) # Proposals looop for p in props: if len(p) > 0: w0, h0, w, h = p frame = cv2.rectangle( frm, (w0, h0), (w0+w, h0+h), (0, 256, 0), 1 ) # Write frame to output vw.writeFrame(cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)) vw.close() vin.release() import sys; sys.exit() def _get_proposals_for_frame(self, fn): """ Returns a list of proposal regions Parameters ---------- fn : int Frame number """ # Get dataframe that contains f0. 
It should have only one row tdf = self._df.copy() # lower bound tdf['f1'] = (tdf['f0'] # creating column + tdf['f'] - 1) # with last frame df = tdf[fn >= tdf['f0']] df = df[fn <= df['f1']] if len(df) == 0: return [] if len(df) > 1: pdb.set_trace() raise Exception("USER_ERROR: proposals csv is fishy\n" f"{df}") # Proposal string to numpy array prop_list = df['props'].item().split(":") # Loop over bounding box list and create a numpy array if len(prop_list) > 0: props = [] for p in prop_list: coords = p.split("-") if len(coords) == 4: props += [[int(x) for x in coords]] return props def _get_video_name(self, fname): """ Returns video name by parsing csv file name Parameters ---------- fname : str Name of csv file having proposals """ csv_name_split = fname.split("_") thirty_fps_loc = csv_name_split.index("30fps") video_name = "_".join(csv_name_split[0:thirty_fps_loc+1]) return video_name def _get_num_proposals(self): """ Returns number of proposals. """ total_props = self._df['nprops'].sum() return total_props
flexible
{ "blob_id": "b10badc172be119be5b2ab8ccc32cc95a0ed1e7a", "index": 2680, "step-1": "<mask token>\n\n\nclass RegPropData:\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __init__(self, csv_path):\n \"\"\"\n Initialize a region proposal data instance.\n\n Parameters\n ----------\n csv_path : str\n Path to csv file containing proposal information.\n\n Note\n ----\n It is assumed that the directory containing the proposals\n csv file has `properties_session.cv` file. This file should\n contain information about current session.\n \"\"\"\n fdops.check_if_file_exists(csv_path)\n self._df = pd.read_csv(csv_path)\n self.props = self._get_properties(csv_path)\n\n def _get_properties(self, csv_path):\n \"\"\"\n Creates a dictionary containing properties of proposal\n data.\n\n Parameters\n ----------\n csv_path : str\n Path to csv file containing proposal information\n \"\"\"\n props = {}\n loc, fname, ext = fdops.get_loc_name_ext(csv_path)\n props['loc'] = loc\n props['name'] = fname\n props['ext'] = ext\n props['W'] = self._df['W'].unique().item()\n props['H'] = self._df['H'].unique().item()\n props['FPS'] = self._df['FPS'].unique().item()\n props['dur'] = self._df['dur'].unique().item()\n props['vname'] = self._get_video_name(fname)\n props['num_props'] = self._get_num_proposals()\n return props\n\n def write_proposals_to_video(self, vdir, frms_per_sec=1.0):\n \"\"\" Writes proposals to video.\n\n Parameters\n ----------\n vdir : str\n Directory where we can find video.\n frms_per_sec : float, default 1\n A value of 0.5 means that we will skip\n `FPS x 1/(frms_per_sec) = 60` frames\n \"\"\"\n vid_name = self.props['vname']\n vfpath = fdops.get_files_with_kws(vdir, [vid_name, '.mp4'])\n if len(vfpath) > 1:\n raise Exception(f'More than one video found\\n\\t{vfpath}')\n vin = VidReader(vfpath[0])\n ovid_path = f\"{self.props['loc']}/{self.props['name']}.mp4\"\n vw = skvideo.io.FFmpegWriter(ovid_path, outputdict={'-vcodec':\n 'libx264', '-r': '30'})\n 
f0_start = 0\n f0_end = vin.props['num_frames'] - 1\n f0_skip = vin.props['frame_rate'] * (1 / frms_per_sec)\n f0s = list(range(f0_start, f0_end, int(f0_skip)))\n for f0 in tqdm(f0s):\n frm = vin.get_frame(f0, c='bgr')\n props = self._get_proposals_for_frame(f0)\n for p in props:\n if len(p) > 0:\n w0, h0, w, h = p\n frame = cv2.rectangle(frm, (w0, h0), (w0 + w, h0 + h),\n (0, 256, 0), 1)\n vw.writeFrame(cv2.cvtColor(frame, cv2.COLOR_BGR2RGB))\n vw.close()\n vin.release()\n import sys\n sys.exit()\n <mask token>\n\n def _get_video_name(self, fname):\n \"\"\" Returns video name by parsing csv file name\n\n Parameters\n ----------\n fname : str\n Name of csv file having proposals\n \"\"\"\n csv_name_split = fname.split('_')\n thirty_fps_loc = csv_name_split.index('30fps')\n video_name = '_'.join(csv_name_split[0:thirty_fps_loc + 1])\n return video_name\n <mask token>\n", "step-2": "<mask token>\n\n\nclass RegPropData:\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __init__(self, csv_path):\n \"\"\"\n Initialize a region proposal data instance.\n\n Parameters\n ----------\n csv_path : str\n Path to csv file containing proposal information.\n\n Note\n ----\n It is assumed that the directory containing the proposals\n csv file has `properties_session.cv` file. 
This file should\n contain information about current session.\n \"\"\"\n fdops.check_if_file_exists(csv_path)\n self._df = pd.read_csv(csv_path)\n self.props = self._get_properties(csv_path)\n\n def _get_properties(self, csv_path):\n \"\"\"\n Creates a dictionary containing properties of proposal\n data.\n\n Parameters\n ----------\n csv_path : str\n Path to csv file containing proposal information\n \"\"\"\n props = {}\n loc, fname, ext = fdops.get_loc_name_ext(csv_path)\n props['loc'] = loc\n props['name'] = fname\n props['ext'] = ext\n props['W'] = self._df['W'].unique().item()\n props['H'] = self._df['H'].unique().item()\n props['FPS'] = self._df['FPS'].unique().item()\n props['dur'] = self._df['dur'].unique().item()\n props['vname'] = self._get_video_name(fname)\n props['num_props'] = self._get_num_proposals()\n return props\n\n def write_proposals_to_video(self, vdir, frms_per_sec=1.0):\n \"\"\" Writes proposals to video.\n\n Parameters\n ----------\n vdir : str\n Directory where we can find video.\n frms_per_sec : float, default 1\n A value of 0.5 means that we will skip\n `FPS x 1/(frms_per_sec) = 60` frames\n \"\"\"\n vid_name = self.props['vname']\n vfpath = fdops.get_files_with_kws(vdir, [vid_name, '.mp4'])\n if len(vfpath) > 1:\n raise Exception(f'More than one video found\\n\\t{vfpath}')\n vin = VidReader(vfpath[0])\n ovid_path = f\"{self.props['loc']}/{self.props['name']}.mp4\"\n vw = skvideo.io.FFmpegWriter(ovid_path, outputdict={'-vcodec':\n 'libx264', '-r': '30'})\n f0_start = 0\n f0_end = vin.props['num_frames'] - 1\n f0_skip = vin.props['frame_rate'] * (1 / frms_per_sec)\n f0s = list(range(f0_start, f0_end, int(f0_skip)))\n for f0 in tqdm(f0s):\n frm = vin.get_frame(f0, c='bgr')\n props = self._get_proposals_for_frame(f0)\n for p in props:\n if len(p) > 0:\n w0, h0, w, h = p\n frame = cv2.rectangle(frm, (w0, h0), (w0 + w, h0 + h),\n (0, 256, 0), 1)\n vw.writeFrame(cv2.cvtColor(frame, cv2.COLOR_BGR2RGB))\n vw.close()\n vin.release()\n import sys\n 
sys.exit()\n\n def _get_proposals_for_frame(self, fn):\n \"\"\"\n Returns a list of proposal regions\n\n Parameters\n ----------\n fn : int\n Frame number\n \"\"\"\n tdf = self._df.copy()\n tdf['f1'] = tdf['f0'] + tdf['f'] - 1\n df = tdf[fn >= tdf['f0']]\n df = df[fn <= df['f1']]\n if len(df) == 0:\n return []\n if len(df) > 1:\n pdb.set_trace()\n raise Exception(f'USER_ERROR: proposals csv is fishy\\n{df}')\n prop_list = df['props'].item().split(':')\n if len(prop_list) > 0:\n props = []\n for p in prop_list:\n coords = p.split('-')\n if len(coords) == 4:\n props += [[int(x) for x in coords]]\n return props\n\n def _get_video_name(self, fname):\n \"\"\" Returns video name by parsing csv file name\n\n Parameters\n ----------\n fname : str\n Name of csv file having proposals\n \"\"\"\n csv_name_split = fname.split('_')\n thirty_fps_loc = csv_name_split.index('30fps')\n video_name = '_'.join(csv_name_split[0:thirty_fps_loc + 1])\n return video_name\n\n def _get_num_proposals(self):\n \"\"\" Returns number of proposals.\n \"\"\"\n total_props = self._df['nprops'].sum()\n return total_props\n", "step-3": "<mask token>\n\n\nclass RegPropData:\n <mask token>\n _df = None\n props = None\n <mask token>\n\n def __init__(self, csv_path):\n \"\"\"\n Initialize a region proposal data instance.\n\n Parameters\n ----------\n csv_path : str\n Path to csv file containing proposal information.\n\n Note\n ----\n It is assumed that the directory containing the proposals\n csv file has `properties_session.cv` file. 
This file should\n contain information about current session.\n \"\"\"\n fdops.check_if_file_exists(csv_path)\n self._df = pd.read_csv(csv_path)\n self.props = self._get_properties(csv_path)\n\n def _get_properties(self, csv_path):\n \"\"\"\n Creates a dictionary containing properties of proposal\n data.\n\n Parameters\n ----------\n csv_path : str\n Path to csv file containing proposal information\n \"\"\"\n props = {}\n loc, fname, ext = fdops.get_loc_name_ext(csv_path)\n props['loc'] = loc\n props['name'] = fname\n props['ext'] = ext\n props['W'] = self._df['W'].unique().item()\n props['H'] = self._df['H'].unique().item()\n props['FPS'] = self._df['FPS'].unique().item()\n props['dur'] = self._df['dur'].unique().item()\n props['vname'] = self._get_video_name(fname)\n props['num_props'] = self._get_num_proposals()\n return props\n\n def write_proposals_to_video(self, vdir, frms_per_sec=1.0):\n \"\"\" Writes proposals to video.\n\n Parameters\n ----------\n vdir : str\n Directory where we can find video.\n frms_per_sec : float, default 1\n A value of 0.5 means that we will skip\n `FPS x 1/(frms_per_sec) = 60` frames\n \"\"\"\n vid_name = self.props['vname']\n vfpath = fdops.get_files_with_kws(vdir, [vid_name, '.mp4'])\n if len(vfpath) > 1:\n raise Exception(f'More than one video found\\n\\t{vfpath}')\n vin = VidReader(vfpath[0])\n ovid_path = f\"{self.props['loc']}/{self.props['name']}.mp4\"\n vw = skvideo.io.FFmpegWriter(ovid_path, outputdict={'-vcodec':\n 'libx264', '-r': '30'})\n f0_start = 0\n f0_end = vin.props['num_frames'] - 1\n f0_skip = vin.props['frame_rate'] * (1 / frms_per_sec)\n f0s = list(range(f0_start, f0_end, int(f0_skip)))\n for f0 in tqdm(f0s):\n frm = vin.get_frame(f0, c='bgr')\n props = self._get_proposals_for_frame(f0)\n for p in props:\n if len(p) > 0:\n w0, h0, w, h = p\n frame = cv2.rectangle(frm, (w0, h0), (w0 + w, h0 + h),\n (0, 256, 0), 1)\n vw.writeFrame(cv2.cvtColor(frame, cv2.COLOR_BGR2RGB))\n vw.close()\n vin.release()\n import sys\n 
sys.exit()\n\n def _get_proposals_for_frame(self, fn):\n \"\"\"\n Returns a list of proposal regions\n\n Parameters\n ----------\n fn : int\n Frame number\n \"\"\"\n tdf = self._df.copy()\n tdf['f1'] = tdf['f0'] + tdf['f'] - 1\n df = tdf[fn >= tdf['f0']]\n df = df[fn <= df['f1']]\n if len(df) == 0:\n return []\n if len(df) > 1:\n pdb.set_trace()\n raise Exception(f'USER_ERROR: proposals csv is fishy\\n{df}')\n prop_list = df['props'].item().split(':')\n if len(prop_list) > 0:\n props = []\n for p in prop_list:\n coords = p.split('-')\n if len(coords) == 4:\n props += [[int(x) for x in coords]]\n return props\n\n def _get_video_name(self, fname):\n \"\"\" Returns video name by parsing csv file name\n\n Parameters\n ----------\n fname : str\n Name of csv file having proposals\n \"\"\"\n csv_name_split = fname.split('_')\n thirty_fps_loc = csv_name_split.index('30fps')\n video_name = '_'.join(csv_name_split[0:thirty_fps_loc + 1])\n return video_name\n\n def _get_num_proposals(self):\n \"\"\" Returns number of proposals.\n \"\"\"\n total_props = self._df['nprops'].sum()\n return total_props\n", "step-4": "<mask token>\n\n\nclass RegPropData:\n \"\"\"\n Processes region proposal data.\n \"\"\"\n _df = None\n props = None\n \"\"\"Dictionary containing region proposal data properties \"\"\"\n\n def __init__(self, csv_path):\n \"\"\"\n Initialize a region proposal data instance.\n\n Parameters\n ----------\n csv_path : str\n Path to csv file containing proposal information.\n\n Note\n ----\n It is assumed that the directory containing the proposals\n csv file has `properties_session.cv` file. 
This file should\n contain information about current session.\n \"\"\"\n fdops.check_if_file_exists(csv_path)\n self._df = pd.read_csv(csv_path)\n self.props = self._get_properties(csv_path)\n\n def _get_properties(self, csv_path):\n \"\"\"\n Creates a dictionary containing properties of proposal\n data.\n\n Parameters\n ----------\n csv_path : str\n Path to csv file containing proposal information\n \"\"\"\n props = {}\n loc, fname, ext = fdops.get_loc_name_ext(csv_path)\n props['loc'] = loc\n props['name'] = fname\n props['ext'] = ext\n props['W'] = self._df['W'].unique().item()\n props['H'] = self._df['H'].unique().item()\n props['FPS'] = self._df['FPS'].unique().item()\n props['dur'] = self._df['dur'].unique().item()\n props['vname'] = self._get_video_name(fname)\n props['num_props'] = self._get_num_proposals()\n return props\n\n def write_proposals_to_video(self, vdir, frms_per_sec=1.0):\n \"\"\" Writes proposals to video.\n\n Parameters\n ----------\n vdir : str\n Directory where we can find video.\n frms_per_sec : float, default 1\n A value of 0.5 means that we will skip\n `FPS x 1/(frms_per_sec) = 60` frames\n \"\"\"\n vid_name = self.props['vname']\n vfpath = fdops.get_files_with_kws(vdir, [vid_name, '.mp4'])\n if len(vfpath) > 1:\n raise Exception(f'More than one video found\\n\\t{vfpath}')\n vin = VidReader(vfpath[0])\n ovid_path = f\"{self.props['loc']}/{self.props['name']}.mp4\"\n vw = skvideo.io.FFmpegWriter(ovid_path, outputdict={'-vcodec':\n 'libx264', '-r': '30'})\n f0_start = 0\n f0_end = vin.props['num_frames'] - 1\n f0_skip = vin.props['frame_rate'] * (1 / frms_per_sec)\n f0s = list(range(f0_start, f0_end, int(f0_skip)))\n for f0 in tqdm(f0s):\n frm = vin.get_frame(f0, c='bgr')\n props = self._get_proposals_for_frame(f0)\n for p in props:\n if len(p) > 0:\n w0, h0, w, h = p\n frame = cv2.rectangle(frm, (w0, h0), (w0 + w, h0 + h),\n (0, 256, 0), 1)\n vw.writeFrame(cv2.cvtColor(frame, cv2.COLOR_BGR2RGB))\n vw.close()\n vin.release()\n import sys\n 
sys.exit()\n\n def _get_proposals_for_frame(self, fn):\n \"\"\"\n Returns a list of proposal regions\n\n Parameters\n ----------\n fn : int\n Frame number\n \"\"\"\n tdf = self._df.copy()\n tdf['f1'] = tdf['f0'] + tdf['f'] - 1\n df = tdf[fn >= tdf['f0']]\n df = df[fn <= df['f1']]\n if len(df) == 0:\n return []\n if len(df) > 1:\n pdb.set_trace()\n raise Exception(f'USER_ERROR: proposals csv is fishy\\n{df}')\n prop_list = df['props'].item().split(':')\n if len(prop_list) > 0:\n props = []\n for p in prop_list:\n coords = p.split('-')\n if len(coords) == 4:\n props += [[int(x) for x in coords]]\n return props\n\n def _get_video_name(self, fname):\n \"\"\" Returns video name by parsing csv file name\n\n Parameters\n ----------\n fname : str\n Name of csv file having proposals\n \"\"\"\n csv_name_split = fname.split('_')\n thirty_fps_loc = csv_name_split.index('30fps')\n video_name = '_'.join(csv_name_split[0:thirty_fps_loc + 1])\n return video_name\n\n def _get_num_proposals(self):\n \"\"\" Returns number of proposals.\n \"\"\"\n total_props = self._df['nprops'].sum()\n return total_props\n", "step-5": "import cv2\nimport pdb\nimport skvideo\nimport numpy as np\nimport pandas as pd\nfrom tqdm import tqdm\nfrom harp import fdops\nfrom word2number import w2n\nfrom harp.vid import VidReader\n\nclass RegPropData:\n \"\"\"\n Processes region proposal data.\n \"\"\"\n\n _df = None\n\n props = None\n \"\"\"Dictionary containing region proposal data properties \"\"\"\n\n def __init__(self, csv_path):\n \"\"\"\n Initialize a region proposal data instance.\n\n Parameters\n ----------\n csv_path : str\n Path to csv file containing proposal information.\n\n Note\n ----\n It is assumed that the directory containing the proposals\n csv file has `properties_session.cv` file. 
This file should\n contain information about current session.\n \"\"\"\n # Checking files\n fdops.check_if_file_exists(csv_path)\n\n # loading proposal data as a data frame\n self._df = pd.read_csv(csv_path)\n\n # Dictionary containing proposal properties\n self.props = self._get_properties(csv_path)\n\n def _get_properties(self, csv_path):\n \"\"\"\n Creates a dictionary containing properties of proposal\n data.\n\n Parameters\n ----------\n csv_path : str\n Path to csv file containing proposal information\n \"\"\"\n props = {}\n\n # File properties\n loc, fname, ext = fdops.get_loc_name_ext(csv_path)\n props['loc'] = loc\n props['name'] = fname\n props['ext'] = ext\n\n # Video properties\n props['W'] = self._df['W'].unique().item()\n props['H'] = self._df['H'].unique().item()\n props['FPS'] = self._df['FPS'].unique().item()\n props['dur'] = self._df['dur'].unique().item()\n props['vname'] = self._get_video_name(fname)\n\n # Proposal properties\n props['num_props'] = self._get_num_proposals()\n\n return props\n\n def write_proposals_to_video(self, vdir, frms_per_sec=1.0):\n \"\"\" Writes proposals to video.\n\n Parameters\n ----------\n vdir : str\n Directory where we can find video.\n frms_per_sec : float, default 1\n A value of 0.5 means that we will skip\n `FPS x 1/(frms_per_sec) = 60` frames\n \"\"\"\n # Input video\n vid_name = self.props['vname']\n vfpath = fdops.get_files_with_kws(vdir, [vid_name, \".mp4\"])\n if len(vfpath) > 1:\n raise Exception(f\"More than one video found\\n\\t{vfpath}\")\n vin = VidReader(vfpath[0])\n\n # Output video\n ovid_path = f\"{self.props['loc']}/{self.props['name']}.mp4\"\n vw = skvideo.io.FFmpegWriter(\n ovid_path,\n outputdict={'-vcodec': 'libx264','-r':'30'}\n )\n\n # Calculate frame numbers(POC) that we will use.\n f0_start = 0 # starting frame poc\n f0_end = vin.props['num_frames'] - 1 # ending frame poc\n f0_skip = vin.props['frame_rate']*(1/frms_per_sec)\n f0s = list(range(f0_start, f0_end, int(f0_skip)))\n\n # Loop 
over each frame number and draw proposal regions\n # over them\n for f0 in tqdm(f0s):\n frm = vin.get_frame(f0, c='bgr')\n\n # Get proposals for frame f0\n props = self._get_proposals_for_frame(f0)\n\n # Proposals looop\n for p in props:\n if len(p) > 0:\n w0, h0, w, h = p\n frame = cv2.rectangle(\n frm, (w0, h0), (w0+w, h0+h), (0, 256, 0), 1\n )\n # Write frame to output\n vw.writeFrame(cv2.cvtColor(frame, cv2.COLOR_BGR2RGB))\n\n vw.close()\n vin.release()\n import sys; sys.exit()\n\n def _get_proposals_for_frame(self, fn):\n \"\"\"\n Returns a list of proposal regions\n\n Parameters\n ----------\n fn : int\n Frame number\n \"\"\"\n\n # Get dataframe that contains f0. It should have only one row\n tdf = self._df.copy() # lower bound\n tdf['f1'] = (tdf['f0'] # creating column\n + tdf['f'] - 1) # with last frame\n df = tdf[fn >= tdf['f0']]\n df = df[fn <= df['f1']]\n if len(df) == 0:\n return []\n if len(df) > 1:\n pdb.set_trace()\n raise Exception(\"USER_ERROR: proposals csv is fishy\\n\"\n f\"{df}\")\n\n # Proposal string to numpy array\n prop_list = df['props'].item().split(\":\")\n\n # Loop over bounding box list and create a numpy array\n if len(prop_list) > 0:\n props = []\n for p in prop_list:\n coords = p.split(\"-\")\n if len(coords) == 4:\n props += [[int(x) for x in coords]]\n return props\n\n def _get_video_name(self, fname):\n \"\"\" Returns video name by parsing csv file name\n\n Parameters\n ----------\n fname : str\n Name of csv file having proposals\n \"\"\"\n csv_name_split = fname.split(\"_\")\n thirty_fps_loc = csv_name_split.index(\"30fps\")\n video_name = \"_\".join(csv_name_split[0:thirty_fps_loc+1])\n return video_name\n\n def _get_num_proposals(self):\n \"\"\" Returns number of proposals.\n \"\"\"\n total_props = self._df['nprops'].sum()\n return total_props\n", "step-ids": [ 5, 7, 8, 9, 11 ] }
[ 5, 7, 8, 9, 11 ]
<|reserved_special_token_0|> class Game_Service(object): def __init__(self, row_num, col_num): self._row_num = row_num self._col_num = col_num mine_percent = 0.3 self._mine_num = int(mine_percent * float(self._row_num * self. _col_num)) self.shifts = [-1, 0, 1] def generate_map(self): """ generate mine map """ global game game = Board(self._row_num, self._col_num) s = set([]) while len(s) <= self._mine_num: i = random.randint(0, self._row_num * self._col_num - 1) if i not in s: self._set_mine(i) s.add(i) return {'row_num': self._row_num, 'col_num': self._col_num} <|reserved_special_token_0|> <|reserved_special_token_0|> def _flip(self, update_stack, index): """ flip the chosen cell and its adjcent cells """ cell = game.get_cell(index) if cell.ifFlipped() == False: cell.flip() game.decrease_remain() if cell.isMine() == False and cell.get_neighbor() > 0: update_stack[str(index)] = cell.get_neighbor() return elif cell.isMine() == False and cell.get_neighbor() == 0: update_stack[str(index)] = cell.get_neighbor() temp_r = index / self._col_num temp_c = index % self._col_num shift = [[temp_r + dr, temp_c + dc] for dr in self.shifts for dc in self.shifts if [temp_r + dr, temp_c + dc] != [ temp_r, temp_c] and temp_r + dr in range(0, self. 
_row_num) and temp_c + dc in range(0, self._col_num)] for s in shift: self._flip(update_stack, s[0] * self._col_num + s[1]) def _flipAll(self, update_stack): """ flip all mines """ mines_index = game.get_mines() for i in mines_index: update_stack[str(i)] = status['end'] update_stack['row_num'] = self._row_num update_stack['col_num'] = self._col_num update_stack['_mine_num'] = len(mines_index) if len(mines_index) == game.get_remain(): update_stack['type'] = 'win' else: update_stack['type'] = 'lose' <|reserved_special_token_1|> <|reserved_special_token_0|> class Game_Service(object): def __init__(self, row_num, col_num): self._row_num = row_num self._col_num = col_num mine_percent = 0.3 self._mine_num = int(mine_percent * float(self._row_num * self. _col_num)) self.shifts = [-1, 0, 1] def generate_map(self): """ generate mine map """ global game game = Board(self._row_num, self._col_num) s = set([]) while len(s) <= self._mine_num: i = random.randint(0, self._row_num * self._col_num - 1) if i not in s: self._set_mine(i) s.add(i) return {'row_num': self._row_num, 'col_num': self._col_num} def _set_mine(self, index): """ set cell[index] as a mine and update its neighbor cell's mine number """ game.get_cell(index).set_mine() game.add_mine(index) temp_r = index / self._col_num temp_c = index % self._col_num shift = [[temp_r + dr, temp_c + dc] for dr in self.shifts for dc in self.shifts if [temp_r + dr, temp_c + dc] != [temp_r, temp_c] and temp_r + dr in range(0, self._row_num) and temp_c + dc in range (0, self._col_num)] for s in shift: game.get_cell(s[0] * self._col_num + s[1]).add_neighbor() <|reserved_special_token_0|> def _flip(self, update_stack, index): """ flip the chosen cell and its adjcent cells """ cell = game.get_cell(index) if cell.ifFlipped() == False: cell.flip() game.decrease_remain() if cell.isMine() == False and cell.get_neighbor() > 0: update_stack[str(index)] = cell.get_neighbor() return elif cell.isMine() == False and cell.get_neighbor() == 0: 
update_stack[str(index)] = cell.get_neighbor() temp_r = index / self._col_num temp_c = index % self._col_num shift = [[temp_r + dr, temp_c + dc] for dr in self.shifts for dc in self.shifts if [temp_r + dr, temp_c + dc] != [ temp_r, temp_c] and temp_r + dr in range(0, self. _row_num) and temp_c + dc in range(0, self._col_num)] for s in shift: self._flip(update_stack, s[0] * self._col_num + s[1]) def _flipAll(self, update_stack): """ flip all mines """ mines_index = game.get_mines() for i in mines_index: update_stack[str(i)] = status['end'] update_stack['row_num'] = self._row_num update_stack['col_num'] = self._col_num update_stack['_mine_num'] = len(mines_index) if len(mines_index) == game.get_remain(): update_stack['type'] = 'win' else: update_stack['type'] = 'lose' <|reserved_special_token_1|> <|reserved_special_token_0|> class Game_Service(object): def __init__(self, row_num, col_num): self._row_num = row_num self._col_num = col_num mine_percent = 0.3 self._mine_num = int(mine_percent * float(self._row_num * self. 
_col_num)) self.shifts = [-1, 0, 1] def generate_map(self): """ generate mine map """ global game game = Board(self._row_num, self._col_num) s = set([]) while len(s) <= self._mine_num: i = random.randint(0, self._row_num * self._col_num - 1) if i not in s: self._set_mine(i) s.add(i) return {'row_num': self._row_num, 'col_num': self._col_num} def _set_mine(self, index): """ set cell[index] as a mine and update its neighbor cell's mine number """ game.get_cell(index).set_mine() game.add_mine(index) temp_r = index / self._col_num temp_c = index % self._col_num shift = [[temp_r + dr, temp_c + dc] for dr in self.shifts for dc in self.shifts if [temp_r + dr, temp_c + dc] != [temp_r, temp_c] and temp_r + dr in range(0, self._row_num) and temp_c + dc in range (0, self._col_num)] for s in shift: game.get_cell(s[0] * self._col_num + s[1]).add_neighbor() def choose_mine(self, index): """ choose a cell return game status and cells need to change """ cell = game.get_cell(index) update_stack = {'type': 'continue'} if cell.isMine(): self._flipAll(update_stack) else: self._flip(update_stack, index) return update_stack def _flip(self, update_stack, index): """ flip the chosen cell and its adjcent cells """ cell = game.get_cell(index) if cell.ifFlipped() == False: cell.flip() game.decrease_remain() if cell.isMine() == False and cell.get_neighbor() > 0: update_stack[str(index)] = cell.get_neighbor() return elif cell.isMine() == False and cell.get_neighbor() == 0: update_stack[str(index)] = cell.get_neighbor() temp_r = index / self._col_num temp_c = index % self._col_num shift = [[temp_r + dr, temp_c + dc] for dr in self.shifts for dc in self.shifts if [temp_r + dr, temp_c + dc] != [ temp_r, temp_c] and temp_r + dr in range(0, self. 
_row_num) and temp_c + dc in range(0, self._col_num)] for s in shift: self._flip(update_stack, s[0] * self._col_num + s[1]) def _flipAll(self, update_stack): """ flip all mines """ mines_index = game.get_mines() for i in mines_index: update_stack[str(i)] = status['end'] update_stack['row_num'] = self._row_num update_stack['col_num'] = self._col_num update_stack['_mine_num'] = len(mines_index) if len(mines_index) == game.get_remain(): update_stack['type'] = 'win' else: update_stack['type'] = 'lose' <|reserved_special_token_1|> from models import Cell, Board import random from pdb import set_trace as bp status = {'end': -1} game = None class Game_Service(object): def __init__(self, row_num, col_num): self._row_num = row_num self._col_num = col_num mine_percent = 0.3 self._mine_num = int(mine_percent * float(self._row_num * self. _col_num)) self.shifts = [-1, 0, 1] def generate_map(self): """ generate mine map """ global game game = Board(self._row_num, self._col_num) s = set([]) while len(s) <= self._mine_num: i = random.randint(0, self._row_num * self._col_num - 1) if i not in s: self._set_mine(i) s.add(i) return {'row_num': self._row_num, 'col_num': self._col_num} def _set_mine(self, index): """ set cell[index] as a mine and update its neighbor cell's mine number """ game.get_cell(index).set_mine() game.add_mine(index) temp_r = index / self._col_num temp_c = index % self._col_num shift = [[temp_r + dr, temp_c + dc] for dr in self.shifts for dc in self.shifts if [temp_r + dr, temp_c + dc] != [temp_r, temp_c] and temp_r + dr in range(0, self._row_num) and temp_c + dc in range (0, self._col_num)] for s in shift: game.get_cell(s[0] * self._col_num + s[1]).add_neighbor() def choose_mine(self, index): """ choose a cell return game status and cells need to change """ cell = game.get_cell(index) update_stack = {'type': 'continue'} if cell.isMine(): self._flipAll(update_stack) else: self._flip(update_stack, index) return update_stack def _flip(self, update_stack, index): 
""" flip the chosen cell and its adjcent cells """ cell = game.get_cell(index) if cell.ifFlipped() == False: cell.flip() game.decrease_remain() if cell.isMine() == False and cell.get_neighbor() > 0: update_stack[str(index)] = cell.get_neighbor() return elif cell.isMine() == False and cell.get_neighbor() == 0: update_stack[str(index)] = cell.get_neighbor() temp_r = index / self._col_num temp_c = index % self._col_num shift = [[temp_r + dr, temp_c + dc] for dr in self.shifts for dc in self.shifts if [temp_r + dr, temp_c + dc] != [ temp_r, temp_c] and temp_r + dr in range(0, self. _row_num) and temp_c + dc in range(0, self._col_num)] for s in shift: self._flip(update_stack, s[0] * self._col_num + s[1]) def _flipAll(self, update_stack): """ flip all mines """ mines_index = game.get_mines() for i in mines_index: update_stack[str(i)] = status['end'] update_stack['row_num'] = self._row_num update_stack['col_num'] = self._col_num update_stack['_mine_num'] = len(mines_index) if len(mines_index) == game.get_remain(): update_stack['type'] = 'win' else: update_stack['type'] = 'lose' <|reserved_special_token_1|> from models import Cell,Board import random from pdb import set_trace as bp status={'end':-1} game=None class Game_Service(object): def __init__(self,row_num,col_num): self._row_num=row_num self._col_num=col_num mine_percent=0.3 self._mine_num=int(mine_percent*float(self._row_num*self._col_num)) self.shifts=[-1,0,1] def generate_map(self): """ generate mine map """ global game game=Board(self._row_num,self._col_num) s=set([]) while len(s)<=self._mine_num: i=random.randint(0, self._row_num*self._col_num-1) if i not in s: self._set_mine(i) s.add(i) return {#'board':[game.get_board()[inx].get_neighbor() for inx in range(0,self._row_num*self._col_num)], #'mines':game.get_mines(), 'row_num':self._row_num, 'col_num':self._col_num} def _set_mine(self,index): """ set cell[index] as a mine and update its neighbor cell's mine number """ game.get_cell(index).set_mine() #set 
current index as mine game.add_mine(index) #add index to mine_index # add its neighbor's neighbor_num temp_r=index/self._col_num temp_c=index%self._col_num shift=[[temp_r+dr,temp_c+dc] for dr in self.shifts for dc in self.shifts if [temp_r+dr,temp_c+dc]!=[temp_r,temp_c] and temp_r+dr in range(0,self._row_num) and temp_c+dc in range(0,self._col_num)] for s in shift: game.get_cell(s[0]*self._col_num+s[1]).add_neighbor() def choose_mine(self,index): """ choose a cell return game status and cells need to change """ cell=game.get_cell(index) update_stack={'type':'continue'} if cell.isMine(): self._flipAll(update_stack) #clicked on a mine else: self._flip(update_stack,index) #clicked on a safe cell return update_stack def _flip(self,update_stack,index): """ flip the chosen cell and its adjcent cells """ cell=game.get_cell(index) if cell.ifFlipped()==False: cell.flip() game.decrease_remain() if cell.isMine()==False and cell.get_neighbor()>0: update_stack[str(index)]=cell.get_neighbor() return elif cell.isMine()==False and cell.get_neighbor()==0: update_stack[str(index)]=cell.get_neighbor() temp_r=index/self._col_num temp_c=index%self._col_num shift=[[temp_r+dr,temp_c+dc] for dr in self.shifts for dc in self.shifts if [temp_r+dr,temp_c+dc]!=[temp_r,temp_c] and temp_r+dr in range(0,self._row_num) and temp_c+dc in range(0,self._col_num)] for s in shift: self._flip(update_stack,s[0]*self._col_num+s[1]) def _flipAll(self,update_stack): """ flip all mines """ mines_index=game.get_mines() for i in mines_index: update_stack[str(i)]=status['end'] update_stack['row_num']=self._row_num update_stack['col_num']=self._col_num update_stack['_mine_num']=len(mines_index) if len(mines_index)==game.get_remain(): update_stack['type']='win' else: update_stack['type']='lose'
flexible
{ "blob_id": "4af72cab6444922ca66641a08d45bcfe5a689844", "index": 6763, "step-1": "<mask token>\n\n\nclass Game_Service(object):\n\n def __init__(self, row_num, col_num):\n self._row_num = row_num\n self._col_num = col_num\n mine_percent = 0.3\n self._mine_num = int(mine_percent * float(self._row_num * self.\n _col_num))\n self.shifts = [-1, 0, 1]\n\n def generate_map(self):\n \"\"\" generate mine map\n \"\"\"\n global game\n game = Board(self._row_num, self._col_num)\n s = set([])\n while len(s) <= self._mine_num:\n i = random.randint(0, self._row_num * self._col_num - 1)\n if i not in s:\n self._set_mine(i)\n s.add(i)\n return {'row_num': self._row_num, 'col_num': self._col_num}\n <mask token>\n <mask token>\n\n def _flip(self, update_stack, index):\n \"\"\" flip the chosen cell and its adjcent cells\n \"\"\"\n cell = game.get_cell(index)\n if cell.ifFlipped() == False:\n cell.flip()\n game.decrease_remain()\n if cell.isMine() == False and cell.get_neighbor() > 0:\n update_stack[str(index)] = cell.get_neighbor()\n return\n elif cell.isMine() == False and cell.get_neighbor() == 0:\n update_stack[str(index)] = cell.get_neighbor()\n temp_r = index / self._col_num\n temp_c = index % self._col_num\n shift = [[temp_r + dr, temp_c + dc] for dr in self.shifts for\n dc in self.shifts if [temp_r + dr, temp_c + dc] != [\n temp_r, temp_c] and temp_r + dr in range(0, self.\n _row_num) and temp_c + dc in range(0, self._col_num)]\n for s in shift:\n self._flip(update_stack, s[0] * self._col_num + s[1])\n\n def _flipAll(self, update_stack):\n \"\"\" flip all mines\n \"\"\"\n mines_index = game.get_mines()\n for i in mines_index:\n update_stack[str(i)] = status['end']\n update_stack['row_num'] = self._row_num\n update_stack['col_num'] = self._col_num\n update_stack['_mine_num'] = len(mines_index)\n if len(mines_index) == game.get_remain():\n update_stack['type'] = 'win'\n else:\n update_stack['type'] = 'lose'\n", "step-2": "<mask token>\n\n\nclass Game_Service(object):\n\n def 
__init__(self, row_num, col_num):\n self._row_num = row_num\n self._col_num = col_num\n mine_percent = 0.3\n self._mine_num = int(mine_percent * float(self._row_num * self.\n _col_num))\n self.shifts = [-1, 0, 1]\n\n def generate_map(self):\n \"\"\" generate mine map\n \"\"\"\n global game\n game = Board(self._row_num, self._col_num)\n s = set([])\n while len(s) <= self._mine_num:\n i = random.randint(0, self._row_num * self._col_num - 1)\n if i not in s:\n self._set_mine(i)\n s.add(i)\n return {'row_num': self._row_num, 'col_num': self._col_num}\n\n def _set_mine(self, index):\n \"\"\" set cell[index] as a mine\n and update its neighbor cell's mine number\n \"\"\"\n game.get_cell(index).set_mine()\n game.add_mine(index)\n temp_r = index / self._col_num\n temp_c = index % self._col_num\n shift = [[temp_r + dr, temp_c + dc] for dr in self.shifts for dc in\n self.shifts if [temp_r + dr, temp_c + dc] != [temp_r, temp_c] and\n temp_r + dr in range(0, self._row_num) and temp_c + dc in range\n (0, self._col_num)]\n for s in shift:\n game.get_cell(s[0] * self._col_num + s[1]).add_neighbor()\n <mask token>\n\n def _flip(self, update_stack, index):\n \"\"\" flip the chosen cell and its adjcent cells\n \"\"\"\n cell = game.get_cell(index)\n if cell.ifFlipped() == False:\n cell.flip()\n game.decrease_remain()\n if cell.isMine() == False and cell.get_neighbor() > 0:\n update_stack[str(index)] = cell.get_neighbor()\n return\n elif cell.isMine() == False and cell.get_neighbor() == 0:\n update_stack[str(index)] = cell.get_neighbor()\n temp_r = index / self._col_num\n temp_c = index % self._col_num\n shift = [[temp_r + dr, temp_c + dc] for dr in self.shifts for\n dc in self.shifts if [temp_r + dr, temp_c + dc] != [\n temp_r, temp_c] and temp_r + dr in range(0, self.\n _row_num) and temp_c + dc in range(0, self._col_num)]\n for s in shift:\n self._flip(update_stack, s[0] * self._col_num + s[1])\n\n def _flipAll(self, update_stack):\n \"\"\" flip all mines\n \"\"\"\n mines_index = 
game.get_mines()\n for i in mines_index:\n update_stack[str(i)] = status['end']\n update_stack['row_num'] = self._row_num\n update_stack['col_num'] = self._col_num\n update_stack['_mine_num'] = len(mines_index)\n if len(mines_index) == game.get_remain():\n update_stack['type'] = 'win'\n else:\n update_stack['type'] = 'lose'\n", "step-3": "<mask token>\n\n\nclass Game_Service(object):\n\n def __init__(self, row_num, col_num):\n self._row_num = row_num\n self._col_num = col_num\n mine_percent = 0.3\n self._mine_num = int(mine_percent * float(self._row_num * self.\n _col_num))\n self.shifts = [-1, 0, 1]\n\n def generate_map(self):\n \"\"\" generate mine map\n \"\"\"\n global game\n game = Board(self._row_num, self._col_num)\n s = set([])\n while len(s) <= self._mine_num:\n i = random.randint(0, self._row_num * self._col_num - 1)\n if i not in s:\n self._set_mine(i)\n s.add(i)\n return {'row_num': self._row_num, 'col_num': self._col_num}\n\n def _set_mine(self, index):\n \"\"\" set cell[index] as a mine\n and update its neighbor cell's mine number\n \"\"\"\n game.get_cell(index).set_mine()\n game.add_mine(index)\n temp_r = index / self._col_num\n temp_c = index % self._col_num\n shift = [[temp_r + dr, temp_c + dc] for dr in self.shifts for dc in\n self.shifts if [temp_r + dr, temp_c + dc] != [temp_r, temp_c] and\n temp_r + dr in range(0, self._row_num) and temp_c + dc in range\n (0, self._col_num)]\n for s in shift:\n game.get_cell(s[0] * self._col_num + s[1]).add_neighbor()\n\n def choose_mine(self, index):\n \"\"\" choose a cell\n return game status and cells need to change\n \"\"\"\n cell = game.get_cell(index)\n update_stack = {'type': 'continue'}\n if cell.isMine():\n self._flipAll(update_stack)\n else:\n self._flip(update_stack, index)\n return update_stack\n\n def _flip(self, update_stack, index):\n \"\"\" flip the chosen cell and its adjcent cells\n \"\"\"\n cell = game.get_cell(index)\n if cell.ifFlipped() == False:\n cell.flip()\n game.decrease_remain()\n if 
cell.isMine() == False and cell.get_neighbor() > 0:\n update_stack[str(index)] = cell.get_neighbor()\n return\n elif cell.isMine() == False and cell.get_neighbor() == 0:\n update_stack[str(index)] = cell.get_neighbor()\n temp_r = index / self._col_num\n temp_c = index % self._col_num\n shift = [[temp_r + dr, temp_c + dc] for dr in self.shifts for\n dc in self.shifts if [temp_r + dr, temp_c + dc] != [\n temp_r, temp_c] and temp_r + dr in range(0, self.\n _row_num) and temp_c + dc in range(0, self._col_num)]\n for s in shift:\n self._flip(update_stack, s[0] * self._col_num + s[1])\n\n def _flipAll(self, update_stack):\n \"\"\" flip all mines\n \"\"\"\n mines_index = game.get_mines()\n for i in mines_index:\n update_stack[str(i)] = status['end']\n update_stack['row_num'] = self._row_num\n update_stack['col_num'] = self._col_num\n update_stack['_mine_num'] = len(mines_index)\n if len(mines_index) == game.get_remain():\n update_stack['type'] = 'win'\n else:\n update_stack['type'] = 'lose'\n", "step-4": "from models import Cell, Board\nimport random\nfrom pdb import set_trace as bp\nstatus = {'end': -1}\ngame = None\n\n\nclass Game_Service(object):\n\n def __init__(self, row_num, col_num):\n self._row_num = row_num\n self._col_num = col_num\n mine_percent = 0.3\n self._mine_num = int(mine_percent * float(self._row_num * self.\n _col_num))\n self.shifts = [-1, 0, 1]\n\n def generate_map(self):\n \"\"\" generate mine map\n \"\"\"\n global game\n game = Board(self._row_num, self._col_num)\n s = set([])\n while len(s) <= self._mine_num:\n i = random.randint(0, self._row_num * self._col_num - 1)\n if i not in s:\n self._set_mine(i)\n s.add(i)\n return {'row_num': self._row_num, 'col_num': self._col_num}\n\n def _set_mine(self, index):\n \"\"\" set cell[index] as a mine\n and update its neighbor cell's mine number\n \"\"\"\n game.get_cell(index).set_mine()\n game.add_mine(index)\n temp_r = index / self._col_num\n temp_c = index % self._col_num\n shift = [[temp_r + dr, temp_c + 
dc] for dr in self.shifts for dc in\n self.shifts if [temp_r + dr, temp_c + dc] != [temp_r, temp_c] and\n temp_r + dr in range(0, self._row_num) and temp_c + dc in range\n (0, self._col_num)]\n for s in shift:\n game.get_cell(s[0] * self._col_num + s[1]).add_neighbor()\n\n def choose_mine(self, index):\n \"\"\" choose a cell\n return game status and cells need to change\n \"\"\"\n cell = game.get_cell(index)\n update_stack = {'type': 'continue'}\n if cell.isMine():\n self._flipAll(update_stack)\n else:\n self._flip(update_stack, index)\n return update_stack\n\n def _flip(self, update_stack, index):\n \"\"\" flip the chosen cell and its adjcent cells\n \"\"\"\n cell = game.get_cell(index)\n if cell.ifFlipped() == False:\n cell.flip()\n game.decrease_remain()\n if cell.isMine() == False and cell.get_neighbor() > 0:\n update_stack[str(index)] = cell.get_neighbor()\n return\n elif cell.isMine() == False and cell.get_neighbor() == 0:\n update_stack[str(index)] = cell.get_neighbor()\n temp_r = index / self._col_num\n temp_c = index % self._col_num\n shift = [[temp_r + dr, temp_c + dc] for dr in self.shifts for\n dc in self.shifts if [temp_r + dr, temp_c + dc] != [\n temp_r, temp_c] and temp_r + dr in range(0, self.\n _row_num) and temp_c + dc in range(0, self._col_num)]\n for s in shift:\n self._flip(update_stack, s[0] * self._col_num + s[1])\n\n def _flipAll(self, update_stack):\n \"\"\" flip all mines\n \"\"\"\n mines_index = game.get_mines()\n for i in mines_index:\n update_stack[str(i)] = status['end']\n update_stack['row_num'] = self._row_num\n update_stack['col_num'] = self._col_num\n update_stack['_mine_num'] = len(mines_index)\n if len(mines_index) == game.get_remain():\n update_stack['type'] = 'win'\n else:\n update_stack['type'] = 'lose'\n", "step-5": "\nfrom models import Cell,Board\nimport random\nfrom pdb import set_trace as bp\n\n\nstatus={'end':-1}\ngame=None\n\nclass Game_Service(object):\n\n def __init__(self,row_num,col_num):\n self._row_num=row_num\n 
self._col_num=col_num\n mine_percent=0.3\n self._mine_num=int(mine_percent*float(self._row_num*self._col_num))\n self.shifts=[-1,0,1]\n \n \n def generate_map(self):\n \"\"\" generate mine map\n \"\"\"\n global game\n game=Board(self._row_num,self._col_num)\n s=set([])\n while len(s)<=self._mine_num:\n i=random.randint(0, self._row_num*self._col_num-1)\n if i not in s:\n self._set_mine(i)\n s.add(i) \n return {#'board':[game.get_board()[inx].get_neighbor() for inx in range(0,self._row_num*self._col_num)],\n #'mines':game.get_mines(),\n 'row_num':self._row_num,\n 'col_num':self._col_num}\n \n\n def _set_mine(self,index):\n \"\"\" set cell[index] as a mine\n and update its neighbor cell's mine number\n \"\"\"\n game.get_cell(index).set_mine() #set current index as mine\n game.add_mine(index) #add index to mine_index\n\n # add its neighbor's neighbor_num \n temp_r=index/self._col_num\n temp_c=index%self._col_num\n shift=[[temp_r+dr,temp_c+dc] for dr in self.shifts for dc in self.shifts\n if [temp_r+dr,temp_c+dc]!=[temp_r,temp_c]\n and temp_r+dr in range(0,self._row_num)\n and temp_c+dc in range(0,self._col_num)]\n for s in shift:\n game.get_cell(s[0]*self._col_num+s[1]).add_neighbor()\n \n\n def choose_mine(self,index):\n \"\"\" choose a cell\n return game status and cells need to change\n \"\"\"\n cell=game.get_cell(index)\n update_stack={'type':'continue'}\n \n if cell.isMine():\n self._flipAll(update_stack) #clicked on a mine\n else:\n self._flip(update_stack,index) #clicked on a safe cell\n\n return update_stack\n \n\n def _flip(self,update_stack,index):\n \"\"\" flip the chosen cell and its adjcent cells\n \"\"\"\n cell=game.get_cell(index)\n if cell.ifFlipped()==False:\n cell.flip()\n game.decrease_remain()\n if cell.isMine()==False and cell.get_neighbor()>0:\n update_stack[str(index)]=cell.get_neighbor()\n return\n elif cell.isMine()==False and cell.get_neighbor()==0:\n update_stack[str(index)]=cell.get_neighbor()\n temp_r=index/self._col_num\n 
temp_c=index%self._col_num\n shift=[[temp_r+dr,temp_c+dc] for dr in self.shifts for dc in self.shifts\n if [temp_r+dr,temp_c+dc]!=[temp_r,temp_c]\n and temp_r+dr in range(0,self._row_num)\n and temp_c+dc in range(0,self._col_num)]\n for s in shift:\n self._flip(update_stack,s[0]*self._col_num+s[1])\n \n\n def _flipAll(self,update_stack):\n \"\"\" flip all mines\n \"\"\"\n mines_index=game.get_mines()\n for i in mines_index:\n update_stack[str(i)]=status['end']\n\n update_stack['row_num']=self._row_num\n update_stack['col_num']=self._col_num\n update_stack['_mine_num']=len(mines_index) \n if len(mines_index)==game.get_remain(): \n update_stack['type']='win' \n else:\n update_stack['type']='lose'\n \n \n \n \n \n", "step-ids": [ 5, 6, 7, 9, 10 ] }
[ 5, 6, 7, 9, 10 ]
#Program written and maintained by Matthew Meyerink #File responsible for defining the game based on user input from cpu_game import CPU_Game from warning_color import Warning class User_Game(CPU_Game): #Get the user phrase to start the game def get_user_phrase(self): correct_form = False while (not correct_form): correct_form = True #Recieve the input phrase self.phrase = input("Please input a phrase: ").upper() #Check to make sure no numbers or special characters in phrase for i in range(0, len(self.phrase)): alpha_space = (self.phrase[i].isalpha() or self.phrase[i].isspace()) if not alpha_space: correct_form = False print(Warning.YELLOW + "\nPhrase needs to be all letters!!!\n" + Warning.END) break #Check to make sure phrase isn't empty if self.phrase == "": correct_form = False print(Warning.YELLOW + "\nDid you mean to input nothing?", " Do you want to play or not?!?!\n" + Warning.END)
normal
{ "blob_id": "d0dbf5a13b8e718ed426a254546ba13da12b2c3e", "index": 4149, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\nclass User_Game(CPU_Game):\n <mask token>\n", "step-3": "<mask token>\n\n\nclass User_Game(CPU_Game):\n\n def get_user_phrase(self):\n correct_form = False\n while not correct_form:\n correct_form = True\n self.phrase = input('Please input a phrase: ').upper()\n for i in range(0, len(self.phrase)):\n alpha_space = self.phrase[i].isalpha() or self.phrase[i\n ].isspace()\n if not alpha_space:\n correct_form = False\n print(Warning.YELLOW +\n '\\nPhrase needs to be all letters!!!\\n' + Warning.END)\n break\n if self.phrase == '':\n correct_form = False\n print(Warning.YELLOW + '\\nDid you mean to input nothing?', \n \"\"\" Do you want to play or not?!?!\n\"\"\" + Warning.END)\n", "step-4": "from cpu_game import CPU_Game\nfrom warning_color import Warning\n\n\nclass User_Game(CPU_Game):\n\n def get_user_phrase(self):\n correct_form = False\n while not correct_form:\n correct_form = True\n self.phrase = input('Please input a phrase: ').upper()\n for i in range(0, len(self.phrase)):\n alpha_space = self.phrase[i].isalpha() or self.phrase[i\n ].isspace()\n if not alpha_space:\n correct_form = False\n print(Warning.YELLOW +\n '\\nPhrase needs to be all letters!!!\\n' + Warning.END)\n break\n if self.phrase == '':\n correct_form = False\n print(Warning.YELLOW + '\\nDid you mean to input nothing?', \n \"\"\" Do you want to play or not?!?!\n\"\"\" + Warning.END)\n", "step-5": "\n#Program written and maintained by Matthew Meyerink\n\n#File responsible for defining the game based on user input\n\nfrom cpu_game import CPU_Game\nfrom warning_color import Warning\n\nclass User_Game(CPU_Game):\n\n #Get the user phrase to start the game\n def get_user_phrase(self):\n correct_form = False\n while (not correct_form):\n\n correct_form = True\n\n #Recieve the input phrase\n self.phrase = input(\"Please input a phrase: \").upper()\n\n #Check to make sure no 
numbers or special characters in phrase\n for i in range(0, len(self.phrase)):\n alpha_space = (self.phrase[i].isalpha()\n or self.phrase[i].isspace())\n if not alpha_space:\n correct_form = False\n print(Warning.YELLOW +\n \"\\nPhrase needs to be all letters!!!\\n\" +\n Warning.END)\n break\n\n #Check to make sure phrase isn't empty\n if self.phrase == \"\":\n correct_form = False\n print(Warning.YELLOW +\n \"\\nDid you mean to input nothing?\",\n \" Do you want to play or not?!?!\\n\" +\n Warning.END)\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
from setuptools import setup, find_packages import sys, os version = '0.1' setup( name='ckanext-MYEXTENSION', version=version, description="description", long_description="""\ """, classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers keywords='', author='ldhspace', author_email='ldhspace@yahoo.co.kr', url='www.naver.com', license='free', packages=find_packages(exclude=['ez_setup', 'examples', 'tests']), namespace_packages=['ckanext', 'ckanext.MYEXTENSION'], include_package_data=True, zip_safe=False, install_requires=[ # -*- Extra requirements: -*- ], entry_points=\ """ [ckan.plugins] # Add plugins here, eg usmetadata=ckanext.MYEXTENSION.plugin:USMetadataPlugin """, )
normal
{ "blob_id": "9d2c0d59b0b2b4e4fca942e648059738053c53d0", "index": 9376, "step-1": "<mask token>\n", "step-2": "<mask token>\nsetup(name='ckanext-MYEXTENSION', version=version, description=\n 'description', long_description='\\t', classifiers=[], keywords='',\n author='ldhspace', author_email='ldhspace@yahoo.co.kr', url=\n 'www.naver.com', license='free', packages=find_packages(exclude=[\n 'ez_setup', 'examples', 'tests']), namespace_packages=['ckanext',\n 'ckanext.MYEXTENSION'], include_package_data=True, zip_safe=False,\n install_requires=[], entry_points=\n \"\"\"\n [ckan.plugins]\n\t# Add plugins here, eg\n\tusmetadata=ckanext.MYEXTENSION.plugin:USMetadataPlugin\n\t\"\"\"\n )\n", "step-3": "<mask token>\nversion = '0.1'\nsetup(name='ckanext-MYEXTENSION', version=version, description=\n 'description', long_description='\\t', classifiers=[], keywords='',\n author='ldhspace', author_email='ldhspace@yahoo.co.kr', url=\n 'www.naver.com', license='free', packages=find_packages(exclude=[\n 'ez_setup', 'examples', 'tests']), namespace_packages=['ckanext',\n 'ckanext.MYEXTENSION'], include_package_data=True, zip_safe=False,\n install_requires=[], entry_points=\n \"\"\"\n [ckan.plugins]\n\t# Add plugins here, eg\n\tusmetadata=ckanext.MYEXTENSION.plugin:USMetadataPlugin\n\t\"\"\"\n )\n", "step-4": "from setuptools import setup, find_packages\nimport sys, os\nversion = '0.1'\nsetup(name='ckanext-MYEXTENSION', version=version, description=\n 'description', long_description='\\t', classifiers=[], keywords='',\n author='ldhspace', author_email='ldhspace@yahoo.co.kr', url=\n 'www.naver.com', license='free', packages=find_packages(exclude=[\n 'ez_setup', 'examples', 'tests']), namespace_packages=['ckanext',\n 'ckanext.MYEXTENSION'], include_package_data=True, zip_safe=False,\n install_requires=[], entry_points=\n \"\"\"\n [ckan.plugins]\n\t# Add plugins here, eg\n\tusmetadata=ckanext.MYEXTENSION.plugin:USMetadataPlugin\n\t\"\"\"\n )\n", "step-5": "from setuptools import setup, 
find_packages\nimport sys, os\n\nversion = '0.1'\n\nsetup(\n\tname='ckanext-MYEXTENSION',\n\tversion=version,\n\tdescription=\"description\",\n\tlong_description=\"\"\"\\\n\t\"\"\",\n\tclassifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers\n\tkeywords='',\n\tauthor='ldhspace',\n\tauthor_email='ldhspace@yahoo.co.kr',\n\turl='www.naver.com',\n\tlicense='free',\n\tpackages=find_packages(exclude=['ez_setup', 'examples', 'tests']),\n\tnamespace_packages=['ckanext', 'ckanext.MYEXTENSION'],\n\tinclude_package_data=True,\n\tzip_safe=False,\n\tinstall_requires=[\n\t\t# -*- Extra requirements: -*-\n\t],\n\tentry_points=\\\n\t\"\"\"\n [ckan.plugins]\n\t# Add plugins here, eg\n\tusmetadata=ckanext.MYEXTENSION.plugin:USMetadataPlugin\n\t\"\"\",\n)\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> for i in soup.find_all('a'): if 'href' in i.attrs: print(i.attrs['href']) <|reserved_special_token_1|> <|reserved_special_token_0|> url = 'http://www.dytt8.net/' user = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.181 Safari/537.36' } html = urllib.request.urlopen(url) html.encoding = 'utf-8' soup = BeautifulSoup(html.read()) for i in soup.find_all('a'): if 'href' in i.attrs: print(i.attrs['href']) <|reserved_special_token_1|> import requests from bs4 import BeautifulSoup import urllib.request url = 'http://www.dytt8.net/' user = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.181 Safari/537.36' } html = urllib.request.urlopen(url) html.encoding = 'utf-8' soup = BeautifulSoup(html.read()) for i in soup.find_all('a'): if 'href' in i.attrs: print(i.attrs['href'])
flexible
{ "blob_id": "2e571e3412bf9f3a42bf87976ea9a5ec68d5815c", "index": 9056, "step-1": "<mask token>\n", "step-2": "<mask token>\nfor i in soup.find_all('a'):\n if 'href' in i.attrs:\n print(i.attrs['href'])\n", "step-3": "<mask token>\nurl = 'http://www.dytt8.net/'\nuser = {'User-Agent':\n 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.181 Safari/537.36'\n }\nhtml = urllib.request.urlopen(url)\nhtml.encoding = 'utf-8'\nsoup = BeautifulSoup(html.read())\nfor i in soup.find_all('a'):\n if 'href' in i.attrs:\n print(i.attrs['href'])\n", "step-4": "import requests\nfrom bs4 import BeautifulSoup\nimport urllib.request\nurl = 'http://www.dytt8.net/'\nuser = {'User-Agent':\n 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.181 Safari/537.36'\n }\nhtml = urllib.request.urlopen(url)\nhtml.encoding = 'utf-8'\nsoup = BeautifulSoup(html.read())\nfor i in soup.find_all('a'):\n if 'href' in i.attrs:\n print(i.attrs['href'])\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
<|reserved_special_token_0|> def get_files_api(): """Get the files API client.""" return get_api_client(cloudsmith_api.FilesApi) def validate_request_file_upload(owner, repo, filepath, md5_checksum=None): """Validate parameters for requesting a file upload.""" client = get_files_api() md5_checksum = md5_checksum or calculate_file_md5(filepath) with catch_raise_api_exception(): _, _, headers = client.files_validate_with_http_info(owner=owner, repo=repo, data={'filename': os.path.basename(filepath), 'md5_checksum': md5_checksum}) ratelimits.maybe_rate_limit(client, headers) return md5_checksum <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> def get_files_api(): """Get the files API client.""" return get_api_client(cloudsmith_api.FilesApi) def validate_request_file_upload(owner, repo, filepath, md5_checksum=None): """Validate parameters for requesting a file upload.""" client = get_files_api() md5_checksum = md5_checksum or calculate_file_md5(filepath) with catch_raise_api_exception(): _, _, headers = client.files_validate_with_http_info(owner=owner, repo=repo, data={'filename': os.path.basename(filepath), 'md5_checksum': md5_checksum}) ratelimits.maybe_rate_limit(client, headers) return md5_checksum <|reserved_special_token_0|> def upload_file(upload_url, upload_fields, filepath, callback=None): """Upload a pre-signed file to Cloudsmith.""" upload_fields = list(upload_fields.items()) upload_fields.append(('file', (os.path.basename(filepath), click. 
open_file(filepath, 'rb')))) encoder = MultipartEncoder(upload_fields) monitor = MultipartEncoderMonitor(encoder, callback=callback) config = cloudsmith_api.Configuration() if config.proxy: proxies = {'http': config.proxy, 'https': config.proxy} else: proxies = None headers = {'content-type': monitor.content_type} client = get_files_api() headers['user-agent'] = client.api_client.user_agent session = create_requests_session() resp = session.post(upload_url, data=monitor, headers=headers, proxies= proxies) try: resp.raise_for_status() except requests.RequestException as exc: raise ApiException(resp.status_code, headers=exc.response.headers, body=exc.response.content) <|reserved_special_token_1|> <|reserved_special_token_0|> def get_files_api(): """Get the files API client.""" return get_api_client(cloudsmith_api.FilesApi) def validate_request_file_upload(owner, repo, filepath, md5_checksum=None): """Validate parameters for requesting a file upload.""" client = get_files_api() md5_checksum = md5_checksum or calculate_file_md5(filepath) with catch_raise_api_exception(): _, _, headers = client.files_validate_with_http_info(owner=owner, repo=repo, data={'filename': os.path.basename(filepath), 'md5_checksum': md5_checksum}) ratelimits.maybe_rate_limit(client, headers) return md5_checksum def request_file_upload(owner, repo, filepath, md5_checksum=None): """Request a new package file upload (for creating packages).""" client = get_files_api() md5_checksum = md5_checksum or calculate_file_md5(filepath) with catch_raise_api_exception(): data, _, headers = client.files_create_with_http_info(owner=owner, repo=repo, data={'filename': os.path.basename(filepath), 'md5_checksum': md5_checksum}) ratelimits.maybe_rate_limit(client, headers) return data.identifier, data.upload_url, data.upload_fields def upload_file(upload_url, upload_fields, filepath, callback=None): """Upload a pre-signed file to Cloudsmith.""" upload_fields = list(upload_fields.items()) 
upload_fields.append(('file', (os.path.basename(filepath), click. open_file(filepath, 'rb')))) encoder = MultipartEncoder(upload_fields) monitor = MultipartEncoderMonitor(encoder, callback=callback) config = cloudsmith_api.Configuration() if config.proxy: proxies = {'http': config.proxy, 'https': config.proxy} else: proxies = None headers = {'content-type': monitor.content_type} client = get_files_api() headers['user-agent'] = client.api_client.user_agent session = create_requests_session() resp = session.post(upload_url, data=monitor, headers=headers, proxies= proxies) try: resp.raise_for_status() except requests.RequestException as exc: raise ApiException(resp.status_code, headers=exc.response.headers, body=exc.response.content) <|reserved_special_token_1|> <|reserved_special_token_0|> import os import click import cloudsmith_api import requests from requests_toolbelt import MultipartEncoder, MultipartEncoderMonitor from .. import ratelimits from ..rest import create_requests_session from ..utils import calculate_file_md5 from .exceptions import ApiException, catch_raise_api_exception from .init import get_api_client def get_files_api(): """Get the files API client.""" return get_api_client(cloudsmith_api.FilesApi) def validate_request_file_upload(owner, repo, filepath, md5_checksum=None): """Validate parameters for requesting a file upload.""" client = get_files_api() md5_checksum = md5_checksum or calculate_file_md5(filepath) with catch_raise_api_exception(): _, _, headers = client.files_validate_with_http_info(owner=owner, repo=repo, data={'filename': os.path.basename(filepath), 'md5_checksum': md5_checksum}) ratelimits.maybe_rate_limit(client, headers) return md5_checksum def request_file_upload(owner, repo, filepath, md5_checksum=None): """Request a new package file upload (for creating packages).""" client = get_files_api() md5_checksum = md5_checksum or calculate_file_md5(filepath) with catch_raise_api_exception(): data, _, headers = 
client.files_create_with_http_info(owner=owner, repo=repo, data={'filename': os.path.basename(filepath), 'md5_checksum': md5_checksum}) ratelimits.maybe_rate_limit(client, headers) return data.identifier, data.upload_url, data.upload_fields def upload_file(upload_url, upload_fields, filepath, callback=None): """Upload a pre-signed file to Cloudsmith.""" upload_fields = list(upload_fields.items()) upload_fields.append(('file', (os.path.basename(filepath), click. open_file(filepath, 'rb')))) encoder = MultipartEncoder(upload_fields) monitor = MultipartEncoderMonitor(encoder, callback=callback) config = cloudsmith_api.Configuration() if config.proxy: proxies = {'http': config.proxy, 'https': config.proxy} else: proxies = None headers = {'content-type': monitor.content_type} client = get_files_api() headers['user-agent'] = client.api_client.user_agent session = create_requests_session() resp = session.post(upload_url, data=monitor, headers=headers, proxies= proxies) try: resp.raise_for_status() except requests.RequestException as exc: raise ApiException(resp.status_code, headers=exc.response.headers, body=exc.response.content) <|reserved_special_token_1|> """API - Files endpoints.""" import os import click import cloudsmith_api import requests from requests_toolbelt import MultipartEncoder, MultipartEncoderMonitor from .. 
import ratelimits from ..rest import create_requests_session from ..utils import calculate_file_md5 from .exceptions import ApiException, catch_raise_api_exception from .init import get_api_client def get_files_api(): """Get the files API client.""" return get_api_client(cloudsmith_api.FilesApi) def validate_request_file_upload(owner, repo, filepath, md5_checksum=None): """Validate parameters for requesting a file upload.""" client = get_files_api() md5_checksum = md5_checksum or calculate_file_md5(filepath) with catch_raise_api_exception(): _, _, headers = client.files_validate_with_http_info( owner=owner, repo=repo, data={"filename": os.path.basename(filepath), "md5_checksum": md5_checksum}, ) ratelimits.maybe_rate_limit(client, headers) return md5_checksum def request_file_upload(owner, repo, filepath, md5_checksum=None): """Request a new package file upload (for creating packages).""" client = get_files_api() md5_checksum = md5_checksum or calculate_file_md5(filepath) with catch_raise_api_exception(): data, _, headers = client.files_create_with_http_info( owner=owner, repo=repo, data={"filename": os.path.basename(filepath), "md5_checksum": md5_checksum}, ) # pylint: disable=no-member # Pylint detects the returned value as a tuple ratelimits.maybe_rate_limit(client, headers) return data.identifier, data.upload_url, data.upload_fields def upload_file(upload_url, upload_fields, filepath, callback=None): """Upload a pre-signed file to Cloudsmith.""" upload_fields = list(upload_fields.items()) upload_fields.append( ("file", (os.path.basename(filepath), click.open_file(filepath, "rb"))) ) encoder = MultipartEncoder(upload_fields) monitor = MultipartEncoderMonitor(encoder, callback=callback) config = cloudsmith_api.Configuration() if config.proxy: proxies = {"http": config.proxy, "https": config.proxy} else: proxies = None headers = {"content-type": monitor.content_type} client = get_files_api() headers["user-agent"] = client.api_client.user_agent session = 
create_requests_session() resp = session.post(upload_url, data=monitor, headers=headers, proxies=proxies) try: resp.raise_for_status() except requests.RequestException as exc: raise ApiException( resp.status_code, headers=exc.response.headers, body=exc.response.content )
flexible
{ "blob_id": "ee03263d92372899ec1feaf3a8ea48677b053676", "index": 6281, "step-1": "<mask token>\n\n\ndef get_files_api():\n \"\"\"Get the files API client.\"\"\"\n return get_api_client(cloudsmith_api.FilesApi)\n\n\ndef validate_request_file_upload(owner, repo, filepath, md5_checksum=None):\n \"\"\"Validate parameters for requesting a file upload.\"\"\"\n client = get_files_api()\n md5_checksum = md5_checksum or calculate_file_md5(filepath)\n with catch_raise_api_exception():\n _, _, headers = client.files_validate_with_http_info(owner=owner,\n repo=repo, data={'filename': os.path.basename(filepath),\n 'md5_checksum': md5_checksum})\n ratelimits.maybe_rate_limit(client, headers)\n return md5_checksum\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\ndef get_files_api():\n \"\"\"Get the files API client.\"\"\"\n return get_api_client(cloudsmith_api.FilesApi)\n\n\ndef validate_request_file_upload(owner, repo, filepath, md5_checksum=None):\n \"\"\"Validate parameters for requesting a file upload.\"\"\"\n client = get_files_api()\n md5_checksum = md5_checksum or calculate_file_md5(filepath)\n with catch_raise_api_exception():\n _, _, headers = client.files_validate_with_http_info(owner=owner,\n repo=repo, data={'filename': os.path.basename(filepath),\n 'md5_checksum': md5_checksum})\n ratelimits.maybe_rate_limit(client, headers)\n return md5_checksum\n\n\n<mask token>\n\n\ndef upload_file(upload_url, upload_fields, filepath, callback=None):\n \"\"\"Upload a pre-signed file to Cloudsmith.\"\"\"\n upload_fields = list(upload_fields.items())\n upload_fields.append(('file', (os.path.basename(filepath), click.\n open_file(filepath, 'rb'))))\n encoder = MultipartEncoder(upload_fields)\n monitor = MultipartEncoderMonitor(encoder, callback=callback)\n config = cloudsmith_api.Configuration()\n if config.proxy:\n proxies = {'http': config.proxy, 'https': config.proxy}\n else:\n proxies = None\n headers = {'content-type': monitor.content_type}\n client = get_files_api()\n 
headers['user-agent'] = client.api_client.user_agent\n session = create_requests_session()\n resp = session.post(upload_url, data=monitor, headers=headers, proxies=\n proxies)\n try:\n resp.raise_for_status()\n except requests.RequestException as exc:\n raise ApiException(resp.status_code, headers=exc.response.headers,\n body=exc.response.content)\n", "step-3": "<mask token>\n\n\ndef get_files_api():\n \"\"\"Get the files API client.\"\"\"\n return get_api_client(cloudsmith_api.FilesApi)\n\n\ndef validate_request_file_upload(owner, repo, filepath, md5_checksum=None):\n \"\"\"Validate parameters for requesting a file upload.\"\"\"\n client = get_files_api()\n md5_checksum = md5_checksum or calculate_file_md5(filepath)\n with catch_raise_api_exception():\n _, _, headers = client.files_validate_with_http_info(owner=owner,\n repo=repo, data={'filename': os.path.basename(filepath),\n 'md5_checksum': md5_checksum})\n ratelimits.maybe_rate_limit(client, headers)\n return md5_checksum\n\n\ndef request_file_upload(owner, repo, filepath, md5_checksum=None):\n \"\"\"Request a new package file upload (for creating packages).\"\"\"\n client = get_files_api()\n md5_checksum = md5_checksum or calculate_file_md5(filepath)\n with catch_raise_api_exception():\n data, _, headers = client.files_create_with_http_info(owner=owner,\n repo=repo, data={'filename': os.path.basename(filepath),\n 'md5_checksum': md5_checksum})\n ratelimits.maybe_rate_limit(client, headers)\n return data.identifier, data.upload_url, data.upload_fields\n\n\ndef upload_file(upload_url, upload_fields, filepath, callback=None):\n \"\"\"Upload a pre-signed file to Cloudsmith.\"\"\"\n upload_fields = list(upload_fields.items())\n upload_fields.append(('file', (os.path.basename(filepath), click.\n open_file(filepath, 'rb'))))\n encoder = MultipartEncoder(upload_fields)\n monitor = MultipartEncoderMonitor(encoder, callback=callback)\n config = cloudsmith_api.Configuration()\n if config.proxy:\n proxies = {'http': 
config.proxy, 'https': config.proxy}\n else:\n proxies = None\n headers = {'content-type': monitor.content_type}\n client = get_files_api()\n headers['user-agent'] = client.api_client.user_agent\n session = create_requests_session()\n resp = session.post(upload_url, data=monitor, headers=headers, proxies=\n proxies)\n try:\n resp.raise_for_status()\n except requests.RequestException as exc:\n raise ApiException(resp.status_code, headers=exc.response.headers,\n body=exc.response.content)\n", "step-4": "<mask token>\nimport os\nimport click\nimport cloudsmith_api\nimport requests\nfrom requests_toolbelt import MultipartEncoder, MultipartEncoderMonitor\nfrom .. import ratelimits\nfrom ..rest import create_requests_session\nfrom ..utils import calculate_file_md5\nfrom .exceptions import ApiException, catch_raise_api_exception\nfrom .init import get_api_client\n\n\ndef get_files_api():\n \"\"\"Get the files API client.\"\"\"\n return get_api_client(cloudsmith_api.FilesApi)\n\n\ndef validate_request_file_upload(owner, repo, filepath, md5_checksum=None):\n \"\"\"Validate parameters for requesting a file upload.\"\"\"\n client = get_files_api()\n md5_checksum = md5_checksum or calculate_file_md5(filepath)\n with catch_raise_api_exception():\n _, _, headers = client.files_validate_with_http_info(owner=owner,\n repo=repo, data={'filename': os.path.basename(filepath),\n 'md5_checksum': md5_checksum})\n ratelimits.maybe_rate_limit(client, headers)\n return md5_checksum\n\n\ndef request_file_upload(owner, repo, filepath, md5_checksum=None):\n \"\"\"Request a new package file upload (for creating packages).\"\"\"\n client = get_files_api()\n md5_checksum = md5_checksum or calculate_file_md5(filepath)\n with catch_raise_api_exception():\n data, _, headers = client.files_create_with_http_info(owner=owner,\n repo=repo, data={'filename': os.path.basename(filepath),\n 'md5_checksum': md5_checksum})\n ratelimits.maybe_rate_limit(client, headers)\n return data.identifier, 
data.upload_url, data.upload_fields\n\n\ndef upload_file(upload_url, upload_fields, filepath, callback=None):\n \"\"\"Upload a pre-signed file to Cloudsmith.\"\"\"\n upload_fields = list(upload_fields.items())\n upload_fields.append(('file', (os.path.basename(filepath), click.\n open_file(filepath, 'rb'))))\n encoder = MultipartEncoder(upload_fields)\n monitor = MultipartEncoderMonitor(encoder, callback=callback)\n config = cloudsmith_api.Configuration()\n if config.proxy:\n proxies = {'http': config.proxy, 'https': config.proxy}\n else:\n proxies = None\n headers = {'content-type': monitor.content_type}\n client = get_files_api()\n headers['user-agent'] = client.api_client.user_agent\n session = create_requests_session()\n resp = session.post(upload_url, data=monitor, headers=headers, proxies=\n proxies)\n try:\n resp.raise_for_status()\n except requests.RequestException as exc:\n raise ApiException(resp.status_code, headers=exc.response.headers,\n body=exc.response.content)\n", "step-5": "\"\"\"API - Files endpoints.\"\"\"\n\nimport os\n\nimport click\nimport cloudsmith_api\nimport requests\nfrom requests_toolbelt import MultipartEncoder, MultipartEncoderMonitor\n\nfrom .. 
import ratelimits\nfrom ..rest import create_requests_session\nfrom ..utils import calculate_file_md5\nfrom .exceptions import ApiException, catch_raise_api_exception\nfrom .init import get_api_client\n\n\ndef get_files_api():\n \"\"\"Get the files API client.\"\"\"\n return get_api_client(cloudsmith_api.FilesApi)\n\n\ndef validate_request_file_upload(owner, repo, filepath, md5_checksum=None):\n \"\"\"Validate parameters for requesting a file upload.\"\"\"\n client = get_files_api()\n md5_checksum = md5_checksum or calculate_file_md5(filepath)\n\n with catch_raise_api_exception():\n _, _, headers = client.files_validate_with_http_info(\n owner=owner,\n repo=repo,\n data={\"filename\": os.path.basename(filepath), \"md5_checksum\": md5_checksum},\n )\n\n ratelimits.maybe_rate_limit(client, headers)\n return md5_checksum\n\n\ndef request_file_upload(owner, repo, filepath, md5_checksum=None):\n \"\"\"Request a new package file upload (for creating packages).\"\"\"\n client = get_files_api()\n md5_checksum = md5_checksum or calculate_file_md5(filepath)\n\n with catch_raise_api_exception():\n data, _, headers = client.files_create_with_http_info(\n owner=owner,\n repo=repo,\n data={\"filename\": os.path.basename(filepath), \"md5_checksum\": md5_checksum},\n )\n\n # pylint: disable=no-member\n # Pylint detects the returned value as a tuple\n ratelimits.maybe_rate_limit(client, headers)\n return data.identifier, data.upload_url, data.upload_fields\n\n\ndef upload_file(upload_url, upload_fields, filepath, callback=None):\n \"\"\"Upload a pre-signed file to Cloudsmith.\"\"\"\n upload_fields = list(upload_fields.items())\n upload_fields.append(\n (\"file\", (os.path.basename(filepath), click.open_file(filepath, \"rb\")))\n )\n encoder = MultipartEncoder(upload_fields)\n monitor = MultipartEncoderMonitor(encoder, callback=callback)\n\n config = cloudsmith_api.Configuration()\n if config.proxy:\n proxies = {\"http\": config.proxy, \"https\": config.proxy}\n else:\n proxies = 
None\n\n headers = {\"content-type\": monitor.content_type}\n\n client = get_files_api()\n headers[\"user-agent\"] = client.api_client.user_agent\n\n session = create_requests_session()\n resp = session.post(upload_url, data=monitor, headers=headers, proxies=proxies)\n\n try:\n resp.raise_for_status()\n except requests.RequestException as exc:\n raise ApiException(\n resp.status_code, headers=exc.response.headers, body=exc.response.content\n )\n", "step-ids": [ 2, 3, 4, 5, 6 ] }
[ 2, 3, 4, 5, 6 ]
import art import random print(art.guess) print(art.the) print(art.number) print("I'm thinking of a number between 1 and 100") number = random.randint(1,100) turns = 0 difficulty = input("Chose a difficulty. 'easy' or 'hard'?\n") if difficulty == 'easy': turns +=10 else: turns +=5 gameover = False while not gameover: print(f"You've got {turns} turns left!") guess = int(input("Guess a number!\n")) if guess > number: print("too high!") turns -= 1 elif guess < number: print("too low!") turns -= 1 elif guess == number: print("Thats it! You Win!") gameover = True if turns == 0: print("You used all your chances!") print("GAME OVER") gameover = True
normal
{ "blob_id": "f2bf4f5b057af1d2362ec8d1472aa76e774be1c7", "index": 2736, "step-1": "<mask token>\n", "step-2": "<mask token>\nprint(art.guess)\nprint(art.the)\nprint(art.number)\nprint(\"I'm thinking of a number between 1 and 100\")\n<mask token>\nif difficulty == 'easy':\n turns += 10\nelse:\n turns += 5\n<mask token>\nwhile not gameover:\n print(f\"You've got {turns} turns left!\")\n guess = int(input('Guess a number!\\n'))\n if guess > number:\n print('too high!')\n turns -= 1\n elif guess < number:\n print('too low!')\n turns -= 1\n elif guess == number:\n print('Thats it! You Win!')\n gameover = True\n if turns == 0:\n print('You used all your chances!')\n print('GAME OVER')\n gameover = True\n", "step-3": "<mask token>\nprint(art.guess)\nprint(art.the)\nprint(art.number)\nprint(\"I'm thinking of a number between 1 and 100\")\nnumber = random.randint(1, 100)\nturns = 0\ndifficulty = input(\"Chose a difficulty. 'easy' or 'hard'?\\n\")\nif difficulty == 'easy':\n turns += 10\nelse:\n turns += 5\ngameover = False\nwhile not gameover:\n print(f\"You've got {turns} turns left!\")\n guess = int(input('Guess a number!\\n'))\n if guess > number:\n print('too high!')\n turns -= 1\n elif guess < number:\n print('too low!')\n turns -= 1\n elif guess == number:\n print('Thats it! You Win!')\n gameover = True\n if turns == 0:\n print('You used all your chances!')\n print('GAME OVER')\n gameover = True\n", "step-4": "import art\nimport random\nprint(art.guess)\nprint(art.the)\nprint(art.number)\nprint(\"I'm thinking of a number between 1 and 100\")\nnumber = random.randint(1, 100)\nturns = 0\ndifficulty = input(\"Chose a difficulty. 
'easy' or 'hard'?\\n\")\nif difficulty == 'easy':\n turns += 10\nelse:\n turns += 5\ngameover = False\nwhile not gameover:\n print(f\"You've got {turns} turns left!\")\n guess = int(input('Guess a number!\\n'))\n if guess > number:\n print('too high!')\n turns -= 1\n elif guess < number:\n print('too low!')\n turns -= 1\n elif guess == number:\n print('Thats it! You Win!')\n gameover = True\n if turns == 0:\n print('You used all your chances!')\n print('GAME OVER')\n gameover = True\n", "step-5": "import art\nimport random\n\nprint(art.guess)\nprint(art.the)\nprint(art.number)\nprint(\"I'm thinking of a number between 1 and 100\")\n\nnumber = random.randint(1,100)\nturns = 0\n\ndifficulty = input(\"Chose a difficulty. 'easy' or 'hard'?\\n\")\n\nif difficulty == 'easy':\n turns +=10\nelse:\n turns +=5\n\ngameover = False\n\nwhile not gameover:\n print(f\"You've got {turns} turns left!\")\n guess = int(input(\"Guess a number!\\n\"))\n\n if guess > number:\n print(\"too high!\")\n turns -= 1\n elif guess < number:\n print(\"too low!\")\n turns -= 1\n elif guess == number:\n print(\"Thats it! You Win!\")\n gameover = True\n\n if turns == 0:\n print(\"You used all your chances!\")\n print(\"GAME OVER\")\n gameover = True", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
#!/usr/bin/env python """ Script to download and plot RaspberryShake station data Also computes and plots theoretical phase arrival times and raypaths. See https://docs.obspy.org/packages/obspy.taup.html for more info on Earth models and phase-nmaing nomenclature. Stephen Hicks Imperial College London Feb 2020 """ from obspy.clients.fdsn import Client from obspy import UTCDateTime from obspy.taup import TauPyModel from obspy.geodetics.base import locations2degrees import matplotlib.pyplot as plt # Start of parameters to define NETWORK = "AM" # AM = RaspberryShake network STATION = "RAEBE" # Station code of station to get data for CHANNEL = "EHZ" # channel to grab data for (e.g. EHZ, SHZ, EHE, EHN) EQ_TIME = "2020-01-07T08:24:26" # origin time of earthquake T_START = 0 # Length in seconds of data to plot before origin time T_END = 1250 # Length in seconds of data to plot after origin time PHASES = ["P", "S"] # list of phases to compute theoretical times for EVT_LAT = 17.916 # Latitude of event EVT_LON = -66.813 # Longitude of event EVT_Z = 10 # Depth of event STA_LAT = 51.33 # Latitude of station STA_LON = -0.49 # Longitude of station F1 = 0.3 # High-pass filter corner F2 = 0.7 # Low-pass filter corner LABEL = "M 6.4 Puerto Rico" # Title to plot on figure MODEL = 'iasp91' # Velocity model to predict travel-times through # End of parameters to define # Define fdsn client to get data from client = Client('http://fdsnws.raspberryshakedata.com') # Define start and end time orig_time = UTCDateTime(EQ_TIME) t1 = orig_time - T_START t2 = orig_time + T_END # Download and filfter data st = client.get_waveforms(NETWORK, STATION, "00", CHANNEL, starttime=t1, endtime=t2, attach_response=True) st.merge() st.detrend(type="demean") st.remove_response() st.filter("bandpass", freqmin=F1, freqmax=F2, corners=4) st.trim(t1, t2) # Set-up figure fig = plt.figure(figsize=(12, 8)) plt.suptitle(LABEL) ax = plt.subplot(121) # Set-up taup travel-time model dist = locations2degrees(EVT_LAT, 
EVT_LON, STA_LAT, STA_LON) model = TauPyModel(model=MODEL) # Now plot the waveform data ax.plot(st[0].times(reftime=orig_time), st[0].data*1000, linewidth=0.2, color="darkred") ymin, ymax = ax.get_ylim() # Now plot the theoretical arrival times for phase in PHASES: phase = [phase] tt = model.get_travel_times(source_depth_in_km=EVT_Z, distance_in_degree=dist, phase_list=phase) ax.vlines(tt[0].time, ymin, ymax, color="blue", linewidth=1.2, zorder=3, linestyle="--", alpha=0.5) ax.text(tt[0].time*1.02, ymax, phase[0], fontsize=12, horizontalalignment="left", verticalalignment="top") ax.set_xlabel("Time after earthquake (s)") ax.set_title("{:}.{:}.{:}.{:}\nBandpass filter: {:}-{:} Hz".format( st[0].stats.network, st[0].stats.station, st[0].stats.location, st[0].stats.channel, F1, F2)) ax.set_ylabel("Ground velocity (mm/s)") # Now plot the raypaths through the Earth ax2 = plt.subplot(122, projection='polar') arrivals = model.get_ray_paths( source_depth_in_km=EVT_Z, distance_in_degree=dist, phase_list=PHASES) ax3 = arrivals.plot_rays(phase, legend=False, ax=ax2, show=False, label_arrivals=True) ax3.set_title("Epicentral distance: {:3.1f}$^\circ$".format(dist)) # Save and plot the figure plt.tight_layout(rect=[0, 0.03, 1, 0.95]) plt.savefig("traces.png") plt.show()
normal
{ "blob_id": "8d8ea6ad7a3ed1a1e6e96ab75260ecf6e8211d32", "index": 1305, "step-1": "<mask token>\n", "step-2": "<mask token>\nst.merge()\nst.detrend(type='demean')\nst.remove_response()\nst.filter('bandpass', freqmin=F1, freqmax=F2, corners=4)\nst.trim(t1, t2)\n<mask token>\nplt.suptitle(LABEL)\n<mask token>\nax.plot(st[0].times(reftime=orig_time), st[0].data * 1000, linewidth=0.2,\n color='darkred')\n<mask token>\nfor phase in PHASES:\n phase = [phase]\n tt = model.get_travel_times(source_depth_in_km=EVT_Z,\n distance_in_degree=dist, phase_list=phase)\n ax.vlines(tt[0].time, ymin, ymax, color='blue', linewidth=1.2, zorder=3,\n linestyle='--', alpha=0.5)\n ax.text(tt[0].time * 1.02, ymax, phase[0], fontsize=12,\n horizontalalignment='left', verticalalignment='top')\nax.set_xlabel('Time after earthquake (s)')\nax.set_title(\"\"\"{:}.{:}.{:}.{:}\nBandpass filter: {:}-{:} Hz\"\"\".format(st[0]\n .stats.network, st[0].stats.station, st[0].stats.location, st[0].stats.\n channel, F1, F2))\nax.set_ylabel('Ground velocity (mm/s)')\n<mask token>\nax3.set_title('Epicentral distance: {:3.1f}$^\\\\circ$'.format(dist))\nplt.tight_layout(rect=[0, 0.03, 1, 0.95])\nplt.savefig('traces.png')\nplt.show()\n", "step-3": "<mask token>\nNETWORK = 'AM'\nSTATION = 'RAEBE'\nCHANNEL = 'EHZ'\nEQ_TIME = '2020-01-07T08:24:26'\nT_START = 0\nT_END = 1250\nPHASES = ['P', 'S']\nEVT_LAT = 17.916\nEVT_LON = -66.813\nEVT_Z = 10\nSTA_LAT = 51.33\nSTA_LON = -0.49\nF1 = 0.3\nF2 = 0.7\nLABEL = 'M 6.4 Puerto Rico'\nMODEL = 'iasp91'\nclient = Client('http://fdsnws.raspberryshakedata.com')\norig_time = UTCDateTime(EQ_TIME)\nt1 = orig_time - T_START\nt2 = orig_time + T_END\nst = client.get_waveforms(NETWORK, STATION, '00', CHANNEL, starttime=t1,\n endtime=t2, attach_response=True)\nst.merge()\nst.detrend(type='demean')\nst.remove_response()\nst.filter('bandpass', freqmin=F1, freqmax=F2, corners=4)\nst.trim(t1, t2)\nfig = plt.figure(figsize=(12, 8))\nplt.suptitle(LABEL)\nax = plt.subplot(121)\ndist = 
locations2degrees(EVT_LAT, EVT_LON, STA_LAT, STA_LON)\nmodel = TauPyModel(model=MODEL)\nax.plot(st[0].times(reftime=orig_time), st[0].data * 1000, linewidth=0.2,\n color='darkred')\nymin, ymax = ax.get_ylim()\nfor phase in PHASES:\n phase = [phase]\n tt = model.get_travel_times(source_depth_in_km=EVT_Z,\n distance_in_degree=dist, phase_list=phase)\n ax.vlines(tt[0].time, ymin, ymax, color='blue', linewidth=1.2, zorder=3,\n linestyle='--', alpha=0.5)\n ax.text(tt[0].time * 1.02, ymax, phase[0], fontsize=12,\n horizontalalignment='left', verticalalignment='top')\nax.set_xlabel('Time after earthquake (s)')\nax.set_title(\"\"\"{:}.{:}.{:}.{:}\nBandpass filter: {:}-{:} Hz\"\"\".format(st[0]\n .stats.network, st[0].stats.station, st[0].stats.location, st[0].stats.\n channel, F1, F2))\nax.set_ylabel('Ground velocity (mm/s)')\nax2 = plt.subplot(122, projection='polar')\narrivals = model.get_ray_paths(source_depth_in_km=EVT_Z, distance_in_degree\n =dist, phase_list=PHASES)\nax3 = arrivals.plot_rays(phase, legend=False, ax=ax2, show=False,\n label_arrivals=True)\nax3.set_title('Epicentral distance: {:3.1f}$^\\\\circ$'.format(dist))\nplt.tight_layout(rect=[0, 0.03, 1, 0.95])\nplt.savefig('traces.png')\nplt.show()\n", "step-4": "<mask token>\nfrom obspy.clients.fdsn import Client\nfrom obspy import UTCDateTime\nfrom obspy.taup import TauPyModel\nfrom obspy.geodetics.base import locations2degrees\nimport matplotlib.pyplot as plt\nNETWORK = 'AM'\nSTATION = 'RAEBE'\nCHANNEL = 'EHZ'\nEQ_TIME = '2020-01-07T08:24:26'\nT_START = 0\nT_END = 1250\nPHASES = ['P', 'S']\nEVT_LAT = 17.916\nEVT_LON = -66.813\nEVT_Z = 10\nSTA_LAT = 51.33\nSTA_LON = -0.49\nF1 = 0.3\nF2 = 0.7\nLABEL = 'M 6.4 Puerto Rico'\nMODEL = 'iasp91'\nclient = Client('http://fdsnws.raspberryshakedata.com')\norig_time = UTCDateTime(EQ_TIME)\nt1 = orig_time - T_START\nt2 = orig_time + T_END\nst = client.get_waveforms(NETWORK, STATION, '00', CHANNEL, starttime=t1,\n endtime=t2, 
attach_response=True)\nst.merge()\nst.detrend(type='demean')\nst.remove_response()\nst.filter('bandpass', freqmin=F1, freqmax=F2, corners=4)\nst.trim(t1, t2)\nfig = plt.figure(figsize=(12, 8))\nplt.suptitle(LABEL)\nax = plt.subplot(121)\ndist = locations2degrees(EVT_LAT, EVT_LON, STA_LAT, STA_LON)\nmodel = TauPyModel(model=MODEL)\nax.plot(st[0].times(reftime=orig_time), st[0].data * 1000, linewidth=0.2,\n color='darkred')\nymin, ymax = ax.get_ylim()\nfor phase in PHASES:\n phase = [phase]\n tt = model.get_travel_times(source_depth_in_km=EVT_Z,\n distance_in_degree=dist, phase_list=phase)\n ax.vlines(tt[0].time, ymin, ymax, color='blue', linewidth=1.2, zorder=3,\n linestyle='--', alpha=0.5)\n ax.text(tt[0].time * 1.02, ymax, phase[0], fontsize=12,\n horizontalalignment='left', verticalalignment='top')\nax.set_xlabel('Time after earthquake (s)')\nax.set_title(\"\"\"{:}.{:}.{:}.{:}\nBandpass filter: {:}-{:} Hz\"\"\".format(st[0]\n .stats.network, st[0].stats.station, st[0].stats.location, st[0].stats.\n channel, F1, F2))\nax.set_ylabel('Ground velocity (mm/s)')\nax2 = plt.subplot(122, projection='polar')\narrivals = model.get_ray_paths(source_depth_in_km=EVT_Z, distance_in_degree\n =dist, phase_list=PHASES)\nax3 = arrivals.plot_rays(phase, legend=False, ax=ax2, show=False,\n label_arrivals=True)\nax3.set_title('Epicentral distance: {:3.1f}$^\\\\circ$'.format(dist))\nplt.tight_layout(rect=[0, 0.03, 1, 0.95])\nplt.savefig('traces.png')\nplt.show()\n", "step-5": "#!/usr/bin/env python\n\"\"\"\nScript to download and plot RaspberryShake station data\nAlso computes and plots theoretical phase arrival times and raypaths.\n\nSee https://docs.obspy.org/packages/obspy.taup.html for more info on \nEarth models and phase-nmaing nomenclature.\n\nStephen Hicks\nImperial College London\nFeb 2020\n\"\"\"\n\nfrom obspy.clients.fdsn import Client\nfrom obspy import UTCDateTime\nfrom obspy.taup import TauPyModel\nfrom obspy.geodetics.base import locations2degrees\nimport 
matplotlib.pyplot as plt\n\n# Start of parameters to define\nNETWORK = \"AM\" # AM = RaspberryShake network\nSTATION = \"RAEBE\" # Station code of station to get data for\nCHANNEL = \"EHZ\" # channel to grab data for (e.g. EHZ, SHZ, EHE, EHN)\nEQ_TIME = \"2020-01-07T08:24:26\" # origin time of earthquake\nT_START = 0 # Length in seconds of data to plot before origin time\nT_END = 1250 # Length in seconds of data to plot after origin time\nPHASES = [\"P\", \"S\"] # list of phases to compute theoretical times for\nEVT_LAT = 17.916 # Latitude of event\nEVT_LON = -66.813 # Longitude of event \nEVT_Z = 10 # Depth of event\nSTA_LAT = 51.33 # Latitude of station \nSTA_LON = -0.49 # Longitude of station\nF1 = 0.3 # High-pass filter corner\nF2 = 0.7 # Low-pass filter corner \nLABEL = \"M 6.4 Puerto Rico\" # Title to plot on figure\nMODEL = 'iasp91' # Velocity model to predict travel-times through\n# End of parameters to define\n\n# Define fdsn client to get data from\nclient = Client('http://fdsnws.raspberryshakedata.com')\n\n# Define start and end time\norig_time = UTCDateTime(EQ_TIME)\nt1 = orig_time - T_START\nt2 = orig_time + T_END\n# Download and filfter data\nst = client.get_waveforms(NETWORK, STATION, \"00\", CHANNEL,\n starttime=t1, endtime=t2, attach_response=True)\nst.merge()\nst.detrend(type=\"demean\")\nst.remove_response()\nst.filter(\"bandpass\", freqmin=F1, freqmax=F2, corners=4)\nst.trim(t1, t2)\n\n# Set-up figure\nfig = plt.figure(figsize=(12, 8))\nplt.suptitle(LABEL)\nax = plt.subplot(121)\n\n# Set-up taup travel-time model\ndist = locations2degrees(EVT_LAT, EVT_LON, STA_LAT, STA_LON)\nmodel = TauPyModel(model=MODEL)\n\n# Now plot the waveform data\nax.plot(st[0].times(reftime=orig_time), st[0].data*1000, linewidth=0.2,\n color=\"darkred\")\nymin, ymax = ax.get_ylim()\n\n# Now plot the theoretical arrival times\nfor phase in PHASES:\n phase = [phase]\n tt = model.get_travel_times(source_depth_in_km=EVT_Z,\n distance_in_degree=dist,\n phase_list=phase)\n 
ax.vlines(tt[0].time, ymin, ymax, color=\"blue\",\n linewidth=1.2, zorder=3, linestyle=\"--\", alpha=0.5)\n ax.text(tt[0].time*1.02, ymax, phase[0], fontsize=12,\n horizontalalignment=\"left\", verticalalignment=\"top\")\nax.set_xlabel(\"Time after earthquake (s)\")\nax.set_title(\"{:}.{:}.{:}.{:}\\nBandpass filter: {:}-{:} Hz\".format(\n st[0].stats.network, st[0].stats.station, st[0].stats.location,\n st[0].stats.channel, F1, F2))\nax.set_ylabel(\"Ground velocity (mm/s)\")\n\n# Now plot the raypaths through the Earth\nax2 = plt.subplot(122, projection='polar')\narrivals = model.get_ray_paths(\n source_depth_in_km=EVT_Z, distance_in_degree=dist,\n phase_list=PHASES)\nax3 = arrivals.plot_rays(phase, legend=False, ax=ax2, show=False,\n label_arrivals=True)\nax3.set_title(\"Epicentral distance: {:3.1f}$^\\circ$\".format(dist))\n\n# Save and plot the figure\nplt.tight_layout(rect=[0, 0.03, 1, 0.95])\nplt.savefig(\"traces.png\")\nplt.show()\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
<|reserved_special_token_0|> class _GenericBot: <|reserved_special_token_0|> def __init__(self, pos, inventory=None): """Initialize with an empty inventory. inventory is a dictionary. If None, an empty one will be used.""" if inventory is None: self._inventory = {} else: self._inventory = deepcopy(inventory) self._pos = deepcopy(pos) def take_action(self, action): """Take the action (acquired from _get_legal_actions).""" getattr(self, action['func'])(*action.get('args', ()), **action.get ('kwargs', {})) def take_actions(self, actions, seconds=None): """Take these actions. If seconds is not None, sleep 'seconds' seconds. """ if not actions: return self.take_action(actions[0]) for action in actions[1:]: if seconds is not None: sleep(seconds) self.take_action(action) def get_pos(self): """Return the position.""" return deepcopy(self._pos) def get_legal_actions(self, block_=None): """Return a list of legal actions. If block_ is None, return all legal actions. Otherwise, return all legal actions that don't involve placing the block.""" return self._get_move_actions(block_) + self._get_mine_actions( ) + self._get_placement_actions(block_) <|reserved_special_token_0|> <|reserved_special_token_0|> def _place(self, loc, exclude=None, block_=None): """Place a block from the inventory only. If exclude is not None, place a block that is not 'exclude'. If block is not None, place that block only. """ if not self._inventory: raise Exception('Inventory empty') if block_ is None: for key in self._inventory: if key != exclude: block_ = key break else: raise Exception( 'You requested not to place %s, but it is the only block in the inventory.' 
% exclude) if block_ not in self._inventory: raise Exception('Block %s is not in the inventory' % block_) if self._inventory[block_] == 1: del self._inventory[block_] else: self._inventory[block_] -= 1 self._set_block(loc, block_) <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def _mine(self, loc): """Mine the block.""" block_ = self._get_block(loc) self._add_to_inv(block_) self._set_block(loc, _AIR) def _get_move_actions(self, exclude=None): """Return a list of legal movement actions. exclude is the block to exclude. """ rtn = [] can_move_up = self._get_block(self._pos + _Vec3(0, 2, 0)) in {_AIR, _WATER} if can_move_up: if self._surrounded(): rtn.append({'func': '_move', 'args': (self._pos + _Vec3(0, 1, 0),)}) else: rtn.append({'func': '_move_up', 'args': (exclude,)}) hidden_block = self._get_block(self._pos + _Vec3(0, -2, 0)) if hidden_block == _WATER or hidden_block not in {_AIR, _LAVA}: rtn.append({'func': '_move_down'}) for dir_ in _adj_dirs(): rtn.extend(self._side_moves(dir_, can_move_up)) return rtn def _side_moves(self, dir_, can_move_up): """Return the list of side moves. dir_ is an adjacent direction. can_move_up is a boolean for whether or not the bot can move up. 
""" rtn = [] base_pos = self._pos + dir_ base_block = self._get_block(base_pos) empty_blocks = {_AIR, _WATER} if can_move_up and base_block not in {_AIR, _LAVA, _WATER}: for vert_dir in [_Vec3(0, 1, 0), _Vec3(0, 2, 0)]: if self._get_block(base_pos + vert_dir) not in empty_blocks: break else: rtn.append({'func': '_move', 'args': (base_pos + _Vec3(0, 1, 0),)}) for vert_dir in [_Vec3(), _Vec3(0, 1, 0)]: if self._get_block(base_pos + vert_dir) not in empty_blocks: break else: pos = base_pos + _Vec3(0, -1, 0) for _ in xrange(_DROP_PLUS_1): block_ = self._get_block(pos) if block_ != _AIR: if block_ != _LAVA: rtn.append({'func': '_move', 'args': (pos + _Vec3(0, 1, 0),)}) break pos.y -= 1 <|reserved_special_token_0|> def _get_mine_actions(self): """Return a list of legal mining actions (that only involve mining and not moving).""" rtn = [] dont_mine = {_AIR, _WATER, _LAVA} pos_above = self._pos + _Vec3(0, 2, 0) if self._get_block(pos_above) not in dont_mine: rtn.append({'func': '_mine', 'args': (pos_above,)}) for dir_ in _adj_dirs(): pos = self._pos + dir_ for _ in xrange(2): if self._get_block(pos) not in dont_mine: rtn.append({'func': '_mine', 'args': (pos,)}) pos = pos + _Vec3(0, 1, 0) return rtn def _get_placement_actions(self, exclude=None): """Return a list of legal actions that only involve placing a block from the inventory. exclude is a block id. It is the block that should not be placed. 
If None, any block can be placed.""" if not self._has_blocks_to_place(exclude=exclude): return [] dirs = [_Vec3(0, 2, 0)] for dir_ in _adj_dirs(): dirs.extend([dir_, dir_ + _Vec3(0, 1, 0)]) if self._get_block(self._pos + dir_) in [_AIR, _WATER]: dirs.append(dir_ + _Vec3(0, -1, 0)) rtn = [] for dir_ in dirs: pos = self._pos + dir_ if self._can_place(pos): rtn.append({'func': '_place', 'args': (pos,), 'kwargs': { 'exclude': exclude}}) return rtn <|reserved_special_token_0|> def _has_blocks_to_place(self, exclude=None): """Return whether or not the bot can place a block from the inventory. If exclude is None, any block can be placed.""" for block_ in self._inventory: if block_ != exclude: return True return False <|reserved_special_token_0|> <|reserved_special_token_0|> class _ImaginaryBot(_GenericBot): """A bot used for finding paths that doesn't actually change blocks in the world.""" def __init__(self, pos, inventory=None): """Create a new bot.""" _GenericBot.__init__(self, pos, inventory) self._changes = {} def _set_block(self, pos, block_): """Set a block. block_ is the block id.""" self._changes[deepcopy(pos)] = block def _get_block(self, pos): """Get the block at the position.""" if pos in self._changes: return self._changes[pos] else: return _get_mc().getBlock(pos) def get_block(self, pos): """The public version.""" return self._get_block(pos) def __hash__(self): """Return the hash.""" return hash(frozenset([self._pos] + _key_vals(self._inventory) + _key_vals(self._changes))) class Bot(_GenericBot): """The real bot. 
All vector arguments are Vec3s.""" _BOT_BLOCK = block.IRON_BLOCK.id def __init__(self): """Create a bot next to the player.""" pos = _get_mc().player.getTilePos() + Vec3(2, 0, 0) pos = _Vec3(pos.x, pos.y, pos.z) _GenericBot.__init__(self, pos) self._pos = pos self._move(self._pos) @staticmethod def destroy_all(): """Destroy all bots within a small distance (in case I forget to destroy one).""" player_loc = _player_loc() minec = _get_mc() rad = 10 for x in xrange(player_loc.x - rad, player_loc.x + rad): for y in xrange(player_loc.y - rad, player_loc.y + rad): for z in xrange(player_loc.z - rad, player_loc.z + rad): if minec.getBlock(x, y, z) == Bot._BOT_BLOCK: minec.setBlock(x, y, z, _AIR) def destroy(self): """Set itself to air.""" self._set_block(self._pos, _AIR) self._set_block(self._pos + _Vec3(0, 1, 0), _AIR) def fetch(self, block_name): """Mine and return a block to the player.""" imag_bot = _ImaginaryBot(self._pos, self._inventory) block_id = getattr(block, block_name).id block_loc = self._get_block_loc(block_id) mine_prob = _MineProblem(imag_bot, block_loc, block_id) mine_actions = astar(mine_prob, _mine_heuristic) self.take_actions(mine_actions, _DELAY) imag_bot = _ImaginaryBot(self._pos, self._inventory) player_loc = _player_loc() return_prob = _ReturnProblem(imag_bot, block_id, player_loc) return_actions = astar(return_prob, _return_heuristic) imag_bot.take_actions(return_actions) return_actions.append({'func': '_place', 'args': (imag_bot.get_pos( ) + player_loc) / 2, 'kwargs': {'block': block_id}}) self.take_actions(return_actions, _DELAY) def _get_block_loc(self, block_id): """Return the location of the block.""" find_prob = FindProblem(self._pos, block_id) dirs = bfs(find_prob) return self._pos + sum(dirs) def _set_block(self, pos, block_): """Place an actual block in the world. 
block is a block id.""" _get_mc().setBlock(pos, block_) def _get_block(self, pos): """Get the block at the position.""" return _get_mc().getBlock(pos) def _move(self, pos): """Move there, and set the appropriate blocks.""" self._set_block(self._pos, _AIR) self._set_block(self._pos + _Vec3(0, 1, 0), _AIR) self._set_block(pos, self._BOT_BLOCK) self._set_block(pos + _Vec3(0, 1, 0), self._BOT_BLOCK) self._pos = pos class FindProblem(SearchProblem): """Problem for finding the location of a block in the world. A state in this problem is a location. """ def __init__(self, start_loc, block_id): """Initialize.""" self._start_loc = deepcopy(start_loc) self._block_id = block_id def getStartState(self): """Return the starting location.""" return self._start_loc def isGoalState(self, state): return _get_mc().getBlock(state) == self._block_id def getSuccessors(self, state): """Return the successors.""" rtn = [] for dir_ in _all_dirs(): successor = state + dir_ if successor.y <= _get_mc().getHeight(successor.x, successor.z ) and _get_mc().getBlock(successor) != _BEDROCK: rtn.append((successor, dir_, 1)) return rtn class _MineProblem(SearchProblem): """The problem of finding the block and mining it (not returning it).""" def __init__(self, imag_bot, block_loc, block_id): """Initialize the problem with an _ImaginaryBot. block_loc is a Vec3. 
""" self._bot = imag_bot self._block_loc = deepcopy(block_loc) self._block_id = block_id def get_block_loc(self): """Return the block location.""" return deepcopy(self._block_loc) def get_block_id(self): """Return the block it's trying to mine.""" return self._block_id def getStartState(self): """Return the bot passed in.""" return self._bot def isGoalState(self, state): """Return whether or not the bot has the block.""" return state.contains(self._block_id) def getSuccessors(self, state): """Return the successors.""" rtn = [] for action in state.get_legal_actions(): successor = deepcopy(state) successor.take_action(action) rtn.append((successor, action, 1)) return rtn class _ReturnProblem(SearchProblem): """The problem of returning to the player. This does not place the block next to the player.""" def __init__(self, imag_bot, block_, player_loc): """Initialized the problem with an _ImaginaryBot. block is a block id.""" self._bot = imag_bot self._block = block_ self._player_loc = player_loc def get_player_loc(self): """Return the player location.""" return deepcopy(self._player_loc) def getStartState(self): """Return the bot passed in.""" return self._bot def isGoalState(self, state): """Return whether or not the bot is next to the player.""" diff = state.get_pos() - self._player_loc return diff.y == 0 and (diff.x == 0 or diff.z == 0) and abs(diff.x ) + abs(diff.z) == 2 and state.get_block(self._player_loc + diff / 2 + _Vec3(0, -1, 0)) not in (_AIR, _LAVA, _WATER) def getSuccessors(self, state): """Return the successors.""" rtn = [] for action in state.get_legal_actions(self._block): successor = deepcopy(state) successor.take_action(action) rtn.append((successor, action, 1)) return rtn <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class _GenericBot: <|reserved_special_token_0|> def __init__(self, pos, inventory=None): """Initialize with an empty inventory. inventory is a dictionary. 
If None, an empty one will be used.""" if inventory is None: self._inventory = {} else: self._inventory = deepcopy(inventory) self._pos = deepcopy(pos) def take_action(self, action): """Take the action (acquired from _get_legal_actions).""" getattr(self, action['func'])(*action.get('args', ()), **action.get ('kwargs', {})) def take_actions(self, actions, seconds=None): """Take these actions. If seconds is not None, sleep 'seconds' seconds. """ if not actions: return self.take_action(actions[0]) for action in actions[1:]: if seconds is not None: sleep(seconds) self.take_action(action) def get_pos(self): """Return the position.""" return deepcopy(self._pos) def get_legal_actions(self, block_=None): """Return a list of legal actions. If block_ is None, return all legal actions. Otherwise, return all legal actions that don't involve placing the block.""" return self._get_move_actions(block_) + self._get_mine_actions( ) + self._get_placement_actions(block_) <|reserved_special_token_0|> <|reserved_special_token_0|> def _place(self, loc, exclude=None, block_=None): """Place a block from the inventory only. If exclude is not None, place a block that is not 'exclude'. If block is not None, place that block only. """ if not self._inventory: raise Exception('Inventory empty') if block_ is None: for key in self._inventory: if key != exclude: block_ = key break else: raise Exception( 'You requested not to place %s, but it is the only block in the inventory.' 
% exclude) if block_ not in self._inventory: raise Exception('Block %s is not in the inventory' % block_) if self._inventory[block_] == 1: del self._inventory[block_] else: self._inventory[block_] -= 1 self._set_block(loc, block_) def _move_down(self): """Move and mine the block below.""" new_pos = self._pos + _Vec3(0, -1, 0) block_ = self._get_block(new_pos) if block_ != _WATER: self._add_to_inv(block_) self._move(new_pos) <|reserved_special_token_0|> <|reserved_special_token_0|> def _mine(self, loc): """Mine the block.""" block_ = self._get_block(loc) self._add_to_inv(block_) self._set_block(loc, _AIR) def _get_move_actions(self, exclude=None): """Return a list of legal movement actions. exclude is the block to exclude. """ rtn = [] can_move_up = self._get_block(self._pos + _Vec3(0, 2, 0)) in {_AIR, _WATER} if can_move_up: if self._surrounded(): rtn.append({'func': '_move', 'args': (self._pos + _Vec3(0, 1, 0),)}) else: rtn.append({'func': '_move_up', 'args': (exclude,)}) hidden_block = self._get_block(self._pos + _Vec3(0, -2, 0)) if hidden_block == _WATER or hidden_block not in {_AIR, _LAVA}: rtn.append({'func': '_move_down'}) for dir_ in _adj_dirs(): rtn.extend(self._side_moves(dir_, can_move_up)) return rtn def _side_moves(self, dir_, can_move_up): """Return the list of side moves. dir_ is an adjacent direction. can_move_up is a boolean for whether or not the bot can move up. 
""" rtn = [] base_pos = self._pos + dir_ base_block = self._get_block(base_pos) empty_blocks = {_AIR, _WATER} if can_move_up and base_block not in {_AIR, _LAVA, _WATER}: for vert_dir in [_Vec3(0, 1, 0), _Vec3(0, 2, 0)]: if self._get_block(base_pos + vert_dir) not in empty_blocks: break else: rtn.append({'func': '_move', 'args': (base_pos + _Vec3(0, 1, 0),)}) for vert_dir in [_Vec3(), _Vec3(0, 1, 0)]: if self._get_block(base_pos + vert_dir) not in empty_blocks: break else: pos = base_pos + _Vec3(0, -1, 0) for _ in xrange(_DROP_PLUS_1): block_ = self._get_block(pos) if block_ != _AIR: if block_ != _LAVA: rtn.append({'func': '_move', 'args': (pos + _Vec3(0, 1, 0),)}) break pos.y -= 1 <|reserved_special_token_0|> def _get_mine_actions(self): """Return a list of legal mining actions (that only involve mining and not moving).""" rtn = [] dont_mine = {_AIR, _WATER, _LAVA} pos_above = self._pos + _Vec3(0, 2, 0) if self._get_block(pos_above) not in dont_mine: rtn.append({'func': '_mine', 'args': (pos_above,)}) for dir_ in _adj_dirs(): pos = self._pos + dir_ for _ in xrange(2): if self._get_block(pos) not in dont_mine: rtn.append({'func': '_mine', 'args': (pos,)}) pos = pos + _Vec3(0, 1, 0) return rtn def _get_placement_actions(self, exclude=None): """Return a list of legal actions that only involve placing a block from the inventory. exclude is a block id. It is the block that should not be placed. 
class _ImaginaryBot(_GenericBot):
    """A bot used for finding paths that doesn't actually change blocks
    in the world."""

    def __init__(self, pos, inventory=None):
        """Create a new bot."""
        _GenericBot.__init__(self, pos, inventory)
        # Maps position -> block id for the pretend (imagined) changes.
        self._changes = {}

    def _set_block(self, pos, block_):
        """Record a pretend block change. block_ is the block id.

        BUG FIX: the original stored the module-level name 'block' (the
        mcpi.block module object) as the value instead of the block_
        parameter."""
        self._changes[deepcopy(pos)] = block_

    def _get_block(self, pos):
        """Get the block at the position, preferring imagined changes."""
        if pos in self._changes:
            return self._changes[pos]
        else:
            return _get_mc().getBlock(pos)

    def get_block(self, pos):
        """The public version."""
        return self._get_block(pos)

    def __hash__(self):
        """Return the hash."""
        return hash(frozenset([self._pos] + _key_vals(self._inventory) +
                              _key_vals(self._changes)))
def fetch(self, block_name):
    """Mine a block of the named type and bring it back to the player.

    block_name is an attribute name on mcpi.block (e.g. 'DIAMOND_ORE').
    """
    # Plan and execute mining the block.
    imag_bot = _ImaginaryBot(self._pos, self._inventory)
    block_id = getattr(block, block_name).id
    block_loc = self._get_block_loc(block_id)
    mine_prob = _MineProblem(imag_bot, block_loc, block_id)
    mine_actions = astar(mine_prob, _mine_heuristic)
    self.take_actions(mine_actions, _DELAY)

    # Plan the trip back to the player.
    imag_bot = _ImaginaryBot(self._pos, self._inventory)
    player_loc = _player_loc()
    return_prob = _ReturnProblem(imag_bot, block_id, player_loc)
    return_actions = astar(return_prob, _return_heuristic)
    imag_bot.take_actions(return_actions)

    # Finally, drop the block between the bot and the player.
    # BUG FIX: 'args' must be a tuple (take_action unpacks it with *), and
    # the keyword must match _place's parameter name 'block_', not 'block'.
    return_actions.append({
        'func': '_place',
        'args': ((imag_bot.get_pos() + player_loc) / 2,),
        'kwargs': {'block_': block_id}
    })
    self.take_actions(return_actions, _DELAY)
class FindProblem(SearchProblem):
    """Search problem for locating a block id in the world.

    A state in this problem is a location.
    """

    def __init__(self, start_loc, block_id):
        """Remember the starting location and the block id to find."""
        self._start_loc = deepcopy(start_loc)
        self._block_id = block_id

    def getStartState(self):
        """Return the starting location."""
        return self._start_loc

    def isGoalState(self, state):
        """A state is a goal iff the world holds the wanted block there."""
        return _get_mc().getBlock(state) == self._block_id

    def getSuccessors(self, state):
        """Return (successor, direction, cost) triples for every adjacent
        location that is not above the surface and is not bedrock."""
        successors = []
        for step in _all_dirs():
            nxt = state + step
            if nxt.y <= _get_mc().getHeight(nxt.x, nxt.z) and \
                    _get_mc().getBlock(nxt) != _BEDROCK:
                successors.append((nxt, step, 1))
        return successors
""" self._bot = imag_bot self._block_loc = deepcopy(block_loc) self._block_id = block_id def get_block_loc(self): """Return the block location.""" return deepcopy(self._block_loc) def get_block_id(self): """Return the block it's trying to mine.""" return self._block_id def getStartState(self): """Return the bot passed in.""" return self._bot def isGoalState(self, state): """Return whether or not the bot has the block.""" return state.contains(self._block_id) def getSuccessors(self, state): """Return the successors.""" rtn = [] for action in state.get_legal_actions(): successor = deepcopy(state) successor.take_action(action) rtn.append((successor, action, 1)) return rtn class _ReturnProblem(SearchProblem): """The problem of returning to the player. This does not place the block next to the player.""" def __init__(self, imag_bot, block_, player_loc): """Initialized the problem with an _ImaginaryBot. block is a block id.""" self._bot = imag_bot self._block = block_ self._player_loc = player_loc def get_player_loc(self): """Return the player location.""" return deepcopy(self._player_loc) def getStartState(self): """Return the bot passed in.""" return self._bot def isGoalState(self, state): """Return whether or not the bot is next to the player.""" diff = state.get_pos() - self._player_loc return diff.y == 0 and (diff.x == 0 or diff.z == 0) and abs(diff.x ) + abs(diff.z) == 2 and state.get_block(self._player_loc + diff / 2 + _Vec3(0, -1, 0)) not in (_AIR, _LAVA, _WATER) def getSuccessors(self, state): """Return the successors.""" rtn = [] for action in state.get_legal_actions(self._block): successor = deepcopy(state) successor.take_action(action) rtn.append((successor, action, 1)) return rtn <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class _GenericBot: <|reserved_special_token_0|> def __init__(self, pos, inventory=None): """Initialize with an empty inventory. inventory is a dictionary. 
def take_action(self, action):
    """Dispatch one action dict (from _get_legal_actions) to its method.

    action['func'] names a bound method; 'args' and 'kwargs' are optional.
    """
    func = getattr(self, action['func'])
    args = action.get('args', ())
    kwargs = action.get('kwargs', {})
    func(*args, **kwargs)
def _move_up(self, exclude=None):
    """Move one block up, placing a block underneath as a step.

    If exclude is not None, place a block that is not 'exclude'.
    """
    above = self._pos + _Vec3(0, 1, 0)
    self._move(above)
    self._place(self._pos + _Vec3(0, -1, 0), exclude)
""" rtn = [] base_pos = self._pos + dir_ base_block = self._get_block(base_pos) empty_blocks = {_AIR, _WATER} if can_move_up and base_block not in {_AIR, _LAVA, _WATER}: for vert_dir in [_Vec3(0, 1, 0), _Vec3(0, 2, 0)]: if self._get_block(base_pos + vert_dir) not in empty_blocks: break else: rtn.append({'func': '_move', 'args': (base_pos + _Vec3(0, 1, 0),)}) for vert_dir in [_Vec3(), _Vec3(0, 1, 0)]: if self._get_block(base_pos + vert_dir) not in empty_blocks: break else: pos = base_pos + _Vec3(0, -1, 0) for _ in xrange(_DROP_PLUS_1): block_ = self._get_block(pos) if block_ != _AIR: if block_ != _LAVA: rtn.append({'func': '_move', 'args': (pos + _Vec3(0, 1, 0),)}) break pos.y -= 1 def _surrounded(self): """Return whether or not the bot is surrounded by water.""" for dir_ in _adj_dirs(): if self._get_block(self._pos + dir_) != _WATER: return False return True def _get_mine_actions(self): """Return a list of legal mining actions (that only involve mining and not moving).""" rtn = [] dont_mine = {_AIR, _WATER, _LAVA} pos_above = self._pos + _Vec3(0, 2, 0) if self._get_block(pos_above) not in dont_mine: rtn.append({'func': '_mine', 'args': (pos_above,)}) for dir_ in _adj_dirs(): pos = self._pos + dir_ for _ in xrange(2): if self._get_block(pos) not in dont_mine: rtn.append({'func': '_mine', 'args': (pos,)}) pos = pos + _Vec3(0, 1, 0) return rtn def _get_placement_actions(self, exclude=None): """Return a list of legal actions that only involve placing a block from the inventory. exclude is a block id. It is the block that should not be placed. 
def _can_place(self, loc):
    """Return whether or not the bot can place a block at that location
    independent of what it has in its inventory.

    A block is placeable iff some neighbour (4 sides, above, below) is
    solid and is not part of the bot's own two-block body.

    BUG FIX: the original wrote `_adj_dirs + [...]`, adding a list to the
    _adj_dirs *function object* (a TypeError at runtime); every other call
    site invokes it, so it must be `_adj_dirs() + [...]`.
    """
    non_blocks = [_AIR, _WATER, _LAVA]
    player = [self._pos, self._pos + _Vec3(0, 1, 0)]
    for dir_ in _adj_dirs() + [_Vec3(0, 1, 0), _Vec3(0, -1, 0)]:
        new_loc = loc + dir_
        if new_loc not in player and \
                self._get_block(new_loc) not in non_blocks:
            return True
    return False
def destroy(self):
    """Erase the bot's two-block body from the world."""
    for offset in (_Vec3(0, 0, 0), _Vec3(0, 1, 0)):
        self._set_block(self._pos + offset, _AIR)
def _move(self, pos):
    """Relocate the bot: clear the old two-block body, draw it at pos."""
    up = _Vec3(0, 1, 0)
    self._set_block(self._pos, _AIR)
    self._set_block(self._pos + up, _AIR)
    self._set_block(pos, self._BOT_BLOCK)
    self._set_block(pos + up, self._BOT_BLOCK)
    self._pos = pos
""" self._bot = imag_bot self._block_loc = deepcopy(block_loc) self._block_id = block_id def get_block_loc(self): """Return the block location.""" return deepcopy(self._block_loc) def get_block_id(self): """Return the block it's trying to mine.""" return self._block_id def getStartState(self): """Return the bot passed in.""" return self._bot def isGoalState(self, state): """Return whether or not the bot has the block.""" return state.contains(self._block_id) def getSuccessors(self, state): """Return the successors.""" rtn = [] for action in state.get_legal_actions(): successor = deepcopy(state) successor.take_action(action) rtn.append((successor, action, 1)) return rtn class _ReturnProblem(SearchProblem): """The problem of returning to the player. This does not place the block next to the player.""" def __init__(self, imag_bot, block_, player_loc): """Initialized the problem with an _ImaginaryBot. block is a block id.""" self._bot = imag_bot self._block = block_ self._player_loc = player_loc def get_player_loc(self): """Return the player location.""" return deepcopy(self._player_loc) def getStartState(self): """Return the bot passed in.""" return self._bot def isGoalState(self, state): """Return whether or not the bot is next to the player.""" diff = state.get_pos() - self._player_loc return diff.y == 0 and (diff.x == 0 or diff.z == 0) and abs(diff.x ) + abs(diff.z) == 2 and state.get_block(self._player_loc + diff / 2 + _Vec3(0, -1, 0)) not in (_AIR, _LAVA, _WATER) def getSuccessors(self, state): """Return the successors.""" rtn = [] for action in state.get_legal_actions(self._block): successor = deepcopy(state) successor.take_action(action) rtn.append((successor, action, 1)) return rtn <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class _GenericBot: <|reserved_special_token_0|> def __init__(self, pos, inventory=None): """Initialize with an empty inventory. inventory is a dictionary. 
def __init__(self, pos, inventory=None):
    """Initialize with an empty inventory.

    inventory is a dictionary. If None, an empty one will be used.
    Both pos and inventory are deep-copied so the caller's objects are
    never mutated."""
    self._inventory = {} if inventory is None else deepcopy(inventory)
    self._pos = deepcopy(pos)
def _mine(self, loc):
    """Collect the block at loc into the inventory and clear it to air."""
    self._add_to_inv(self._get_block(loc))
    self._set_block(loc, _AIR)
""" rtn = [] base_pos = self._pos + dir_ base_block = self._get_block(base_pos) empty_blocks = {_AIR, _WATER} if can_move_up and base_block not in {_AIR, _LAVA, _WATER}: for vert_dir in [_Vec3(0, 1, 0), _Vec3(0, 2, 0)]: if self._get_block(base_pos + vert_dir) not in empty_blocks: break else: rtn.append({'func': '_move', 'args': (base_pos + _Vec3(0, 1, 0),)}) for vert_dir in [_Vec3(), _Vec3(0, 1, 0)]: if self._get_block(base_pos + vert_dir) not in empty_blocks: break else: pos = base_pos + _Vec3(0, -1, 0) for _ in xrange(_DROP_PLUS_1): block_ = self._get_block(pos) if block_ != _AIR: if block_ != _LAVA: rtn.append({'func': '_move', 'args': (pos + _Vec3(0, 1, 0),)}) break pos.y -= 1 def _surrounded(self): """Return whether or not the bot is surrounded by water.""" for dir_ in _adj_dirs(): if self._get_block(self._pos + dir_) != _WATER: return False return True def _get_mine_actions(self): """Return a list of legal mining actions (that only involve mining and not moving).""" rtn = [] dont_mine = {_AIR, _WATER, _LAVA} pos_above = self._pos + _Vec3(0, 2, 0) if self._get_block(pos_above) not in dont_mine: rtn.append({'func': '_mine', 'args': (pos_above,)}) for dir_ in _adj_dirs(): pos = self._pos + dir_ for _ in xrange(2): if self._get_block(pos) not in dont_mine: rtn.append({'func': '_mine', 'args': (pos,)}) pos = pos + _Vec3(0, 1, 0) return rtn def _get_placement_actions(self, exclude=None): """Return a list of legal actions that only involve placing a block from the inventory. exclude is a block id. It is the block that should not be placed. 
def _set_block(self, pos, block_):
    """Record a pretend block change. block_ is the block id.

    BUG FIX: the original assigned the module-level name 'block' (the
    mcpi.block module object) as the value instead of the block_
    parameter, corrupting the imagined world state.
    """
    self._changes[deepcopy(pos)] = block_
def _get_block_loc(self, block_id):
    """Locate a block with this id via breadth-first search from the bot."""
    path = bfs(FindProblem(self._pos, block_id))
    return self._pos + sum(path)
""" self._bot = imag_bot self._block_loc = deepcopy(block_loc) self._block_id = block_id def get_block_loc(self): """Return the block location.""" return deepcopy(self._block_loc) def get_block_id(self): """Return the block it's trying to mine.""" return self._block_id def getStartState(self): """Return the bot passed in.""" return self._bot def isGoalState(self, state): """Return whether or not the bot has the block.""" return state.contains(self._block_id) def getSuccessors(self, state): """Return the successors.""" rtn = [] for action in state.get_legal_actions(): successor = deepcopy(state) successor.take_action(action) rtn.append((successor, action, 1)) return rtn class _ReturnProblem(SearchProblem): """The problem of returning to the player. This does not place the block next to the player.""" def __init__(self, imag_bot, block_, player_loc): """Initialized the problem with an _ImaginaryBot. block is a block id.""" self._bot = imag_bot self._block = block_ self._player_loc = player_loc def get_player_loc(self): """Return the player location.""" return deepcopy(self._player_loc) def getStartState(self): """Return the bot passed in.""" return self._bot def isGoalState(self, state): """Return whether or not the bot is next to the player.""" diff = state.get_pos() - self._player_loc return diff.y == 0 and (diff.x == 0 or diff.z == 0) and abs(diff.x ) + abs(diff.z) == 2 and state.get_block(self._player_loc + diff / 2 + _Vec3(0, -1, 0)) not in (_AIR, _LAVA, _WATER) def getSuccessors(self, state): """Return the successors.""" rtn = [] for action in state.get_legal_actions(self._block): successor = deepcopy(state) successor.take_action(action) rtn.append((successor, action, 1)) return rtn <|reserved_special_token_0|> <|reserved_special_token_1|> """Module for the bot""" from copy import deepcopy from time import sleep import mcpi.minecraft as minecraft from mcpi.vec3 import Vec3 import mcpi.block as block from search import SearchProblem, astar, bfs from singleton 
import singleton _AIR = block.AIR.id _WATER = block.WATER.id _LAVA = block.LAVA.id _BEDROCK = block.BEDROCK.id _DROP = 2 # It can drop at most this many _DROP_PLUS_1 = _DROP + 1 _DELAY = 1 class _Vec3(Vec3): """A Vec3 that is hashable. Everything in this program should use this class.""" def __hash__(self): """Return the hash.""" return hash((self.x, self.y, self.z)) def clone(self): """Return a clone.""" return _Vec3(self.x, self.y, self.z) class _GenericBot: """A generic bot.""" def __init__(self, pos, inventory=None): """Initialize with an empty inventory. inventory is a dictionary. If None, an empty one will be used.""" if inventory is None: self._inventory = {} else: self._inventory = deepcopy(inventory) self._pos = deepcopy(pos) def take_action(self, action): """Take the action (acquired from _get_legal_actions).""" getattr(self, action['func'])( *action.get('args', ()), **action.get('kwargs', {}) ) def take_actions(self, actions, seconds=None): """Take these actions. If seconds is not None, sleep 'seconds' seconds. """ if not actions: return self.take_action(actions[0]) for action in actions[1:]: if seconds is not None: sleep(seconds) self.take_action(action) def get_pos(self): """Return the position.""" return deepcopy(self._pos) def get_legal_actions(self, block_=None): """Return a list of legal actions. If block_ is None, return all legal actions. Otherwise, return all legal actions that don't involve placing the block.""" return self._get_move_actions(block_) + self._get_mine_actions() + \ self._get_placement_actions(block_) def contains(self, block_): """Return whether or not the bot contains the block id.""" return block_ in self._inventory def _get_block(self, pos): """Get the block at the position.""" raise NotImplementedError def _place(self, loc, exclude=None, block_=None): """Place a block from the inventory only. If exclude is not None, place a block that is not 'exclude'. If block is not None, place that block only. 
""" if not self._inventory: raise Exception('Inventory empty') if block_ is None: for key in self._inventory: if key != exclude: block_ = key break else: raise Exception(( 'You requested not to place %s, but it is the only ' 'block in the inventory.' % exclude )) if block_ not in self._inventory: raise Exception('Block %s is not in the inventory' % block_) if self._inventory[block_] == 1: del self._inventory[block_] else: self._inventory[block_] -= 1 self._set_block(loc, block_) def _move_down(self): """Move and mine the block below.""" new_pos = self._pos + _Vec3(0, -1, 0) block_ = self._get_block(new_pos) if block_ != _WATER: self._add_to_inv(block_) self._move(new_pos) def _add_to_inv(self, block_): """Add the block to the inventory.""" if block_ in self._inventory: self._inventory[block_] += 1 else: self._inventory[block_] = 1 def _move_up(self, exclude=None): """Move and place a block below. If exclude is not None, place a block that is not 'exclude'. """ self._move(self._pos + _Vec3(0, 1, 0)) self._place(self._pos + _Vec3(0, -1, 0), exclude) def _mine(self, loc): """Mine the block.""" block_ = self._get_block(loc) self._add_to_inv(block_) self._set_block(loc, _AIR) def _get_move_actions(self, exclude=None): """Return a list of legal movement actions. exclude is the block to exclude. """ rtn = [] # Check for moving up can_move_up = self._get_block(self._pos + _Vec3(0, 2, 0)) in {_AIR, _WATER} if can_move_up: if self._surrounded(): rtn.append({ 'func': '_move', 'args': (self._pos + _Vec3(0, 1, 0),) }) else: rtn.append({ 'func': '_move_up', 'args': (exclude,) }) # Check for moving down hidden_block = self._get_block(self._pos + _Vec3(0, -2, 0)) if hidden_block == _WATER or hidden_block not in {_AIR, _LAVA}: rtn.append({'func': '_move_down'}) # Check for side moves for dir_ in _adj_dirs(): rtn.extend(self._side_moves(dir_, can_move_up)) return rtn def _side_moves(self, dir_, can_move_up): """Return the list of side moves. dir_ is an adjacent direction. 
can_move_up is a boolean for whether or not the bot can move up. """ rtn = [] base_pos = self._pos + dir_ base_block = self._get_block(base_pos) empty_blocks = {_AIR, _WATER} # Check if it can move up if can_move_up and base_block not in {_AIR, _LAVA, _WATER}: for vert_dir in [_Vec3(0, 1, 0), _Vec3(0, 2, 0)]: if self._get_block(base_pos + vert_dir) not in empty_blocks: break else: rtn.append({ 'func': '_move', 'args': (base_pos + _Vec3(0, 1, 0),) }) # Check if it can move in that direction for vert_dir in [_Vec3(), _Vec3(0, 1, 0)]: if self._get_block(base_pos + vert_dir) not in empty_blocks: break # Fall else: pos = base_pos + _Vec3(0, -1, 0) for _ in xrange(_DROP_PLUS_1): block_ = self._get_block(pos) if block_ != _AIR: if block_ != _LAVA: rtn.append({ 'func': '_move', 'args': (pos + _Vec3(0, 1, 0),) }) break pos.y -= 1 def _surrounded(self): """Return whether or not the bot is surrounded by water.""" for dir_ in _adj_dirs(): if self._get_block(self._pos + dir_) != _WATER: return False return True def _get_mine_actions(self): """Return a list of legal mining actions (that only involve mining and not moving).""" rtn = [] dont_mine = {_AIR, _WATER, _LAVA} # Mine above. pos_above = self._pos + _Vec3(0, 2, 0) if self._get_block(pos_above) not in dont_mine: rtn.append({ 'func': '_mine', 'args': (pos_above,) }) for dir_ in _adj_dirs(): pos = self._pos + dir_ for _ in xrange(2): if self._get_block(pos) not in dont_mine: rtn.append({ 'func': '_mine', 'args': (pos,) }) pos = pos + _Vec3(0, 1, 0) return rtn def _get_placement_actions(self, exclude=None): """Return a list of legal actions that only involve placing a block from the inventory. exclude is a block id. It is the block that should not be placed. 
If None, any block can be placed.""" if not self._has_blocks_to_place(exclude=exclude): return [] dirs = [_Vec3(0, 2, 0)] for dir_ in _adj_dirs(): dirs.extend([dir_, dir_ + _Vec3(0, 1, 0)]) if self._get_block(self._pos + dir_) in [_AIR, _WATER]: dirs.append(dir_ + _Vec3(0, -1, 0)) rtn = [] for dir_ in dirs: pos = self._pos + dir_ if self._can_place(pos): rtn.append({ 'func': '_place', 'args': (pos,), 'kwargs': {'exclude': exclude} }) return rtn def _can_place(self, loc): """Return whether or not the bot can place a block at that location independent of what it has in its inventory.""" non_blocks = [_AIR, _WATER, _LAVA] player = [self._pos, self._pos + _Vec3(0, 1, 0)] for dir_ in _adj_dirs + [_Vec3(0, 1, 0), _Vec3(0, -1, 0)]: new_loc = loc + dir_ if new_loc not in player and self._get_block(new_loc) \ not in non_blocks: return True return False def _has_blocks_to_place(self, exclude=None): """Return whether or not the bot can place a block from the inventory. If exclude is None, any block can be placed.""" for block_ in self._inventory: if block_ != exclude: return True return False def _set_block(self, pos, block_): """Set a block. block_ is the block id.""" raise NotImplementedError def _move(self, pos): """Move there only.""" self._pos = deepcopy(pos) class _ImaginaryBot(_GenericBot): """A bot used for finding paths that doesn't actually change blocks in the world.""" def __init__(self, pos, inventory=None): """Create a new bot.""" _GenericBot.__init__(self, pos, inventory) self._changes = {} # Changes to the world def _set_block(self, pos, block_): """Set a block. 
block_ is the block id.""" self._changes[deepcopy(pos)] = block def _get_block(self, pos): """Get the block at the position.""" if pos in self._changes: return self._changes[pos] else: return _get_mc().getBlock(pos) def get_block(self, pos): """The public version.""" return self._get_block(pos) def __hash__(self): """Return the hash.""" return hash(frozenset([self._pos] + \ _key_vals(self._inventory) + \ _key_vals(self._changes) )) class Bot(_GenericBot): """The real bot. All vector arguments are Vec3s.""" _BOT_BLOCK = block.IRON_BLOCK.id def __init__(self): """Create a bot next to the player.""" pos = _get_mc().player.getTilePos() + Vec3(2, 0, 0) pos = _Vec3(pos.x, pos.y, pos.z) _GenericBot.__init__(self, pos) self._pos = pos self._move(self._pos) @staticmethod def destroy_all(): """Destroy all bots within a small distance (in case I forget to destroy one).""" player_loc = _player_loc() minec = _get_mc() rad = 10 for x in xrange(player_loc.x - rad, player_loc.x + rad): for y in xrange(player_loc.y - rad, player_loc.y + rad): for z in xrange(player_loc.z - rad, player_loc.z + rad): if minec.getBlock(x, y, z) == Bot._BOT_BLOCK: minec.setBlock(x, y, z, _AIR) def destroy(self): """Set itself to air.""" self._set_block(self._pos, _AIR) self._set_block(self._pos + _Vec3(0, 1, 0), _AIR) def fetch(self, block_name): """Mine and return a block to the player.""" imag_bot = _ImaginaryBot(self._pos, self._inventory) block_id = getattr(block, block_name).id block_loc = self._get_block_loc(block_id) mine_prob = _MineProblem(imag_bot, block_loc, block_id) mine_actions = astar(mine_prob, _mine_heuristic) self.take_actions(mine_actions, _DELAY) imag_bot = _ImaginaryBot(self._pos, self._inventory) player_loc = _player_loc() return_prob = _ReturnProblem(imag_bot, block_id, player_loc) return_actions = astar(return_prob, _return_heuristic) imag_bot.take_actions(return_actions) return_actions.append({ 'func': '_place', 'args': (imag_bot.get_pos() + player_loc) / 2, 'kwargs': {'block': 
block_id} }) self.take_actions(return_actions, _DELAY) def _get_block_loc(self, block_id): """Return the location of the block.""" find_prob = FindProblem(self._pos, block_id) dirs = bfs(find_prob) return self._pos + sum(dirs) def _set_block(self, pos, block_): """Place an actual block in the world. block is a block id.""" _get_mc().setBlock(pos, block_) def _get_block(self, pos): """Get the block at the position.""" return _get_mc().getBlock(pos) def _move(self, pos): """Move there, and set the appropriate blocks.""" self._set_block(self._pos, _AIR) self._set_block(self._pos + _Vec3(0, 1, 0), _AIR) self._set_block(pos, self._BOT_BLOCK) self._set_block(pos + _Vec3(0, 1, 0), self._BOT_BLOCK) self._pos = pos class FindProblem(SearchProblem): """Problem for finding the location of a block in the world. A state in this problem is a location. """ def __init__(self, start_loc, block_id): """Initialize.""" self._start_loc = deepcopy(start_loc) self._block_id = block_id def getStartState(self): """Return the starting location.""" return self._start_loc def isGoalState(self, state): return _get_mc().getBlock(state) == self._block_id def getSuccessors(self, state): """Return the successors.""" rtn = [] for dir_ in _all_dirs(): successor = state + dir_ if successor.y <= _get_mc().getHeight(successor.x, successor.z) \ and _get_mc().getBlock(successor) != _BEDROCK: rtn.append((successor, dir_, 1)) return rtn class _MineProblem(SearchProblem): """The problem of finding the block and mining it (not returning it).""" def __init__(self, imag_bot, block_loc, block_id): """Initialize the problem with an _ImaginaryBot. block_loc is a Vec3. 
""" self._bot = imag_bot self._block_loc = deepcopy(block_loc) self._block_id = block_id def get_block_loc(self): """Return the block location.""" return deepcopy(self._block_loc) def get_block_id(self): """Return the block it's trying to mine.""" return self._block_id def getStartState(self): """Return the bot passed in.""" return self._bot def isGoalState(self, state): """Return whether or not the bot has the block.""" return state.contains(self._block_id) def getSuccessors(self, state): """Return the successors.""" rtn = [] for action in state.get_legal_actions(): successor = deepcopy(state) successor.take_action(action) rtn.append((successor, action, 1)) return rtn class _ReturnProblem(SearchProblem): """The problem of returning to the player. This does not place the block next to the player.""" def __init__(self, imag_bot, block_, player_loc): """Initialized the problem with an _ImaginaryBot. block is a block id.""" self._bot = imag_bot self._block = block_ self._player_loc = player_loc def get_player_loc(self): """Return the player location.""" return deepcopy(self._player_loc) def getStartState(self): """Return the bot passed in.""" return self._bot def isGoalState(self, state): """Return whether or not the bot is next to the player.""" diff = state.get_pos() - self._player_loc return diff.y == 0 and (diff.x == 0 or diff.z == 0) and \ abs(diff.x) + abs(diff.z) == 2 and \ state.get_block(self._player_loc + diff/2 + _Vec3(0, -1, 0)) not in \ (_AIR, _LAVA, _WATER) def getSuccessors(self, state): """Return the successors.""" rtn = [] for action in state.get_legal_actions(self._block): successor = deepcopy(state) successor.take_action(action) rtn.append((successor, action, 1)) return rtn def _mine_heuristic(bot, problem): """Return the mining heuristic. bot is an _ImaginaryBot. 
""" if bot.contains(problem.get_block_id()): return 0 bot_pos = bot.get_pos() dest_pos = problem.get_block_loc() # If man == dy: return man + 1 # If man > dy: return man # If man < dy: return dy? man_dist = _manhattan((bot_pos.x, bot_pos.z), (dest_pos.x, dest_pos.z)) y_diff = bot_pos.y - dest_pos.y if y_diff < 0: y_diff += 1 if y_diff == 0: return man_dist # Transform so that it's only dropping drop = _DROP if y_diff > 0 else 1 y_diff = abs(y_diff) drops = _drops(y_diff, drop) if man_dist > drops: return man_dist if man_dist == drops: return man_dist + 1 if drop == 1: return drops if y_diff % drop == 1: return drops return drops + 1 def _drops(dist, drop): """Return the number of times it takes to drop a distance dist. drop is the length of one drop. Both are assumed positive.""" rtn = dist / drop if dist % drop != 0: rtn += 1 return rtn def _return_heuristic(bot, problem): """Return the return heuristic. bot is an _ImaginaryBot. """ bot_pos = bot.get_pos() player_pos = problem.get_player_loc() bot_plane_pos = (bot.x, bot.z) y_diff = bot_pos.y - player_pos.y drop = _DROP if y_diff > 0 else 1 y_diff = abs(y_diff) drops = _drops(y_diff, drop) min_man = float('inf') for dir_ in _adj_dirs(): loc = player_pos + 2 * dir_ man_dist = _manhattan(bot_plane_pos, (loc.x, loc.z)) if man_dist < min_man: min_man = man_dist if man_dist < drops: return drops return min_man def _to_my_vec3(vec): """Return the _Vec3 alternative of the Vec3.""" return _Vec3(vec.x, vec.y, vec.z) def _player_loc(): """Return the player's location.""" return _to_my_vec3(_get_mc().player.getTilePos()) def _adj_dirs(): """Return the adjacent directions.""" return [_Vec3(1, 0, 0), _Vec3(-1, 0, 0), _Vec3(0, 0, 1), _Vec3(0, 0, -1)] def _all_dirs(): """Return all adjacent directions.""" return _adj_dirs() + [_Vec3(0, 1, 0), _Vec3(0, -1, 0)] def _manhattan(pos1, pos2): """Return the manhattan distance. 
pos1 and pos2 should be iterable.""" return sum(abs(val1 - val2) for val1, val2 in zip(pos1, pos2)) @singleton def _get_mc(): """Return the Minecraft instance.""" return minecraft.Minecraft.create() def _key_vals(dict_): """Return a list of key-val tuples.""" return [(key, val) for key, val in dict_.iteritems()]
flexible
{ "blob_id": "54f0ed5f705d5ada28721301f297b2b0058773ad", "index": 2, "step-1": "<mask token>\n\n\nclass _GenericBot:\n <mask token>\n\n def __init__(self, pos, inventory=None):\n \"\"\"Initialize with an empty inventory.\n\n inventory is a dictionary. If None, an empty one will be used.\"\"\"\n if inventory is None:\n self._inventory = {}\n else:\n self._inventory = deepcopy(inventory)\n self._pos = deepcopy(pos)\n\n def take_action(self, action):\n \"\"\"Take the action (acquired from _get_legal_actions).\"\"\"\n getattr(self, action['func'])(*action.get('args', ()), **action.get\n ('kwargs', {}))\n\n def take_actions(self, actions, seconds=None):\n \"\"\"Take these actions. If seconds is not None, sleep 'seconds' \n seconds.\n \"\"\"\n if not actions:\n return\n self.take_action(actions[0])\n for action in actions[1:]:\n if seconds is not None:\n sleep(seconds)\n self.take_action(action)\n\n def get_pos(self):\n \"\"\"Return the position.\"\"\"\n return deepcopy(self._pos)\n\n def get_legal_actions(self, block_=None):\n \"\"\"Return a list of legal actions.\n\n If block_ is None, return all legal actions. 
Otherwise, return all\n legal actions that don't involve placing the block.\"\"\"\n return self._get_move_actions(block_) + self._get_mine_actions(\n ) + self._get_placement_actions(block_)\n <mask token>\n <mask token>\n\n def _place(self, loc, exclude=None, block_=None):\n \"\"\"Place a block from the inventory only.\n\n If exclude is not None, place a block that is not 'exclude'.\n If block is not None, place that block only.\n \"\"\"\n if not self._inventory:\n raise Exception('Inventory empty')\n if block_ is None:\n for key in self._inventory:\n if key != exclude:\n block_ = key\n break\n else:\n raise Exception(\n 'You requested not to place %s, but it is the only block in the inventory.'\n % exclude)\n if block_ not in self._inventory:\n raise Exception('Block %s is not in the inventory' % block_)\n if self._inventory[block_] == 1:\n del self._inventory[block_]\n else:\n self._inventory[block_] -= 1\n self._set_block(loc, block_)\n <mask token>\n <mask token>\n <mask token>\n\n def _mine(self, loc):\n \"\"\"Mine the block.\"\"\"\n block_ = self._get_block(loc)\n self._add_to_inv(block_)\n self._set_block(loc, _AIR)\n\n def _get_move_actions(self, exclude=None):\n \"\"\"Return a list of legal movement actions.\n\n exclude is the block to exclude.\n \"\"\"\n rtn = []\n can_move_up = self._get_block(self._pos + _Vec3(0, 2, 0)) in {_AIR,\n _WATER}\n if can_move_up:\n if self._surrounded():\n rtn.append({'func': '_move', 'args': (self._pos + _Vec3(0, \n 1, 0),)})\n else:\n rtn.append({'func': '_move_up', 'args': (exclude,)})\n hidden_block = self._get_block(self._pos + _Vec3(0, -2, 0))\n if hidden_block == _WATER or hidden_block not in {_AIR, _LAVA}:\n rtn.append({'func': '_move_down'})\n for dir_ in _adj_dirs():\n rtn.extend(self._side_moves(dir_, can_move_up))\n return rtn\n\n def _side_moves(self, dir_, can_move_up):\n \"\"\"Return the list of side moves.\n\n dir_ is an adjacent direction.\n can_move_up is a boolean for whether or not the bot can move up.\n 
\"\"\"\n rtn = []\n base_pos = self._pos + dir_\n base_block = self._get_block(base_pos)\n empty_blocks = {_AIR, _WATER}\n if can_move_up and base_block not in {_AIR, _LAVA, _WATER}:\n for vert_dir in [_Vec3(0, 1, 0), _Vec3(0, 2, 0)]:\n if self._get_block(base_pos + vert_dir) not in empty_blocks:\n break\n else:\n rtn.append({'func': '_move', 'args': (base_pos + _Vec3(0, 1,\n 0),)})\n for vert_dir in [_Vec3(), _Vec3(0, 1, 0)]:\n if self._get_block(base_pos + vert_dir) not in empty_blocks:\n break\n else:\n pos = base_pos + _Vec3(0, -1, 0)\n for _ in xrange(_DROP_PLUS_1):\n block_ = self._get_block(pos)\n if block_ != _AIR:\n if block_ != _LAVA:\n rtn.append({'func': '_move', 'args': (pos + _Vec3(0,\n 1, 0),)})\n break\n pos.y -= 1\n <mask token>\n\n def _get_mine_actions(self):\n \"\"\"Return a list of legal mining actions (that only involve mining\n and not moving).\"\"\"\n rtn = []\n dont_mine = {_AIR, _WATER, _LAVA}\n pos_above = self._pos + _Vec3(0, 2, 0)\n if self._get_block(pos_above) not in dont_mine:\n rtn.append({'func': '_mine', 'args': (pos_above,)})\n for dir_ in _adj_dirs():\n pos = self._pos + dir_\n for _ in xrange(2):\n if self._get_block(pos) not in dont_mine:\n rtn.append({'func': '_mine', 'args': (pos,)})\n pos = pos + _Vec3(0, 1, 0)\n return rtn\n\n def _get_placement_actions(self, exclude=None):\n \"\"\"Return a list of legal actions that only involve placing a block\n from the inventory.\n\n exclude is a block id. It is the block that should not be placed. 
If None,\n any block can be placed.\"\"\"\n if not self._has_blocks_to_place(exclude=exclude):\n return []\n dirs = [_Vec3(0, 2, 0)]\n for dir_ in _adj_dirs():\n dirs.extend([dir_, dir_ + _Vec3(0, 1, 0)])\n if self._get_block(self._pos + dir_) in [_AIR, _WATER]:\n dirs.append(dir_ + _Vec3(0, -1, 0))\n rtn = []\n for dir_ in dirs:\n pos = self._pos + dir_\n if self._can_place(pos):\n rtn.append({'func': '_place', 'args': (pos,), 'kwargs': {\n 'exclude': exclude}})\n return rtn\n <mask token>\n\n def _has_blocks_to_place(self, exclude=None):\n \"\"\"Return whether or not the bot can place a block from the\n inventory. If exclude is None, any block can be placed.\"\"\"\n for block_ in self._inventory:\n if block_ != exclude:\n return True\n return False\n <mask token>\n <mask token>\n\n\nclass _ImaginaryBot(_GenericBot):\n \"\"\"A bot used for finding paths that doesn't actually change blocks\n in the world.\"\"\"\n\n def __init__(self, pos, inventory=None):\n \"\"\"Create a new bot.\"\"\"\n _GenericBot.__init__(self, pos, inventory)\n self._changes = {}\n\n def _set_block(self, pos, block_):\n \"\"\"Set a block. 
block_ is the block id.\"\"\"\n self._changes[deepcopy(pos)] = block\n\n def _get_block(self, pos):\n \"\"\"Get the block at the position.\"\"\"\n if pos in self._changes:\n return self._changes[pos]\n else:\n return _get_mc().getBlock(pos)\n\n def get_block(self, pos):\n \"\"\"The public version.\"\"\"\n return self._get_block(pos)\n\n def __hash__(self):\n \"\"\"Return the hash.\"\"\"\n return hash(frozenset([self._pos] + _key_vals(self._inventory) +\n _key_vals(self._changes)))\n\n\nclass Bot(_GenericBot):\n \"\"\"The real bot.\n\n All vector arguments are Vec3s.\"\"\"\n _BOT_BLOCK = block.IRON_BLOCK.id\n\n def __init__(self):\n \"\"\"Create a bot next to the player.\"\"\"\n pos = _get_mc().player.getTilePos() + Vec3(2, 0, 0)\n pos = _Vec3(pos.x, pos.y, pos.z)\n _GenericBot.__init__(self, pos)\n self._pos = pos\n self._move(self._pos)\n\n @staticmethod\n def destroy_all():\n \"\"\"Destroy all bots within a small distance (in case I forget to\n destroy one).\"\"\"\n player_loc = _player_loc()\n minec = _get_mc()\n rad = 10\n for x in xrange(player_loc.x - rad, player_loc.x + rad):\n for y in xrange(player_loc.y - rad, player_loc.y + rad):\n for z in xrange(player_loc.z - rad, player_loc.z + rad):\n if minec.getBlock(x, y, z) == Bot._BOT_BLOCK:\n minec.setBlock(x, y, z, _AIR)\n\n def destroy(self):\n \"\"\"Set itself to air.\"\"\"\n self._set_block(self._pos, _AIR)\n self._set_block(self._pos + _Vec3(0, 1, 0), _AIR)\n\n def fetch(self, block_name):\n \"\"\"Mine and return a block to the player.\"\"\"\n imag_bot = _ImaginaryBot(self._pos, self._inventory)\n block_id = getattr(block, block_name).id\n block_loc = self._get_block_loc(block_id)\n mine_prob = _MineProblem(imag_bot, block_loc, block_id)\n mine_actions = astar(mine_prob, _mine_heuristic)\n self.take_actions(mine_actions, _DELAY)\n imag_bot = _ImaginaryBot(self._pos, self._inventory)\n player_loc = _player_loc()\n return_prob = _ReturnProblem(imag_bot, block_id, player_loc)\n return_actions = 
astar(return_prob, _return_heuristic)\n imag_bot.take_actions(return_actions)\n return_actions.append({'func': '_place', 'args': (imag_bot.get_pos(\n ) + player_loc) / 2, 'kwargs': {'block': block_id}})\n self.take_actions(return_actions, _DELAY)\n\n def _get_block_loc(self, block_id):\n \"\"\"Return the location of the block.\"\"\"\n find_prob = FindProblem(self._pos, block_id)\n dirs = bfs(find_prob)\n return self._pos + sum(dirs)\n\n def _set_block(self, pos, block_):\n \"\"\"Place an actual block in the world.\n\n block is a block id.\"\"\"\n _get_mc().setBlock(pos, block_)\n\n def _get_block(self, pos):\n \"\"\"Get the block at the position.\"\"\"\n return _get_mc().getBlock(pos)\n\n def _move(self, pos):\n \"\"\"Move there, and set the appropriate blocks.\"\"\"\n self._set_block(self._pos, _AIR)\n self._set_block(self._pos + _Vec3(0, 1, 0), _AIR)\n self._set_block(pos, self._BOT_BLOCK)\n self._set_block(pos + _Vec3(0, 1, 0), self._BOT_BLOCK)\n self._pos = pos\n\n\nclass FindProblem(SearchProblem):\n \"\"\"Problem for finding the location of a block in the world.\n\n A state in this problem is a location.\n \"\"\"\n\n def __init__(self, start_loc, block_id):\n \"\"\"Initialize.\"\"\"\n self._start_loc = deepcopy(start_loc)\n self._block_id = block_id\n\n def getStartState(self):\n \"\"\"Return the starting location.\"\"\"\n return self._start_loc\n\n def isGoalState(self, state):\n return _get_mc().getBlock(state) == self._block_id\n\n def getSuccessors(self, state):\n \"\"\"Return the successors.\"\"\"\n rtn = []\n for dir_ in _all_dirs():\n successor = state + dir_\n if successor.y <= _get_mc().getHeight(successor.x, successor.z\n ) and _get_mc().getBlock(successor) != _BEDROCK:\n rtn.append((successor, dir_, 1))\n return rtn\n\n\nclass _MineProblem(SearchProblem):\n \"\"\"The problem of finding the block and mining it (not returning\n it).\"\"\"\n\n def __init__(self, imag_bot, block_loc, block_id):\n \"\"\"Initialize the problem with an _ImaginaryBot.\n\n 
block_loc is a Vec3.\n \"\"\"\n self._bot = imag_bot\n self._block_loc = deepcopy(block_loc)\n self._block_id = block_id\n\n def get_block_loc(self):\n \"\"\"Return the block location.\"\"\"\n return deepcopy(self._block_loc)\n\n def get_block_id(self):\n \"\"\"Return the block it's trying to mine.\"\"\"\n return self._block_id\n\n def getStartState(self):\n \"\"\"Return the bot passed in.\"\"\"\n return self._bot\n\n def isGoalState(self, state):\n \"\"\"Return whether or not the bot has the block.\"\"\"\n return state.contains(self._block_id)\n\n def getSuccessors(self, state):\n \"\"\"Return the successors.\"\"\"\n rtn = []\n for action in state.get_legal_actions():\n successor = deepcopy(state)\n successor.take_action(action)\n rtn.append((successor, action, 1))\n return rtn\n\n\nclass _ReturnProblem(SearchProblem):\n \"\"\"The problem of returning to the player. This does not place the block\n next to the player.\"\"\"\n\n def __init__(self, imag_bot, block_, player_loc):\n \"\"\"Initialized the problem with an _ImaginaryBot.\n\n block is a block id.\"\"\"\n self._bot = imag_bot\n self._block = block_\n self._player_loc = player_loc\n\n def get_player_loc(self):\n \"\"\"Return the player location.\"\"\"\n return deepcopy(self._player_loc)\n\n def getStartState(self):\n \"\"\"Return the bot passed in.\"\"\"\n return self._bot\n\n def isGoalState(self, state):\n \"\"\"Return whether or not the bot is next to the player.\"\"\"\n diff = state.get_pos() - self._player_loc\n return diff.y == 0 and (diff.x == 0 or diff.z == 0) and abs(diff.x\n ) + abs(diff.z) == 2 and state.get_block(self._player_loc + \n diff / 2 + _Vec3(0, -1, 0)) not in (_AIR, _LAVA, _WATER)\n\n def getSuccessors(self, state):\n \"\"\"Return the successors.\"\"\"\n rtn = []\n for action in state.get_legal_actions(self._block):\n successor = deepcopy(state)\n successor.take_action(action)\n rtn.append((successor, action, 1))\n return rtn\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\nclass 
_GenericBot:\n <mask token>\n\n def __init__(self, pos, inventory=None):\n \"\"\"Initialize with an empty inventory.\n\n inventory is a dictionary. If None, an empty one will be used.\"\"\"\n if inventory is None:\n self._inventory = {}\n else:\n self._inventory = deepcopy(inventory)\n self._pos = deepcopy(pos)\n\n def take_action(self, action):\n \"\"\"Take the action (acquired from _get_legal_actions).\"\"\"\n getattr(self, action['func'])(*action.get('args', ()), **action.get\n ('kwargs', {}))\n\n def take_actions(self, actions, seconds=None):\n \"\"\"Take these actions. If seconds is not None, sleep 'seconds' \n seconds.\n \"\"\"\n if not actions:\n return\n self.take_action(actions[0])\n for action in actions[1:]:\n if seconds is not None:\n sleep(seconds)\n self.take_action(action)\n\n def get_pos(self):\n \"\"\"Return the position.\"\"\"\n return deepcopy(self._pos)\n\n def get_legal_actions(self, block_=None):\n \"\"\"Return a list of legal actions.\n\n If block_ is None, return all legal actions. 
Otherwise, return all\n legal actions that don't involve placing the block.\"\"\"\n return self._get_move_actions(block_) + self._get_mine_actions(\n ) + self._get_placement_actions(block_)\n <mask token>\n <mask token>\n\n def _place(self, loc, exclude=None, block_=None):\n \"\"\"Place a block from the inventory only.\n\n If exclude is not None, place a block that is not 'exclude'.\n If block is not None, place that block only.\n \"\"\"\n if not self._inventory:\n raise Exception('Inventory empty')\n if block_ is None:\n for key in self._inventory:\n if key != exclude:\n block_ = key\n break\n else:\n raise Exception(\n 'You requested not to place %s, but it is the only block in the inventory.'\n % exclude)\n if block_ not in self._inventory:\n raise Exception('Block %s is not in the inventory' % block_)\n if self._inventory[block_] == 1:\n del self._inventory[block_]\n else:\n self._inventory[block_] -= 1\n self._set_block(loc, block_)\n\n def _move_down(self):\n \"\"\"Move and mine the block below.\"\"\"\n new_pos = self._pos + _Vec3(0, -1, 0)\n block_ = self._get_block(new_pos)\n if block_ != _WATER:\n self._add_to_inv(block_)\n self._move(new_pos)\n <mask token>\n <mask token>\n\n def _mine(self, loc):\n \"\"\"Mine the block.\"\"\"\n block_ = self._get_block(loc)\n self._add_to_inv(block_)\n self._set_block(loc, _AIR)\n\n def _get_move_actions(self, exclude=None):\n \"\"\"Return a list of legal movement actions.\n\n exclude is the block to exclude.\n \"\"\"\n rtn = []\n can_move_up = self._get_block(self._pos + _Vec3(0, 2, 0)) in {_AIR,\n _WATER}\n if can_move_up:\n if self._surrounded():\n rtn.append({'func': '_move', 'args': (self._pos + _Vec3(0, \n 1, 0),)})\n else:\n rtn.append({'func': '_move_up', 'args': (exclude,)})\n hidden_block = self._get_block(self._pos + _Vec3(0, -2, 0))\n if hidden_block == _WATER or hidden_block not in {_AIR, _LAVA}:\n rtn.append({'func': '_move_down'})\n for dir_ in _adj_dirs():\n rtn.extend(self._side_moves(dir_, 
can_move_up))\n return rtn\n\n def _side_moves(self, dir_, can_move_up):\n \"\"\"Return the list of side moves.\n\n dir_ is an adjacent direction.\n can_move_up is a boolean for whether or not the bot can move up.\n \"\"\"\n rtn = []\n base_pos = self._pos + dir_\n base_block = self._get_block(base_pos)\n empty_blocks = {_AIR, _WATER}\n if can_move_up and base_block not in {_AIR, _LAVA, _WATER}:\n for vert_dir in [_Vec3(0, 1, 0), _Vec3(0, 2, 0)]:\n if self._get_block(base_pos + vert_dir) not in empty_blocks:\n break\n else:\n rtn.append({'func': '_move', 'args': (base_pos + _Vec3(0, 1,\n 0),)})\n for vert_dir in [_Vec3(), _Vec3(0, 1, 0)]:\n if self._get_block(base_pos + vert_dir) not in empty_blocks:\n break\n else:\n pos = base_pos + _Vec3(0, -1, 0)\n for _ in xrange(_DROP_PLUS_1):\n block_ = self._get_block(pos)\n if block_ != _AIR:\n if block_ != _LAVA:\n rtn.append({'func': '_move', 'args': (pos + _Vec3(0,\n 1, 0),)})\n break\n pos.y -= 1\n <mask token>\n\n def _get_mine_actions(self):\n \"\"\"Return a list of legal mining actions (that only involve mining\n and not moving).\"\"\"\n rtn = []\n dont_mine = {_AIR, _WATER, _LAVA}\n pos_above = self._pos + _Vec3(0, 2, 0)\n if self._get_block(pos_above) not in dont_mine:\n rtn.append({'func': '_mine', 'args': (pos_above,)})\n for dir_ in _adj_dirs():\n pos = self._pos + dir_\n for _ in xrange(2):\n if self._get_block(pos) not in dont_mine:\n rtn.append({'func': '_mine', 'args': (pos,)})\n pos = pos + _Vec3(0, 1, 0)\n return rtn\n\n def _get_placement_actions(self, exclude=None):\n \"\"\"Return a list of legal actions that only involve placing a block\n from the inventory.\n\n exclude is a block id. It is the block that should not be placed. 
If None,\n any block can be placed.\"\"\"\n if not self._has_blocks_to_place(exclude=exclude):\n return []\n dirs = [_Vec3(0, 2, 0)]\n for dir_ in _adj_dirs():\n dirs.extend([dir_, dir_ + _Vec3(0, 1, 0)])\n if self._get_block(self._pos + dir_) in [_AIR, _WATER]:\n dirs.append(dir_ + _Vec3(0, -1, 0))\n rtn = []\n for dir_ in dirs:\n pos = self._pos + dir_\n if self._can_place(pos):\n rtn.append({'func': '_place', 'args': (pos,), 'kwargs': {\n 'exclude': exclude}})\n return rtn\n <mask token>\n\n def _has_blocks_to_place(self, exclude=None):\n \"\"\"Return whether or not the bot can place a block from the\n inventory. If exclude is None, any block can be placed.\"\"\"\n for block_ in self._inventory:\n if block_ != exclude:\n return True\n return False\n <mask token>\n <mask token>\n\n\nclass _ImaginaryBot(_GenericBot):\n \"\"\"A bot used for finding paths that doesn't actually change blocks\n in the world.\"\"\"\n\n def __init__(self, pos, inventory=None):\n \"\"\"Create a new bot.\"\"\"\n _GenericBot.__init__(self, pos, inventory)\n self._changes = {}\n\n def _set_block(self, pos, block_):\n \"\"\"Set a block. 
block_ is the block id.\"\"\"\n self._changes[deepcopy(pos)] = block\n\n def _get_block(self, pos):\n \"\"\"Get the block at the position.\"\"\"\n if pos in self._changes:\n return self._changes[pos]\n else:\n return _get_mc().getBlock(pos)\n\n def get_block(self, pos):\n \"\"\"The public version.\"\"\"\n return self._get_block(pos)\n\n def __hash__(self):\n \"\"\"Return the hash.\"\"\"\n return hash(frozenset([self._pos] + _key_vals(self._inventory) +\n _key_vals(self._changes)))\n\n\nclass Bot(_GenericBot):\n \"\"\"The real bot.\n\n All vector arguments are Vec3s.\"\"\"\n _BOT_BLOCK = block.IRON_BLOCK.id\n\n def __init__(self):\n \"\"\"Create a bot next to the player.\"\"\"\n pos = _get_mc().player.getTilePos() + Vec3(2, 0, 0)\n pos = _Vec3(pos.x, pos.y, pos.z)\n _GenericBot.__init__(self, pos)\n self._pos = pos\n self._move(self._pos)\n\n @staticmethod\n def destroy_all():\n \"\"\"Destroy all bots within a small distance (in case I forget to\n destroy one).\"\"\"\n player_loc = _player_loc()\n minec = _get_mc()\n rad = 10\n for x in xrange(player_loc.x - rad, player_loc.x + rad):\n for y in xrange(player_loc.y - rad, player_loc.y + rad):\n for z in xrange(player_loc.z - rad, player_loc.z + rad):\n if minec.getBlock(x, y, z) == Bot._BOT_BLOCK:\n minec.setBlock(x, y, z, _AIR)\n\n def destroy(self):\n \"\"\"Set itself to air.\"\"\"\n self._set_block(self._pos, _AIR)\n self._set_block(self._pos + _Vec3(0, 1, 0), _AIR)\n\n def fetch(self, block_name):\n \"\"\"Mine and return a block to the player.\"\"\"\n imag_bot = _ImaginaryBot(self._pos, self._inventory)\n block_id = getattr(block, block_name).id\n block_loc = self._get_block_loc(block_id)\n mine_prob = _MineProblem(imag_bot, block_loc, block_id)\n mine_actions = astar(mine_prob, _mine_heuristic)\n self.take_actions(mine_actions, _DELAY)\n imag_bot = _ImaginaryBot(self._pos, self._inventory)\n player_loc = _player_loc()\n return_prob = _ReturnProblem(imag_bot, block_id, player_loc)\n return_actions = 
astar(return_prob, _return_heuristic)\n imag_bot.take_actions(return_actions)\n return_actions.append({'func': '_place', 'args': (imag_bot.get_pos(\n ) + player_loc) / 2, 'kwargs': {'block': block_id}})\n self.take_actions(return_actions, _DELAY)\n\n def _get_block_loc(self, block_id):\n \"\"\"Return the location of the block.\"\"\"\n find_prob = FindProblem(self._pos, block_id)\n dirs = bfs(find_prob)\n return self._pos + sum(dirs)\n\n def _set_block(self, pos, block_):\n \"\"\"Place an actual block in the world.\n\n block is a block id.\"\"\"\n _get_mc().setBlock(pos, block_)\n\n def _get_block(self, pos):\n \"\"\"Get the block at the position.\"\"\"\n return _get_mc().getBlock(pos)\n\n def _move(self, pos):\n \"\"\"Move there, and set the appropriate blocks.\"\"\"\n self._set_block(self._pos, _AIR)\n self._set_block(self._pos + _Vec3(0, 1, 0), _AIR)\n self._set_block(pos, self._BOT_BLOCK)\n self._set_block(pos + _Vec3(0, 1, 0), self._BOT_BLOCK)\n self._pos = pos\n\n\nclass FindProblem(SearchProblem):\n \"\"\"Problem for finding the location of a block in the world.\n\n A state in this problem is a location.\n \"\"\"\n\n def __init__(self, start_loc, block_id):\n \"\"\"Initialize.\"\"\"\n self._start_loc = deepcopy(start_loc)\n self._block_id = block_id\n\n def getStartState(self):\n \"\"\"Return the starting location.\"\"\"\n return self._start_loc\n\n def isGoalState(self, state):\n return _get_mc().getBlock(state) == self._block_id\n\n def getSuccessors(self, state):\n \"\"\"Return the successors.\"\"\"\n rtn = []\n for dir_ in _all_dirs():\n successor = state + dir_\n if successor.y <= _get_mc().getHeight(successor.x, successor.z\n ) and _get_mc().getBlock(successor) != _BEDROCK:\n rtn.append((successor, dir_, 1))\n return rtn\n\n\nclass _MineProblem(SearchProblem):\n \"\"\"The problem of finding the block and mining it (not returning\n it).\"\"\"\n\n def __init__(self, imag_bot, block_loc, block_id):\n \"\"\"Initialize the problem with an _ImaginaryBot.\n\n 
block_loc is a Vec3.\n \"\"\"\n self._bot = imag_bot\n self._block_loc = deepcopy(block_loc)\n self._block_id = block_id\n\n def get_block_loc(self):\n \"\"\"Return the block location.\"\"\"\n return deepcopy(self._block_loc)\n\n def get_block_id(self):\n \"\"\"Return the block it's trying to mine.\"\"\"\n return self._block_id\n\n def getStartState(self):\n \"\"\"Return the bot passed in.\"\"\"\n return self._bot\n\n def isGoalState(self, state):\n \"\"\"Return whether or not the bot has the block.\"\"\"\n return state.contains(self._block_id)\n\n def getSuccessors(self, state):\n \"\"\"Return the successors.\"\"\"\n rtn = []\n for action in state.get_legal_actions():\n successor = deepcopy(state)\n successor.take_action(action)\n rtn.append((successor, action, 1))\n return rtn\n\n\nclass _ReturnProblem(SearchProblem):\n \"\"\"The problem of returning to the player. This does not place the block\n next to the player.\"\"\"\n\n def __init__(self, imag_bot, block_, player_loc):\n \"\"\"Initialized the problem with an _ImaginaryBot.\n\n block is a block id.\"\"\"\n self._bot = imag_bot\n self._block = block_\n self._player_loc = player_loc\n\n def get_player_loc(self):\n \"\"\"Return the player location.\"\"\"\n return deepcopy(self._player_loc)\n\n def getStartState(self):\n \"\"\"Return the bot passed in.\"\"\"\n return self._bot\n\n def isGoalState(self, state):\n \"\"\"Return whether or not the bot is next to the player.\"\"\"\n diff = state.get_pos() - self._player_loc\n return diff.y == 0 and (diff.x == 0 or diff.z == 0) and abs(diff.x\n ) + abs(diff.z) == 2 and state.get_block(self._player_loc + \n diff / 2 + _Vec3(0, -1, 0)) not in (_AIR, _LAVA, _WATER)\n\n def getSuccessors(self, state):\n \"\"\"Return the successors.\"\"\"\n rtn = []\n for action in state.get_legal_actions(self._block):\n successor = deepcopy(state)\n successor.take_action(action)\n rtn.append((successor, action, 1))\n return rtn\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\nclass 
_GenericBot:\n <mask token>\n\n def __init__(self, pos, inventory=None):\n \"\"\"Initialize with an empty inventory.\n\n inventory is a dictionary. If None, an empty one will be used.\"\"\"\n if inventory is None:\n self._inventory = {}\n else:\n self._inventory = deepcopy(inventory)\n self._pos = deepcopy(pos)\n\n def take_action(self, action):\n \"\"\"Take the action (acquired from _get_legal_actions).\"\"\"\n getattr(self, action['func'])(*action.get('args', ()), **action.get\n ('kwargs', {}))\n\n def take_actions(self, actions, seconds=None):\n \"\"\"Take these actions. If seconds is not None, sleep 'seconds' \n seconds.\n \"\"\"\n if not actions:\n return\n self.take_action(actions[0])\n for action in actions[1:]:\n if seconds is not None:\n sleep(seconds)\n self.take_action(action)\n\n def get_pos(self):\n \"\"\"Return the position.\"\"\"\n return deepcopy(self._pos)\n\n def get_legal_actions(self, block_=None):\n \"\"\"Return a list of legal actions.\n\n If block_ is None, return all legal actions. 
Otherwise, return all\n legal actions that don't involve placing the block.\"\"\"\n return self._get_move_actions(block_) + self._get_mine_actions(\n ) + self._get_placement_actions(block_)\n\n def contains(self, block_):\n \"\"\"Return whether or not the bot contains the block id.\"\"\"\n return block_ in self._inventory\n\n def _get_block(self, pos):\n \"\"\"Get the block at the position.\"\"\"\n raise NotImplementedError\n\n def _place(self, loc, exclude=None, block_=None):\n \"\"\"Place a block from the inventory only.\n\n If exclude is not None, place a block that is not 'exclude'.\n If block is not None, place that block only.\n \"\"\"\n if not self._inventory:\n raise Exception('Inventory empty')\n if block_ is None:\n for key in self._inventory:\n if key != exclude:\n block_ = key\n break\n else:\n raise Exception(\n 'You requested not to place %s, but it is the only block in the inventory.'\n % exclude)\n if block_ not in self._inventory:\n raise Exception('Block %s is not in the inventory' % block_)\n if self._inventory[block_] == 1:\n del self._inventory[block_]\n else:\n self._inventory[block_] -= 1\n self._set_block(loc, block_)\n\n def _move_down(self):\n \"\"\"Move and mine the block below.\"\"\"\n new_pos = self._pos + _Vec3(0, -1, 0)\n block_ = self._get_block(new_pos)\n if block_ != _WATER:\n self._add_to_inv(block_)\n self._move(new_pos)\n <mask token>\n\n def _move_up(self, exclude=None):\n \"\"\"Move and place a block below.\n\n If exclude is not None, place a block that is not 'exclude'.\n \"\"\"\n self._move(self._pos + _Vec3(0, 1, 0))\n self._place(self._pos + _Vec3(0, -1, 0), exclude)\n\n def _mine(self, loc):\n \"\"\"Mine the block.\"\"\"\n block_ = self._get_block(loc)\n self._add_to_inv(block_)\n self._set_block(loc, _AIR)\n\n def _get_move_actions(self, exclude=None):\n \"\"\"Return a list of legal movement actions.\n\n exclude is the block to exclude.\n \"\"\"\n rtn = []\n can_move_up = self._get_block(self._pos + _Vec3(0, 2, 0)) in 
{_AIR,\n _WATER}\n if can_move_up:\n if self._surrounded():\n rtn.append({'func': '_move', 'args': (self._pos + _Vec3(0, \n 1, 0),)})\n else:\n rtn.append({'func': '_move_up', 'args': (exclude,)})\n hidden_block = self._get_block(self._pos + _Vec3(0, -2, 0))\n if hidden_block == _WATER or hidden_block not in {_AIR, _LAVA}:\n rtn.append({'func': '_move_down'})\n for dir_ in _adj_dirs():\n rtn.extend(self._side_moves(dir_, can_move_up))\n return rtn\n\n def _side_moves(self, dir_, can_move_up):\n \"\"\"Return the list of side moves.\n\n dir_ is an adjacent direction.\n can_move_up is a boolean for whether or not the bot can move up.\n \"\"\"\n rtn = []\n base_pos = self._pos + dir_\n base_block = self._get_block(base_pos)\n empty_blocks = {_AIR, _WATER}\n if can_move_up and base_block not in {_AIR, _LAVA, _WATER}:\n for vert_dir in [_Vec3(0, 1, 0), _Vec3(0, 2, 0)]:\n if self._get_block(base_pos + vert_dir) not in empty_blocks:\n break\n else:\n rtn.append({'func': '_move', 'args': (base_pos + _Vec3(0, 1,\n 0),)})\n for vert_dir in [_Vec3(), _Vec3(0, 1, 0)]:\n if self._get_block(base_pos + vert_dir) not in empty_blocks:\n break\n else:\n pos = base_pos + _Vec3(0, -1, 0)\n for _ in xrange(_DROP_PLUS_1):\n block_ = self._get_block(pos)\n if block_ != _AIR:\n if block_ != _LAVA:\n rtn.append({'func': '_move', 'args': (pos + _Vec3(0,\n 1, 0),)})\n break\n pos.y -= 1\n\n def _surrounded(self):\n \"\"\"Return whether or not the bot is surrounded by water.\"\"\"\n for dir_ in _adj_dirs():\n if self._get_block(self._pos + dir_) != _WATER:\n return False\n return True\n\n def _get_mine_actions(self):\n \"\"\"Return a list of legal mining actions (that only involve mining\n and not moving).\"\"\"\n rtn = []\n dont_mine = {_AIR, _WATER, _LAVA}\n pos_above = self._pos + _Vec3(0, 2, 0)\n if self._get_block(pos_above) not in dont_mine:\n rtn.append({'func': '_mine', 'args': (pos_above,)})\n for dir_ in _adj_dirs():\n pos = self._pos + dir_\n for _ in xrange(2):\n if 
self._get_block(pos) not in dont_mine:\n rtn.append({'func': '_mine', 'args': (pos,)})\n pos = pos + _Vec3(0, 1, 0)\n return rtn\n\n def _get_placement_actions(self, exclude=None):\n \"\"\"Return a list of legal actions that only involve placing a block\n from the inventory.\n\n exclude is a block id. It is the block that should not be placed. If None,\n any block can be placed.\"\"\"\n if not self._has_blocks_to_place(exclude=exclude):\n return []\n dirs = [_Vec3(0, 2, 0)]\n for dir_ in _adj_dirs():\n dirs.extend([dir_, dir_ + _Vec3(0, 1, 0)])\n if self._get_block(self._pos + dir_) in [_AIR, _WATER]:\n dirs.append(dir_ + _Vec3(0, -1, 0))\n rtn = []\n for dir_ in dirs:\n pos = self._pos + dir_\n if self._can_place(pos):\n rtn.append({'func': '_place', 'args': (pos,), 'kwargs': {\n 'exclude': exclude}})\n return rtn\n\n def _can_place(self, loc):\n \"\"\"Return whether or not the bot can place a block at that location\n independent of what it has in its inventory.\"\"\"\n non_blocks = [_AIR, _WATER, _LAVA]\n player = [self._pos, self._pos + _Vec3(0, 1, 0)]\n for dir_ in (_adj_dirs + [_Vec3(0, 1, 0), _Vec3(0, -1, 0)]):\n new_loc = loc + dir_\n if new_loc not in player and self._get_block(new_loc\n ) not in non_blocks:\n return True\n return False\n\n def _has_blocks_to_place(self, exclude=None):\n \"\"\"Return whether or not the bot can place a block from the\n inventory. If exclude is None, any block can be placed.\"\"\"\n for block_ in self._inventory:\n if block_ != exclude:\n return True\n return False\n <mask token>\n <mask token>\n\n\nclass _ImaginaryBot(_GenericBot):\n \"\"\"A bot used for finding paths that doesn't actually change blocks\n in the world.\"\"\"\n\n def __init__(self, pos, inventory=None):\n \"\"\"Create a new bot.\"\"\"\n _GenericBot.__init__(self, pos, inventory)\n self._changes = {}\n\n def _set_block(self, pos, block_):\n \"\"\"Set a block. 
block_ is the block id.\"\"\"\n self._changes[deepcopy(pos)] = block\n\n def _get_block(self, pos):\n \"\"\"Get the block at the position.\"\"\"\n if pos in self._changes:\n return self._changes[pos]\n else:\n return _get_mc().getBlock(pos)\n\n def get_block(self, pos):\n \"\"\"The public version.\"\"\"\n return self._get_block(pos)\n\n def __hash__(self):\n \"\"\"Return the hash.\"\"\"\n return hash(frozenset([self._pos] + _key_vals(self._inventory) +\n _key_vals(self._changes)))\n\n\nclass Bot(_GenericBot):\n \"\"\"The real bot.\n\n All vector arguments are Vec3s.\"\"\"\n _BOT_BLOCK = block.IRON_BLOCK.id\n\n def __init__(self):\n \"\"\"Create a bot next to the player.\"\"\"\n pos = _get_mc().player.getTilePos() + Vec3(2, 0, 0)\n pos = _Vec3(pos.x, pos.y, pos.z)\n _GenericBot.__init__(self, pos)\n self._pos = pos\n self._move(self._pos)\n\n @staticmethod\n def destroy_all():\n \"\"\"Destroy all bots within a small distance (in case I forget to\n destroy one).\"\"\"\n player_loc = _player_loc()\n minec = _get_mc()\n rad = 10\n for x in xrange(player_loc.x - rad, player_loc.x + rad):\n for y in xrange(player_loc.y - rad, player_loc.y + rad):\n for z in xrange(player_loc.z - rad, player_loc.z + rad):\n if minec.getBlock(x, y, z) == Bot._BOT_BLOCK:\n minec.setBlock(x, y, z, _AIR)\n\n def destroy(self):\n \"\"\"Set itself to air.\"\"\"\n self._set_block(self._pos, _AIR)\n self._set_block(self._pos + _Vec3(0, 1, 0), _AIR)\n\n def fetch(self, block_name):\n \"\"\"Mine and return a block to the player.\"\"\"\n imag_bot = _ImaginaryBot(self._pos, self._inventory)\n block_id = getattr(block, block_name).id\n block_loc = self._get_block_loc(block_id)\n mine_prob = _MineProblem(imag_bot, block_loc, block_id)\n mine_actions = astar(mine_prob, _mine_heuristic)\n self.take_actions(mine_actions, _DELAY)\n imag_bot = _ImaginaryBot(self._pos, self._inventory)\n player_loc = _player_loc()\n return_prob = _ReturnProblem(imag_bot, block_id, player_loc)\n return_actions = 
astar(return_prob, _return_heuristic)\n imag_bot.take_actions(return_actions)\n return_actions.append({'func': '_place', 'args': (imag_bot.get_pos(\n ) + player_loc) / 2, 'kwargs': {'block': block_id}})\n self.take_actions(return_actions, _DELAY)\n\n def _get_block_loc(self, block_id):\n \"\"\"Return the location of the block.\"\"\"\n find_prob = FindProblem(self._pos, block_id)\n dirs = bfs(find_prob)\n return self._pos + sum(dirs)\n\n def _set_block(self, pos, block_):\n \"\"\"Place an actual block in the world.\n\n block is a block id.\"\"\"\n _get_mc().setBlock(pos, block_)\n\n def _get_block(self, pos):\n \"\"\"Get the block at the position.\"\"\"\n return _get_mc().getBlock(pos)\n\n def _move(self, pos):\n \"\"\"Move there, and set the appropriate blocks.\"\"\"\n self._set_block(self._pos, _AIR)\n self._set_block(self._pos + _Vec3(0, 1, 0), _AIR)\n self._set_block(pos, self._BOT_BLOCK)\n self._set_block(pos + _Vec3(0, 1, 0), self._BOT_BLOCK)\n self._pos = pos\n\n\nclass FindProblem(SearchProblem):\n \"\"\"Problem for finding the location of a block in the world.\n\n A state in this problem is a location.\n \"\"\"\n\n def __init__(self, start_loc, block_id):\n \"\"\"Initialize.\"\"\"\n self._start_loc = deepcopy(start_loc)\n self._block_id = block_id\n\n def getStartState(self):\n \"\"\"Return the starting location.\"\"\"\n return self._start_loc\n\n def isGoalState(self, state):\n return _get_mc().getBlock(state) == self._block_id\n\n def getSuccessors(self, state):\n \"\"\"Return the successors.\"\"\"\n rtn = []\n for dir_ in _all_dirs():\n successor = state + dir_\n if successor.y <= _get_mc().getHeight(successor.x, successor.z\n ) and _get_mc().getBlock(successor) != _BEDROCK:\n rtn.append((successor, dir_, 1))\n return rtn\n\n\nclass _MineProblem(SearchProblem):\n \"\"\"The problem of finding the block and mining it (not returning\n it).\"\"\"\n\n def __init__(self, imag_bot, block_loc, block_id):\n \"\"\"Initialize the problem with an _ImaginaryBot.\n\n 
block_loc is a Vec3.\n \"\"\"\n self._bot = imag_bot\n self._block_loc = deepcopy(block_loc)\n self._block_id = block_id\n\n def get_block_loc(self):\n \"\"\"Return the block location.\"\"\"\n return deepcopy(self._block_loc)\n\n def get_block_id(self):\n \"\"\"Return the block it's trying to mine.\"\"\"\n return self._block_id\n\n def getStartState(self):\n \"\"\"Return the bot passed in.\"\"\"\n return self._bot\n\n def isGoalState(self, state):\n \"\"\"Return whether or not the bot has the block.\"\"\"\n return state.contains(self._block_id)\n\n def getSuccessors(self, state):\n \"\"\"Return the successors.\"\"\"\n rtn = []\n for action in state.get_legal_actions():\n successor = deepcopy(state)\n successor.take_action(action)\n rtn.append((successor, action, 1))\n return rtn\n\n\nclass _ReturnProblem(SearchProblem):\n \"\"\"The problem of returning to the player. This does not place the block\n next to the player.\"\"\"\n\n def __init__(self, imag_bot, block_, player_loc):\n \"\"\"Initialized the problem with an _ImaginaryBot.\n\n block is a block id.\"\"\"\n self._bot = imag_bot\n self._block = block_\n self._player_loc = player_loc\n\n def get_player_loc(self):\n \"\"\"Return the player location.\"\"\"\n return deepcopy(self._player_loc)\n\n def getStartState(self):\n \"\"\"Return the bot passed in.\"\"\"\n return self._bot\n\n def isGoalState(self, state):\n \"\"\"Return whether or not the bot is next to the player.\"\"\"\n diff = state.get_pos() - self._player_loc\n return diff.y == 0 and (diff.x == 0 or diff.z == 0) and abs(diff.x\n ) + abs(diff.z) == 2 and state.get_block(self._player_loc + \n diff / 2 + _Vec3(0, -1, 0)) not in (_AIR, _LAVA, _WATER)\n\n def getSuccessors(self, state):\n \"\"\"Return the successors.\"\"\"\n rtn = []\n for action in state.get_legal_actions(self._block):\n successor = deepcopy(state)\n successor.take_action(action)\n rtn.append((successor, action, 1))\n return rtn\n\n\n<mask token>\n", "step-4": "<mask token>\n\n\nclass 
_GenericBot:\n <mask token>\n\n def __init__(self, pos, inventory=None):\n \"\"\"Initialize with an empty inventory.\n\n inventory is a dictionary. If None, an empty one will be used.\"\"\"\n if inventory is None:\n self._inventory = {}\n else:\n self._inventory = deepcopy(inventory)\n self._pos = deepcopy(pos)\n\n def take_action(self, action):\n \"\"\"Take the action (acquired from _get_legal_actions).\"\"\"\n getattr(self, action['func'])(*action.get('args', ()), **action.get\n ('kwargs', {}))\n\n def take_actions(self, actions, seconds=None):\n \"\"\"Take these actions. If seconds is not None, sleep 'seconds' \n seconds.\n \"\"\"\n if not actions:\n return\n self.take_action(actions[0])\n for action in actions[1:]:\n if seconds is not None:\n sleep(seconds)\n self.take_action(action)\n\n def get_pos(self):\n \"\"\"Return the position.\"\"\"\n return deepcopy(self._pos)\n\n def get_legal_actions(self, block_=None):\n \"\"\"Return a list of legal actions.\n\n If block_ is None, return all legal actions. 
Otherwise, return all\n legal actions that don't involve placing the block.\"\"\"\n return self._get_move_actions(block_) + self._get_mine_actions(\n ) + self._get_placement_actions(block_)\n\n def contains(self, block_):\n \"\"\"Return whether or not the bot contains the block id.\"\"\"\n return block_ in self._inventory\n\n def _get_block(self, pos):\n \"\"\"Get the block at the position.\"\"\"\n raise NotImplementedError\n\n def _place(self, loc, exclude=None, block_=None):\n \"\"\"Place a block from the inventory only.\n\n If exclude is not None, place a block that is not 'exclude'.\n If block is not None, place that block only.\n \"\"\"\n if not self._inventory:\n raise Exception('Inventory empty')\n if block_ is None:\n for key in self._inventory:\n if key != exclude:\n block_ = key\n break\n else:\n raise Exception(\n 'You requested not to place %s, but it is the only block in the inventory.'\n % exclude)\n if block_ not in self._inventory:\n raise Exception('Block %s is not in the inventory' % block_)\n if self._inventory[block_] == 1:\n del self._inventory[block_]\n else:\n self._inventory[block_] -= 1\n self._set_block(loc, block_)\n\n def _move_down(self):\n \"\"\"Move and mine the block below.\"\"\"\n new_pos = self._pos + _Vec3(0, -1, 0)\n block_ = self._get_block(new_pos)\n if block_ != _WATER:\n self._add_to_inv(block_)\n self._move(new_pos)\n <mask token>\n\n def _move_up(self, exclude=None):\n \"\"\"Move and place a block below.\n\n If exclude is not None, place a block that is not 'exclude'.\n \"\"\"\n self._move(self._pos + _Vec3(0, 1, 0))\n self._place(self._pos + _Vec3(0, -1, 0), exclude)\n\n def _mine(self, loc):\n \"\"\"Mine the block.\"\"\"\n block_ = self._get_block(loc)\n self._add_to_inv(block_)\n self._set_block(loc, _AIR)\n\n def _get_move_actions(self, exclude=None):\n \"\"\"Return a list of legal movement actions.\n\n exclude is the block to exclude.\n \"\"\"\n rtn = []\n can_move_up = self._get_block(self._pos + _Vec3(0, 2, 0)) in 
{_AIR,\n _WATER}\n if can_move_up:\n if self._surrounded():\n rtn.append({'func': '_move', 'args': (self._pos + _Vec3(0, \n 1, 0),)})\n else:\n rtn.append({'func': '_move_up', 'args': (exclude,)})\n hidden_block = self._get_block(self._pos + _Vec3(0, -2, 0))\n if hidden_block == _WATER or hidden_block not in {_AIR, _LAVA}:\n rtn.append({'func': '_move_down'})\n for dir_ in _adj_dirs():\n rtn.extend(self._side_moves(dir_, can_move_up))\n return rtn\n\n def _side_moves(self, dir_, can_move_up):\n \"\"\"Return the list of side moves.\n\n dir_ is an adjacent direction.\n can_move_up is a boolean for whether or not the bot can move up.\n \"\"\"\n rtn = []\n base_pos = self._pos + dir_\n base_block = self._get_block(base_pos)\n empty_blocks = {_AIR, _WATER}\n if can_move_up and base_block not in {_AIR, _LAVA, _WATER}:\n for vert_dir in [_Vec3(0, 1, 0), _Vec3(0, 2, 0)]:\n if self._get_block(base_pos + vert_dir) not in empty_blocks:\n break\n else:\n rtn.append({'func': '_move', 'args': (base_pos + _Vec3(0, 1,\n 0),)})\n for vert_dir in [_Vec3(), _Vec3(0, 1, 0)]:\n if self._get_block(base_pos + vert_dir) not in empty_blocks:\n break\n else:\n pos = base_pos + _Vec3(0, -1, 0)\n for _ in xrange(_DROP_PLUS_1):\n block_ = self._get_block(pos)\n if block_ != _AIR:\n if block_ != _LAVA:\n rtn.append({'func': '_move', 'args': (pos + _Vec3(0,\n 1, 0),)})\n break\n pos.y -= 1\n\n def _surrounded(self):\n \"\"\"Return whether or not the bot is surrounded by water.\"\"\"\n for dir_ in _adj_dirs():\n if self._get_block(self._pos + dir_) != _WATER:\n return False\n return True\n\n def _get_mine_actions(self):\n \"\"\"Return a list of legal mining actions (that only involve mining\n and not moving).\"\"\"\n rtn = []\n dont_mine = {_AIR, _WATER, _LAVA}\n pos_above = self._pos + _Vec3(0, 2, 0)\n if self._get_block(pos_above) not in dont_mine:\n rtn.append({'func': '_mine', 'args': (pos_above,)})\n for dir_ in _adj_dirs():\n pos = self._pos + dir_\n for _ in xrange(2):\n if 
self._get_block(pos) not in dont_mine:\n rtn.append({'func': '_mine', 'args': (pos,)})\n pos = pos + _Vec3(0, 1, 0)\n return rtn\n\n def _get_placement_actions(self, exclude=None):\n \"\"\"Return a list of legal actions that only involve placing a block\n from the inventory.\n\n exclude is a block id. It is the block that should not be placed. If None,\n any block can be placed.\"\"\"\n if not self._has_blocks_to_place(exclude=exclude):\n return []\n dirs = [_Vec3(0, 2, 0)]\n for dir_ in _adj_dirs():\n dirs.extend([dir_, dir_ + _Vec3(0, 1, 0)])\n if self._get_block(self._pos + dir_) in [_AIR, _WATER]:\n dirs.append(dir_ + _Vec3(0, -1, 0))\n rtn = []\n for dir_ in dirs:\n pos = self._pos + dir_\n if self._can_place(pos):\n rtn.append({'func': '_place', 'args': (pos,), 'kwargs': {\n 'exclude': exclude}})\n return rtn\n\n def _can_place(self, loc):\n \"\"\"Return whether or not the bot can place a block at that location\n independent of what it has in its inventory.\"\"\"\n non_blocks = [_AIR, _WATER, _LAVA]\n player = [self._pos, self._pos + _Vec3(0, 1, 0)]\n for dir_ in (_adj_dirs + [_Vec3(0, 1, 0), _Vec3(0, -1, 0)]):\n new_loc = loc + dir_\n if new_loc not in player and self._get_block(new_loc\n ) not in non_blocks:\n return True\n return False\n\n def _has_blocks_to_place(self, exclude=None):\n \"\"\"Return whether or not the bot can place a block from the\n inventory. If exclude is None, any block can be placed.\"\"\"\n for block_ in self._inventory:\n if block_ != exclude:\n return True\n return False\n\n def _set_block(self, pos, block_):\n \"\"\"Set a block. 
block_ is the block id.\"\"\"\n raise NotImplementedError\n\n def _move(self, pos):\n \"\"\"Move there only.\"\"\"\n self._pos = deepcopy(pos)\n\n\nclass _ImaginaryBot(_GenericBot):\n \"\"\"A bot used for finding paths that doesn't actually change blocks\n in the world.\"\"\"\n\n def __init__(self, pos, inventory=None):\n \"\"\"Create a new bot.\"\"\"\n _GenericBot.__init__(self, pos, inventory)\n self._changes = {}\n\n def _set_block(self, pos, block_):\n \"\"\"Set a block. block_ is the block id.\"\"\"\n self._changes[deepcopy(pos)] = block\n\n def _get_block(self, pos):\n \"\"\"Get the block at the position.\"\"\"\n if pos in self._changes:\n return self._changes[pos]\n else:\n return _get_mc().getBlock(pos)\n\n def get_block(self, pos):\n \"\"\"The public version.\"\"\"\n return self._get_block(pos)\n\n def __hash__(self):\n \"\"\"Return the hash.\"\"\"\n return hash(frozenset([self._pos] + _key_vals(self._inventory) +\n _key_vals(self._changes)))\n\n\nclass Bot(_GenericBot):\n \"\"\"The real bot.\n\n All vector arguments are Vec3s.\"\"\"\n _BOT_BLOCK = block.IRON_BLOCK.id\n\n def __init__(self):\n \"\"\"Create a bot next to the player.\"\"\"\n pos = _get_mc().player.getTilePos() + Vec3(2, 0, 0)\n pos = _Vec3(pos.x, pos.y, pos.z)\n _GenericBot.__init__(self, pos)\n self._pos = pos\n self._move(self._pos)\n\n @staticmethod\n def destroy_all():\n \"\"\"Destroy all bots within a small distance (in case I forget to\n destroy one).\"\"\"\n player_loc = _player_loc()\n minec = _get_mc()\n rad = 10\n for x in xrange(player_loc.x - rad, player_loc.x + rad):\n for y in xrange(player_loc.y - rad, player_loc.y + rad):\n for z in xrange(player_loc.z - rad, player_loc.z + rad):\n if minec.getBlock(x, y, z) == Bot._BOT_BLOCK:\n minec.setBlock(x, y, z, _AIR)\n\n def destroy(self):\n \"\"\"Set itself to air.\"\"\"\n self._set_block(self._pos, _AIR)\n self._set_block(self._pos + _Vec3(0, 1, 0), _AIR)\n\n def fetch(self, block_name):\n \"\"\"Mine and return a block to the 
player.\"\"\"\n imag_bot = _ImaginaryBot(self._pos, self._inventory)\n block_id = getattr(block, block_name).id\n block_loc = self._get_block_loc(block_id)\n mine_prob = _MineProblem(imag_bot, block_loc, block_id)\n mine_actions = astar(mine_prob, _mine_heuristic)\n self.take_actions(mine_actions, _DELAY)\n imag_bot = _ImaginaryBot(self._pos, self._inventory)\n player_loc = _player_loc()\n return_prob = _ReturnProblem(imag_bot, block_id, player_loc)\n return_actions = astar(return_prob, _return_heuristic)\n imag_bot.take_actions(return_actions)\n return_actions.append({'func': '_place', 'args': (imag_bot.get_pos(\n ) + player_loc) / 2, 'kwargs': {'block': block_id}})\n self.take_actions(return_actions, _DELAY)\n\n def _get_block_loc(self, block_id):\n \"\"\"Return the location of the block.\"\"\"\n find_prob = FindProblem(self._pos, block_id)\n dirs = bfs(find_prob)\n return self._pos + sum(dirs)\n\n def _set_block(self, pos, block_):\n \"\"\"Place an actual block in the world.\n\n block is a block id.\"\"\"\n _get_mc().setBlock(pos, block_)\n\n def _get_block(self, pos):\n \"\"\"Get the block at the position.\"\"\"\n return _get_mc().getBlock(pos)\n\n def _move(self, pos):\n \"\"\"Move there, and set the appropriate blocks.\"\"\"\n self._set_block(self._pos, _AIR)\n self._set_block(self._pos + _Vec3(0, 1, 0), _AIR)\n self._set_block(pos, self._BOT_BLOCK)\n self._set_block(pos + _Vec3(0, 1, 0), self._BOT_BLOCK)\n self._pos = pos\n\n\nclass FindProblem(SearchProblem):\n \"\"\"Problem for finding the location of a block in the world.\n\n A state in this problem is a location.\n \"\"\"\n\n def __init__(self, start_loc, block_id):\n \"\"\"Initialize.\"\"\"\n self._start_loc = deepcopy(start_loc)\n self._block_id = block_id\n\n def getStartState(self):\n \"\"\"Return the starting location.\"\"\"\n return self._start_loc\n\n def isGoalState(self, state):\n return _get_mc().getBlock(state) == self._block_id\n\n def getSuccessors(self, state):\n \"\"\"Return the 
successors.\"\"\"\n rtn = []\n for dir_ in _all_dirs():\n successor = state + dir_\n if successor.y <= _get_mc().getHeight(successor.x, successor.z\n ) and _get_mc().getBlock(successor) != _BEDROCK:\n rtn.append((successor, dir_, 1))\n return rtn\n\n\nclass _MineProblem(SearchProblem):\n \"\"\"The problem of finding the block and mining it (not returning\n it).\"\"\"\n\n def __init__(self, imag_bot, block_loc, block_id):\n \"\"\"Initialize the problem with an _ImaginaryBot.\n\n block_loc is a Vec3.\n \"\"\"\n self._bot = imag_bot\n self._block_loc = deepcopy(block_loc)\n self._block_id = block_id\n\n def get_block_loc(self):\n \"\"\"Return the block location.\"\"\"\n return deepcopy(self._block_loc)\n\n def get_block_id(self):\n \"\"\"Return the block it's trying to mine.\"\"\"\n return self._block_id\n\n def getStartState(self):\n \"\"\"Return the bot passed in.\"\"\"\n return self._bot\n\n def isGoalState(self, state):\n \"\"\"Return whether or not the bot has the block.\"\"\"\n return state.contains(self._block_id)\n\n def getSuccessors(self, state):\n \"\"\"Return the successors.\"\"\"\n rtn = []\n for action in state.get_legal_actions():\n successor = deepcopy(state)\n successor.take_action(action)\n rtn.append((successor, action, 1))\n return rtn\n\n\nclass _ReturnProblem(SearchProblem):\n \"\"\"The problem of returning to the player. 
This does not place the block\n next to the player.\"\"\"\n\n def __init__(self, imag_bot, block_, player_loc):\n \"\"\"Initialized the problem with an _ImaginaryBot.\n\n block is a block id.\"\"\"\n self._bot = imag_bot\n self._block = block_\n self._player_loc = player_loc\n\n def get_player_loc(self):\n \"\"\"Return the player location.\"\"\"\n return deepcopy(self._player_loc)\n\n def getStartState(self):\n \"\"\"Return the bot passed in.\"\"\"\n return self._bot\n\n def isGoalState(self, state):\n \"\"\"Return whether or not the bot is next to the player.\"\"\"\n diff = state.get_pos() - self._player_loc\n return diff.y == 0 and (diff.x == 0 or diff.z == 0) and abs(diff.x\n ) + abs(diff.z) == 2 and state.get_block(self._player_loc + \n diff / 2 + _Vec3(0, -1, 0)) not in (_AIR, _LAVA, _WATER)\n\n def getSuccessors(self, state):\n \"\"\"Return the successors.\"\"\"\n rtn = []\n for action in state.get_legal_actions(self._block):\n successor = deepcopy(state)\n successor.take_action(action)\n rtn.append((successor, action, 1))\n return rtn\n\n\n<mask token>\n", "step-5": "\"\"\"Module for the bot\"\"\"\n\nfrom copy import deepcopy\nfrom time import sleep\n\nimport mcpi.minecraft as minecraft\nfrom mcpi.vec3 import Vec3\nimport mcpi.block as block\n\nfrom search import SearchProblem, astar, bfs\nfrom singleton import singleton\n\n_AIR = block.AIR.id\n_WATER = block.WATER.id\n_LAVA = block.LAVA.id\n_BEDROCK = block.BEDROCK.id\n\n_DROP = 2 # It can drop at most this many\n_DROP_PLUS_1 = _DROP + 1\n_DELAY = 1\n\n\nclass _Vec3(Vec3):\n \"\"\"A Vec3 that is hashable. 
Everything in this program should use this\n class.\"\"\"\n\n def __hash__(self):\n \"\"\"Return the hash.\"\"\"\n return hash((self.x, self.y, self.z))\n\n def clone(self):\n \"\"\"Return a clone.\"\"\"\n return _Vec3(self.x, self.y, self.z)\n\n\nclass _GenericBot:\n \"\"\"A generic bot.\"\"\"\n\n def __init__(self, pos, inventory=None):\n \"\"\"Initialize with an empty inventory.\n\n inventory is a dictionary. If None, an empty one will be used.\"\"\"\n if inventory is None:\n self._inventory = {}\n else:\n self._inventory = deepcopy(inventory)\n self._pos = deepcopy(pos)\n\n def take_action(self, action):\n \"\"\"Take the action (acquired from _get_legal_actions).\"\"\"\n getattr(self, action['func'])(\n *action.get('args', ()), \n **action.get('kwargs', {})\n )\n\n def take_actions(self, actions, seconds=None):\n \"\"\"Take these actions. If seconds is not None, sleep 'seconds' \n seconds.\n \"\"\"\n if not actions:\n return\n\n self.take_action(actions[0])\n for action in actions[1:]:\n if seconds is not None:\n sleep(seconds)\n self.take_action(action)\n\n def get_pos(self):\n \"\"\"Return the position.\"\"\"\n return deepcopy(self._pos)\n\n def get_legal_actions(self, block_=None):\n \"\"\"Return a list of legal actions.\n\n If block_ is None, return all legal actions. 
Otherwise, return all\n legal actions that don't involve placing the block.\"\"\"\n return self._get_move_actions(block_) + self._get_mine_actions() + \\\n self._get_placement_actions(block_)\n\n def contains(self, block_):\n \"\"\"Return whether or not the bot contains the block id.\"\"\"\n return block_ in self._inventory\n\n def _get_block(self, pos):\n \"\"\"Get the block at the position.\"\"\"\n raise NotImplementedError\n\n def _place(self, loc, exclude=None, block_=None):\n \"\"\"Place a block from the inventory only.\n\n If exclude is not None, place a block that is not 'exclude'.\n If block is not None, place that block only.\n \"\"\"\n if not self._inventory:\n raise Exception('Inventory empty')\n\n if block_ is None:\n for key in self._inventory:\n if key != exclude:\n block_ = key\n break\n else:\n raise Exception((\n 'You requested not to place %s, but it is the only '\n 'block in the inventory.' % exclude\n ))\n\n if block_ not in self._inventory:\n raise Exception('Block %s is not in the inventory' % block_)\n\n if self._inventory[block_] == 1:\n del self._inventory[block_]\n else:\n self._inventory[block_] -= 1\n\n self._set_block(loc, block_)\n \n\n def _move_down(self):\n \"\"\"Move and mine the block below.\"\"\"\n new_pos = self._pos + _Vec3(0, -1, 0)\n block_ = self._get_block(new_pos)\n if block_ != _WATER:\n self._add_to_inv(block_)\n self._move(new_pos)\n \n def _add_to_inv(self, block_):\n \"\"\"Add the block to the inventory.\"\"\"\n if block_ in self._inventory:\n self._inventory[block_] += 1\n else:\n self._inventory[block_] = 1\n\n def _move_up(self, exclude=None):\n \"\"\"Move and place a block below.\n\n If exclude is not None, place a block that is not 'exclude'.\n \"\"\"\n self._move(self._pos + _Vec3(0, 1, 0))\n self._place(self._pos + _Vec3(0, -1, 0), exclude)\n\n def _mine(self, loc):\n \"\"\"Mine the block.\"\"\"\n block_ = self._get_block(loc)\n self._add_to_inv(block_)\n self._set_block(loc, _AIR)\n\n def 
_get_move_actions(self, exclude=None):\n \"\"\"Return a list of legal movement actions.\n\n exclude is the block to exclude.\n \"\"\"\n rtn = []\n\n # Check for moving up\n can_move_up = self._get_block(self._pos + _Vec3(0, 2, 0)) in {_AIR, _WATER}\n if can_move_up:\n if self._surrounded():\n rtn.append({\n 'func': '_move',\n 'args': (self._pos + _Vec3(0, 1, 0),)\n })\n else:\n rtn.append({\n 'func': '_move_up',\n 'args': (exclude,)\n })\n\n # Check for moving down\n hidden_block = self._get_block(self._pos + _Vec3(0, -2, 0))\n if hidden_block == _WATER or hidden_block not in {_AIR, _LAVA}:\n rtn.append({'func': '_move_down'})\n\n # Check for side moves \n for dir_ in _adj_dirs():\n rtn.extend(self._side_moves(dir_, can_move_up))\n\n return rtn\n\n def _side_moves(self, dir_, can_move_up):\n \"\"\"Return the list of side moves.\n\n dir_ is an adjacent direction.\n can_move_up is a boolean for whether or not the bot can move up.\n \"\"\"\n rtn = []\n base_pos = self._pos + dir_\n base_block = self._get_block(base_pos)\n empty_blocks = {_AIR, _WATER}\n\n # Check if it can move up\n if can_move_up and base_block not in {_AIR, _LAVA, _WATER}:\n for vert_dir in [_Vec3(0, 1, 0), _Vec3(0, 2, 0)]:\n if self._get_block(base_pos + vert_dir) not in empty_blocks:\n break\n else:\n rtn.append({\n 'func': '_move',\n 'args': (base_pos + _Vec3(0, 1, 0),)\n })\n\n # Check if it can move in that direction\n for vert_dir in [_Vec3(), _Vec3(0, 1, 0)]:\n if self._get_block(base_pos + vert_dir) not in empty_blocks:\n break\n\n # Fall\n else:\n pos = base_pos + _Vec3(0, -1, 0)\n for _ in xrange(_DROP_PLUS_1):\n block_ = self._get_block(pos)\n if block_ != _AIR:\n if block_ != _LAVA:\n rtn.append({\n 'func': '_move',\n 'args': (pos + _Vec3(0, 1, 0),)\n })\n break\n pos.y -= 1 \n \n def _surrounded(self):\n \"\"\"Return whether or not the bot is surrounded by water.\"\"\"\n for dir_ in _adj_dirs():\n if self._get_block(self._pos + dir_) != _WATER:\n return False\n return True\n\n def 
_get_mine_actions(self):\n \"\"\"Return a list of legal mining actions (that only involve mining\n and not moving).\"\"\"\n rtn = []\n dont_mine = {_AIR, _WATER, _LAVA}\n # Mine above.\n pos_above = self._pos + _Vec3(0, 2, 0)\n if self._get_block(pos_above) not in dont_mine:\n rtn.append({\n 'func': '_mine',\n 'args': (pos_above,)\n })\n\n for dir_ in _adj_dirs():\n pos = self._pos + dir_\n for _ in xrange(2):\n if self._get_block(pos) not in dont_mine:\n rtn.append({\n 'func': '_mine',\n 'args': (pos,)\n })\n pos = pos + _Vec3(0, 1, 0)\n\n return rtn\n\n def _get_placement_actions(self, exclude=None):\n \"\"\"Return a list of legal actions that only involve placing a block\n from the inventory.\n\n exclude is a block id. It is the block that should not be placed. If None,\n any block can be placed.\"\"\"\n if not self._has_blocks_to_place(exclude=exclude):\n return []\n\n dirs = [_Vec3(0, 2, 0)]\n for dir_ in _adj_dirs():\n dirs.extend([dir_, dir_ + _Vec3(0, 1, 0)])\n if self._get_block(self._pos + dir_) in [_AIR, _WATER]:\n dirs.append(dir_ + _Vec3(0, -1, 0))\n\n rtn = []\n for dir_ in dirs:\n pos = self._pos + dir_\n if self._can_place(pos):\n rtn.append({\n 'func': '_place',\n 'args': (pos,),\n 'kwargs': {'exclude': exclude}\n })\n\n return rtn\n\n def _can_place(self, loc):\n \"\"\"Return whether or not the bot can place a block at that location\n independent of what it has in its inventory.\"\"\"\n non_blocks = [_AIR, _WATER, _LAVA]\n player = [self._pos, self._pos + _Vec3(0, 1, 0)]\n for dir_ in _adj_dirs + [_Vec3(0, 1, 0), _Vec3(0, -1, 0)]:\n new_loc = loc + dir_\n if new_loc not in player and self._get_block(new_loc) \\\n not in non_blocks:\n return True\n return False\n\n def _has_blocks_to_place(self, exclude=None):\n \"\"\"Return whether or not the bot can place a block from the\n inventory. 
If exclude is None, any block can be placed.\"\"\"\n for block_ in self._inventory:\n if block_ != exclude:\n return True\n return False\n\n def _set_block(self, pos, block_):\n \"\"\"Set a block. block_ is the block id.\"\"\"\n raise NotImplementedError\n\n def _move(self, pos):\n \"\"\"Move there only.\"\"\"\n self._pos = deepcopy(pos)\n\n\nclass _ImaginaryBot(_GenericBot):\n \"\"\"A bot used for finding paths that doesn't actually change blocks\n in the world.\"\"\"\n\n def __init__(self, pos, inventory=None):\n \"\"\"Create a new bot.\"\"\"\n _GenericBot.__init__(self, pos, inventory)\n self._changes = {} # Changes to the world\n\n def _set_block(self, pos, block_):\n \"\"\"Set a block. block_ is the block id.\"\"\"\n self._changes[deepcopy(pos)] = block\n\n def _get_block(self, pos):\n \"\"\"Get the block at the position.\"\"\"\n if pos in self._changes:\n return self._changes[pos]\n else:\n return _get_mc().getBlock(pos)\n\n def get_block(self, pos):\n \"\"\"The public version.\"\"\"\n return self._get_block(pos)\n\n def __hash__(self):\n \"\"\"Return the hash.\"\"\"\n return hash(frozenset([self._pos] + \\\n _key_vals(self._inventory) + \\\n _key_vals(self._changes)\n ))\n\n\nclass Bot(_GenericBot):\n \"\"\"The real bot.\n\n All vector arguments are Vec3s.\"\"\"\n\n _BOT_BLOCK = block.IRON_BLOCK.id\n\n def __init__(self):\n \"\"\"Create a bot next to the player.\"\"\"\n pos = _get_mc().player.getTilePos() + Vec3(2, 0, 0)\n pos = _Vec3(pos.x, pos.y, pos.z)\n _GenericBot.__init__(self, pos)\n self._pos = pos\n self._move(self._pos)\n\n @staticmethod\n def destroy_all():\n \"\"\"Destroy all bots within a small distance (in case I forget to\n destroy one).\"\"\"\n player_loc = _player_loc()\n minec = _get_mc()\n rad = 10\n for x in xrange(player_loc.x - rad, player_loc.x + rad):\n for y in xrange(player_loc.y - rad, player_loc.y + rad):\n for z in xrange(player_loc.z - rad, player_loc.z + rad):\n if minec.getBlock(x, y, z) == Bot._BOT_BLOCK:\n minec.setBlock(x, 
y, z, _AIR)\n\n def destroy(self):\n \"\"\"Set itself to air.\"\"\"\n self._set_block(self._pos, _AIR)\n self._set_block(self._pos + _Vec3(0, 1, 0), _AIR)\n\n def fetch(self, block_name):\n \"\"\"Mine and return a block to the player.\"\"\"\n imag_bot = _ImaginaryBot(self._pos, self._inventory)\n block_id = getattr(block, block_name).id\n block_loc = self._get_block_loc(block_id)\n mine_prob = _MineProblem(imag_bot, block_loc, block_id)\n mine_actions = astar(mine_prob, _mine_heuristic)\n self.take_actions(mine_actions, _DELAY)\n imag_bot = _ImaginaryBot(self._pos, self._inventory)\n player_loc = _player_loc()\n return_prob = _ReturnProblem(imag_bot, block_id, player_loc)\n return_actions = astar(return_prob, _return_heuristic)\n imag_bot.take_actions(return_actions)\n return_actions.append({\n 'func': '_place',\n 'args': (imag_bot.get_pos() + player_loc) / 2,\n 'kwargs': {'block': block_id}\n })\n self.take_actions(return_actions, _DELAY)\n\n def _get_block_loc(self, block_id):\n \"\"\"Return the location of the block.\"\"\"\n find_prob = FindProblem(self._pos, block_id)\n dirs = bfs(find_prob)\n return self._pos + sum(dirs)\n\n def _set_block(self, pos, block_):\n \"\"\"Place an actual block in the world.\n\n block is a block id.\"\"\"\n _get_mc().setBlock(pos, block_)\n\n def _get_block(self, pos):\n \"\"\"Get the block at the position.\"\"\"\n return _get_mc().getBlock(pos)\n\n def _move(self, pos):\n \"\"\"Move there, and set the appropriate blocks.\"\"\"\n self._set_block(self._pos, _AIR)\n self._set_block(self._pos + _Vec3(0, 1, 0), _AIR)\n self._set_block(pos, self._BOT_BLOCK)\n self._set_block(pos + _Vec3(0, 1, 0), self._BOT_BLOCK)\n self._pos = pos\n\n\nclass FindProblem(SearchProblem):\n \"\"\"Problem for finding the location of a block in the world.\n\n A state in this problem is a location.\n \"\"\"\n\n def __init__(self, start_loc, block_id):\n \"\"\"Initialize.\"\"\"\n self._start_loc = deepcopy(start_loc)\n self._block_id = block_id\n\n def 
getStartState(self):\n \"\"\"Return the starting location.\"\"\"\n return self._start_loc\n\n def isGoalState(self, state):\n return _get_mc().getBlock(state) == self._block_id\n\n def getSuccessors(self, state):\n \"\"\"Return the successors.\"\"\"\n rtn = []\n for dir_ in _all_dirs():\n successor = state + dir_\n if successor.y <= _get_mc().getHeight(successor.x, successor.z) \\\n and _get_mc().getBlock(successor) != _BEDROCK:\n rtn.append((successor, dir_, 1))\n return rtn\n\n\nclass _MineProblem(SearchProblem):\n \"\"\"The problem of finding the block and mining it (not returning\n it).\"\"\"\n\n def __init__(self, imag_bot, block_loc, block_id):\n \"\"\"Initialize the problem with an _ImaginaryBot.\n\n block_loc is a Vec3.\n \"\"\"\n self._bot = imag_bot\n self._block_loc = deepcopy(block_loc)\n self._block_id = block_id\n\n def get_block_loc(self):\n \"\"\"Return the block location.\"\"\"\n return deepcopy(self._block_loc)\n\n def get_block_id(self):\n \"\"\"Return the block it's trying to mine.\"\"\"\n return self._block_id\n\n def getStartState(self):\n \"\"\"Return the bot passed in.\"\"\"\n return self._bot\n\n def isGoalState(self, state):\n \"\"\"Return whether or not the bot has the block.\"\"\"\n return state.contains(self._block_id)\n\n def getSuccessors(self, state):\n \"\"\"Return the successors.\"\"\"\n rtn = []\n for action in state.get_legal_actions():\n successor = deepcopy(state)\n successor.take_action(action)\n rtn.append((successor, action, 1))\n return rtn\n\n\nclass _ReturnProblem(SearchProblem):\n \"\"\"The problem of returning to the player. 
This does not place the block\n next to the player.\"\"\"\n\n def __init__(self, imag_bot, block_, player_loc):\n \"\"\"Initialized the problem with an _ImaginaryBot.\n\n block is a block id.\"\"\"\n self._bot = imag_bot\n self._block = block_\n self._player_loc = player_loc\n\n def get_player_loc(self):\n \"\"\"Return the player location.\"\"\"\n return deepcopy(self._player_loc)\n\n def getStartState(self):\n \"\"\"Return the bot passed in.\"\"\"\n return self._bot\n\n def isGoalState(self, state):\n \"\"\"Return whether or not the bot is next to the player.\"\"\"\n diff = state.get_pos() - self._player_loc\n return diff.y == 0 and (diff.x == 0 or diff.z == 0) and \\\n abs(diff.x) + abs(diff.z) == 2 and \\\n state.get_block(self._player_loc + diff/2 + _Vec3(0, -1, 0)) not in \\\n (_AIR, _LAVA, _WATER)\n\n def getSuccessors(self, state):\n \"\"\"Return the successors.\"\"\"\n rtn = []\n for action in state.get_legal_actions(self._block):\n successor = deepcopy(state)\n successor.take_action(action)\n rtn.append((successor, action, 1))\n return rtn\n\n\ndef _mine_heuristic(bot, problem):\n \"\"\"Return the mining heuristic.\n\n bot is an _ImaginaryBot.\n \"\"\"\n if bot.contains(problem.get_block_id()):\n return 0\n\n bot_pos = bot.get_pos()\n dest_pos = problem.get_block_loc()\n\n # If man == dy: return man + 1\n # If man > dy: return man\n # If man < dy: return dy?\n man_dist = _manhattan((bot_pos.x, bot_pos.z), (dest_pos.x, dest_pos.z))\n y_diff = bot_pos.y - dest_pos.y\n if y_diff < 0:\n y_diff += 1\n\n if y_diff == 0:\n return man_dist\n\n # Transform so that it's only dropping\n drop = _DROP if y_diff > 0 else 1\n y_diff = abs(y_diff)\n\n drops = _drops(y_diff, drop)\n\n if man_dist > drops:\n return man_dist\n if man_dist == drops:\n return man_dist + 1\n if drop == 1:\n return drops\n if y_diff % drop == 1:\n return drops\n return drops + 1\n \n\ndef _drops(dist, drop):\n \"\"\"Return the number of times it takes to drop a distance dist. 
drop is the\n length of one drop. Both are assumed positive.\"\"\"\n rtn = dist / drop\n if dist % drop != 0:\n rtn += 1\n return rtn\n \n\ndef _return_heuristic(bot, problem):\n \"\"\"Return the return heuristic.\n\n bot is an _ImaginaryBot.\n \"\"\"\n bot_pos = bot.get_pos()\n player_pos = problem.get_player_loc()\n bot_plane_pos = (bot.x, bot.z)\n\n y_diff = bot_pos.y - player_pos.y\n\n drop = _DROP if y_diff > 0 else 1\n y_diff = abs(y_diff)\n drops = _drops(y_diff, drop)\n min_man = float('inf')\n for dir_ in _adj_dirs():\n loc = player_pos + 2 * dir_\n man_dist = _manhattan(bot_plane_pos, (loc.x, loc.z))\n if man_dist < min_man:\n min_man = man_dist\n if man_dist < drops:\n return drops\n return min_man\n\n\ndef _to_my_vec3(vec):\n \"\"\"Return the _Vec3 alternative of the Vec3.\"\"\"\n return _Vec3(vec.x, vec.y, vec.z)\n\n\ndef _player_loc():\n \"\"\"Return the player's location.\"\"\"\n return _to_my_vec3(_get_mc().player.getTilePos())\n\n\ndef _adj_dirs():\n \"\"\"Return the adjacent directions.\"\"\"\n return [_Vec3(1, 0, 0), _Vec3(-1, 0, 0), _Vec3(0, 0, 1), _Vec3(0, 0, -1)]\n\n\ndef _all_dirs():\n \"\"\"Return all adjacent directions.\"\"\"\n return _adj_dirs() + [_Vec3(0, 1, 0), _Vec3(0, -1, 0)]\n\n\ndef _manhattan(pos1, pos2):\n \"\"\"Return the manhattan distance. pos1 and pos2 should be iterable.\"\"\"\n return sum(abs(val1 - val2) for val1, val2 in zip(pos1, pos2))\n\n\n@singleton\ndef _get_mc():\n \"\"\"Return the Minecraft instance.\"\"\"\n return minecraft.Minecraft.create()\n\n\ndef _key_vals(dict_):\n \"\"\"Return a list of key-val tuples.\"\"\"\n return [(key, val) for key, val in dict_.iteritems()]\n\n", "step-ids": [ 52, 53, 58, 60, 79 ] }
[ 52, 53, 58, 60, 79 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class ImageForm(forms.ModelForm): <|reserved_special_token_0|> class Meta: model = Profile fields = ['userimage'] <|reserved_special_token_1|> <|reserved_special_token_0|> class ImageForm(forms.ModelForm): userimage = forms.ImageField(required=False, error_messages={'invalid': 'Image file only'}, widget=forms.FileInput) class Meta: model = Profile fields = ['userimage'] <|reserved_special_token_1|> from django import forms from .models import Profile class ImageForm(forms.ModelForm): userimage = forms.ImageField(required=False, error_messages={'invalid': 'Image file only'}, widget=forms.FileInput) class Meta: model = Profile fields = ['userimage'] <|reserved_special_token_1|> from django import forms from .models import Profile class ImageForm(forms.ModelForm): userimage = forms.ImageField(required=False, error_messages={'invalid':("Image file only")}, widget=forms.FileInput) class Meta: model = Profile fields = ['userimage',]
flexible
{ "blob_id": "9081d0f75ac53ab8d0bafb39cd46a2fec8a5135f", "index": 3813, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\nclass ImageForm(forms.ModelForm):\n <mask token>\n\n\n class Meta:\n model = Profile\n fields = ['userimage']\n", "step-3": "<mask token>\n\n\nclass ImageForm(forms.ModelForm):\n userimage = forms.ImageField(required=False, error_messages={'invalid':\n 'Image file only'}, widget=forms.FileInput)\n\n\n class Meta:\n model = Profile\n fields = ['userimage']\n", "step-4": "from django import forms\nfrom .models import Profile\n\n\nclass ImageForm(forms.ModelForm):\n userimage = forms.ImageField(required=False, error_messages={'invalid':\n 'Image file only'}, widget=forms.FileInput)\n\n\n class Meta:\n model = Profile\n fields = ['userimage']\n", "step-5": "from django import forms\nfrom .models import Profile\n\n\n\n\n \nclass ImageForm(forms.ModelForm):\n userimage = forms.ImageField(required=False, error_messages={'invalid':(\"Image file only\")}, widget=forms.FileInput)\n class Meta:\n model = Profile\n fields = ['userimage',]\n\n\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
# coding: utf-8 # # Configuration # In[1]: CONNECTION_STRING = "mongodb://localhost:27017" DATABASE_NAME = "off" COLLECTION_NAME = "products" # # MongDB connection # In[2]: from pymongo import MongoClient from bson.code import Code import plotly, pymongo plotly.offline.init_notebook_mode() from plotly.graph_objs import Bar client = MongoClient(CONNECTION_STRING) db = client[DATABASE_NAME] openfood = db[COLLECTION_NAME] # # Nutrition grade # In[6]: mapper = Code(""" function () { if (typeof this.nutrition_grades !== 'undefined' && this.nutrition_grades !== ""){ emit(this.nutrition_grades, 1); } }""") reducer = Code(""" function (key, values) { var total = 0; for (var i = 0; i < values.length; i++) { total += values[i]; } return total; }""") grades = openfood.inline_map_reduce(mapper, reducer) print grades # In[14]: import numpy as np import matplotlib.pyplot as plt objects = [item['_id'] for item in grades] # [a,b,c,d,e] y_pos = np.arange(len(objects)) count = [item['value'] for item in grades] plt.bar(y_pos, count, align='center', alpha=0.5) plt.xticks(y_pos, objects) plt.ylabel('Count') plt.title('Nutrition Grades') plt.show() # Each food entry states the countries which the food it is sold. Below, we try to find out the list of countries which the food are sold. 
# # Nutrients (100g) # In[16]: mapper = Code(""" function () { if (typeof this.nutriments !== 'undefined' && this.nutriments !== "") { for (var key in this.nutriments) { if (key.match(/.*100g/)) emit(key, null); } } }""") reducer = Code(""" function (key, values) { return key }""") nutriments_100g_fields = openfood.inline_map_reduce(mapper, reducer) for n in nutriments_100g_fields: print n # In[17]: for n in nutriments_100g_fields: print n['_id'] # # Additives # In[24]: mapper = Code(""" function () { if (typeof this.additives !== "undefined" && this.additives_n >= 0){ var add = this.additives.substring(3, this.additives.length-3); // remove "^ [ " and " ] $" var add_str = add.split(" ] [ "); for (var i = 0; i < add_str.length; i++){ var additive_parts = add_str[i].split(" -> exists -- "); if (additive_parts.length == 2){ var add_code = additive_parts[0].split(" -> ")[1]; emit(add_code, 1); } } } }""") reducer = Code(""" function (key, values) { var total = 0; for (var i = 0; i < values.length; i++) { total += values[i]; } return total; }""") additives_stats = openfood.inline_map_reduce(mapper, reducer) print additives_stats # In[29]: add_clean = [(x['value'], x['_id']) for x in additives_stats] add_clean.sort() print len(add_clean) for add in add_clean: print "{}: {}".format(add[0], add[1])
normal
{ "blob_id": "2ecd234753fabbca2829dc86db2f740e371e4ea7", "index": 6499, "step-1": "\n# coding: utf-8\n\n# # Configuration\n\n# In[1]:\n\nCONNECTION_STRING = \"mongodb://localhost:27017\"\nDATABASE_NAME = \"off\"\nCOLLECTION_NAME = \"products\"\n\n\n# # MongDB connection\n\n# In[2]:\n\nfrom pymongo import MongoClient\nfrom bson.code import Code\nimport plotly, pymongo\nplotly.offline.init_notebook_mode()\nfrom plotly.graph_objs import Bar\n\nclient = MongoClient(CONNECTION_STRING)\ndb = client[DATABASE_NAME]\nopenfood = db[COLLECTION_NAME]\n\n\n# # Nutrition grade\n\n# In[6]:\n\nmapper = Code(\"\"\"\n function () {\n if (typeof this.nutrition_grades !== 'undefined' && this.nutrition_grades !== \"\"){\n emit(this.nutrition_grades, 1);\n }\n }\"\"\")\nreducer = Code(\"\"\"\n function (key, values) {\n var total = 0;\n for (var i = 0; i < values.length; i++) {\n total += values[i];\n }\n return total;\n }\"\"\")\n\ngrades = openfood.inline_map_reduce(mapper, reducer)\nprint grades\n\n\n# In[14]:\n\nimport numpy as np\nimport matplotlib.pyplot as plt\n \nobjects = [item['_id'] for item in grades] # [a,b,c,d,e]\ny_pos = np.arange(len(objects))\ncount = [item['value'] for item in grades]\n \nplt.bar(y_pos, count, align='center', alpha=0.5)\nplt.xticks(y_pos, objects)\nplt.ylabel('Count')\nplt.title('Nutrition Grades')\n \nplt.show()\n\n\n# Each food entry states the countries which the food it is sold. 
Below, we try to find out the list of countries which the food are sold.\n\n# # Nutrients (100g)\n\n# In[16]:\n\nmapper = Code(\"\"\"\n function () {\n if (typeof this.nutriments !== 'undefined' && this.nutriments !== \"\") {\n for (var key in this.nutriments) {\n if (key.match(/.*100g/))\n emit(key, null);\n }\n }\n }\"\"\")\nreducer = Code(\"\"\"\n function (key, values) {\n return key\n }\"\"\")\n\nnutriments_100g_fields = openfood.inline_map_reduce(mapper, reducer)\nfor n in nutriments_100g_fields:\n print n\n\n\n# In[17]:\n\nfor n in nutriments_100g_fields:\n print n['_id']\n\n\n# # Additives\n\n# In[24]:\n\nmapper = Code(\"\"\"\n function () {\n if (typeof this.additives !== \"undefined\" && this.additives_n >= 0){\n var add = this.additives.substring(3, this.additives.length-3); // remove \"^ [ \" and \" ] $\"\n var add_str = add.split(\" ] [ \");\n for (var i = 0; i < add_str.length; i++){\n var additive_parts = add_str[i].split(\" -> exists -- \");\n if (additive_parts.length == 2){\n var add_code = additive_parts[0].split(\" -> \")[1];\n emit(add_code, 1);\n }\n }\n }\n }\"\"\")\nreducer = Code(\"\"\"\n function (key, values) {\n var total = 0;\n for (var i = 0; i < values.length; i++) {\n total += values[i];\n }\n return total;\n }\"\"\")\n\nadditives_stats = openfood.inline_map_reduce(mapper, reducer)\nprint additives_stats\n\n\n# In[29]:\n\nadd_clean = [(x['value'], x['_id']) for x in additives_stats]\nadd_clean.sort()\n\nprint len(add_clean)\nfor add in add_clean:\n print \"{}: {}\".format(add[0], add[1])\n\n", "step-2": null, "step-3": null, "step-4": null, "step-5": null, "step-ids": [ 0 ] }
[ 0 ]
# Copyright (C) 2020. Huawei Technologies Co., Ltd. All rights reserved. # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. from smarts.core.utils.class_factory import ClassRegister agent_registry = ClassRegister() def register(locator: str, entry_point, **kwargs): """Register an AgentSpec with the zoo. In order to load a registered AgentSpec it needs to be reachable from a directory contained in the PYTHONPATH. Args: locator: A string in the format of 'locator-name' entry_point: A callable that returns an AgentSpec or an AgentSpec object For example: .. code-block:: python register( locator="motion-planner-agent-v0", entry_point=lambda **kwargs: AgentSpec( interface=AgentInterface(waypoint_paths=True, action=ActionSpaceType.TargetPose), agent_builder=MotionPlannerAgent, ), ) """ agent_registry.register(name=locator, entry_point=entry_point, **kwargs) def make(locator: str, **kwargs): """Create an AgentSpec from the given locator. 
In order to load a registered AgentSpec it needs to be reachable from a directory contained in the PYTHONPATH. Args: locator: A string in the format of 'path.to.file:locator-name' where the path is in the form `{PYTHONPATH}[n]/path/to/file.py` kwargs: Additional arguments to be passed to the constructed class. Returns: AgentSpec: The agent specifications needed to instantiate and configure an agent. """ from smarts.zoo.agent_spec import AgentSpec agent_spec = agent_registry.make(locator, **kwargs) assert isinstance( agent_spec, AgentSpec ), f"Expected make to produce an instance of AgentSpec, got: {agent_spec}" return agent_spec def make_agent(locator: str, **kwargs): """Create an Agent from the given agent spec locator. In order to load a registered AgentSpec it needs to be reachable from a directory contained in the PYTHONPATH. Args: locator: A string in the format of 'path.to.file:locator-name' where the path is in the form `{PYTHONPATH}[n]/path/to/file.py` kwargs: Additional arguments to be passed to the constructed class. Returns: Tuple[Agent, AgentInterface]: The agent and its interface. """ agent_spec = make(locator, **kwargs) return agent_spec.build_agent(), agent_spec.interface
normal
{ "blob_id": "b77c40c89c88b49c851e9a14c67cf0799d6de847", "index": 9235, "step-1": "<mask token>\n\n\ndef register(locator: str, entry_point, **kwargs):\n \"\"\"Register an AgentSpec with the zoo.\n\n In order to load a registered AgentSpec it needs to be reachable from a\n directory contained in the PYTHONPATH.\n\n Args:\n locator:\n A string in the format of 'locator-name'\n entry_point:\n A callable that returns an AgentSpec or an AgentSpec object\n\n For example:\n\n .. code-block:: python\n\n register(\n locator=\"motion-planner-agent-v0\",\n entry_point=lambda **kwargs: AgentSpec(\n interface=AgentInterface(waypoint_paths=True, action=ActionSpaceType.TargetPose),\n agent_builder=MotionPlannerAgent,\n ),\n )\n \"\"\"\n agent_registry.register(name=locator, entry_point=entry_point, **kwargs)\n\n\ndef make(locator: str, **kwargs):\n \"\"\"Create an AgentSpec from the given locator.\n\n In order to load a registered AgentSpec it needs to be reachable from a\n directory contained in the PYTHONPATH.\n\n Args:\n locator:\n A string in the format of 'path.to.file:locator-name' where the path\n is in the form `{PYTHONPATH}[n]/path/to/file.py`\n kwargs:\n Additional arguments to be passed to the constructed class.\n Returns:\n AgentSpec: The agent specifications needed to instantiate and configure an agent.\n \"\"\"\n from smarts.zoo.agent_spec import AgentSpec\n agent_spec = agent_registry.make(locator, **kwargs)\n assert isinstance(agent_spec, AgentSpec\n ), f'Expected make to produce an instance of AgentSpec, got: {agent_spec}'\n return agent_spec\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\ndef register(locator: str, entry_point, **kwargs):\n \"\"\"Register an AgentSpec with the zoo.\n\n In order to load a registered AgentSpec it needs to be reachable from a\n directory contained in the PYTHONPATH.\n\n Args:\n locator:\n A string in the format of 'locator-name'\n entry_point:\n A callable that returns an AgentSpec or an AgentSpec object\n\n For example:\n\n .. 
code-block:: python\n\n register(\n locator=\"motion-planner-agent-v0\",\n entry_point=lambda **kwargs: AgentSpec(\n interface=AgentInterface(waypoint_paths=True, action=ActionSpaceType.TargetPose),\n agent_builder=MotionPlannerAgent,\n ),\n )\n \"\"\"\n agent_registry.register(name=locator, entry_point=entry_point, **kwargs)\n\n\ndef make(locator: str, **kwargs):\n \"\"\"Create an AgentSpec from the given locator.\n\n In order to load a registered AgentSpec it needs to be reachable from a\n directory contained in the PYTHONPATH.\n\n Args:\n locator:\n A string in the format of 'path.to.file:locator-name' where the path\n is in the form `{PYTHONPATH}[n]/path/to/file.py`\n kwargs:\n Additional arguments to be passed to the constructed class.\n Returns:\n AgentSpec: The agent specifications needed to instantiate and configure an agent.\n \"\"\"\n from smarts.zoo.agent_spec import AgentSpec\n agent_spec = agent_registry.make(locator, **kwargs)\n assert isinstance(agent_spec, AgentSpec\n ), f'Expected make to produce an instance of AgentSpec, got: {agent_spec}'\n return agent_spec\n\n\ndef make_agent(locator: str, **kwargs):\n \"\"\"Create an Agent from the given agent spec locator.\n\n In order to load a registered AgentSpec it needs to be reachable from a\n directory contained in the PYTHONPATH.\n\n Args:\n locator:\n A string in the format of 'path.to.file:locator-name' where the path\n is in the form `{PYTHONPATH}[n]/path/to/file.py`\n kwargs:\n Additional arguments to be passed to the constructed class.\n Returns:\n Tuple[Agent, AgentInterface]: The agent and its interface.\n \"\"\"\n agent_spec = make(locator, **kwargs)\n return agent_spec.build_agent(), agent_spec.interface\n", "step-3": "<mask token>\nagent_registry = ClassRegister()\n\n\ndef register(locator: str, entry_point, **kwargs):\n \"\"\"Register an AgentSpec with the zoo.\n\n In order to load a registered AgentSpec it needs to be reachable from a\n directory contained in the PYTHONPATH.\n\n Args:\n 
locator:\n A string in the format of 'locator-name'\n entry_point:\n A callable that returns an AgentSpec or an AgentSpec object\n\n For example:\n\n .. code-block:: python\n\n register(\n locator=\"motion-planner-agent-v0\",\n entry_point=lambda **kwargs: AgentSpec(\n interface=AgentInterface(waypoint_paths=True, action=ActionSpaceType.TargetPose),\n agent_builder=MotionPlannerAgent,\n ),\n )\n \"\"\"\n agent_registry.register(name=locator, entry_point=entry_point, **kwargs)\n\n\ndef make(locator: str, **kwargs):\n \"\"\"Create an AgentSpec from the given locator.\n\n In order to load a registered AgentSpec it needs to be reachable from a\n directory contained in the PYTHONPATH.\n\n Args:\n locator:\n A string in the format of 'path.to.file:locator-name' where the path\n is in the form `{PYTHONPATH}[n]/path/to/file.py`\n kwargs:\n Additional arguments to be passed to the constructed class.\n Returns:\n AgentSpec: The agent specifications needed to instantiate and configure an agent.\n \"\"\"\n from smarts.zoo.agent_spec import AgentSpec\n agent_spec = agent_registry.make(locator, **kwargs)\n assert isinstance(agent_spec, AgentSpec\n ), f'Expected make to produce an instance of AgentSpec, got: {agent_spec}'\n return agent_spec\n\n\ndef make_agent(locator: str, **kwargs):\n \"\"\"Create an Agent from the given agent spec locator.\n\n In order to load a registered AgentSpec it needs to be reachable from a\n directory contained in the PYTHONPATH.\n\n Args:\n locator:\n A string in the format of 'path.to.file:locator-name' where the path\n is in the form `{PYTHONPATH}[n]/path/to/file.py`\n kwargs:\n Additional arguments to be passed to the constructed class.\n Returns:\n Tuple[Agent, AgentInterface]: The agent and its interface.\n \"\"\"\n agent_spec = make(locator, **kwargs)\n return agent_spec.build_agent(), agent_spec.interface\n", "step-4": "from smarts.core.utils.class_factory import ClassRegister\nagent_registry = ClassRegister()\n\n\ndef register(locator: str, 
entry_point, **kwargs):\n \"\"\"Register an AgentSpec with the zoo.\n\n In order to load a registered AgentSpec it needs to be reachable from a\n directory contained in the PYTHONPATH.\n\n Args:\n locator:\n A string in the format of 'locator-name'\n entry_point:\n A callable that returns an AgentSpec or an AgentSpec object\n\n For example:\n\n .. code-block:: python\n\n register(\n locator=\"motion-planner-agent-v0\",\n entry_point=lambda **kwargs: AgentSpec(\n interface=AgentInterface(waypoint_paths=True, action=ActionSpaceType.TargetPose),\n agent_builder=MotionPlannerAgent,\n ),\n )\n \"\"\"\n agent_registry.register(name=locator, entry_point=entry_point, **kwargs)\n\n\ndef make(locator: str, **kwargs):\n \"\"\"Create an AgentSpec from the given locator.\n\n In order to load a registered AgentSpec it needs to be reachable from a\n directory contained in the PYTHONPATH.\n\n Args:\n locator:\n A string in the format of 'path.to.file:locator-name' where the path\n is in the form `{PYTHONPATH}[n]/path/to/file.py`\n kwargs:\n Additional arguments to be passed to the constructed class.\n Returns:\n AgentSpec: The agent specifications needed to instantiate and configure an agent.\n \"\"\"\n from smarts.zoo.agent_spec import AgentSpec\n agent_spec = agent_registry.make(locator, **kwargs)\n assert isinstance(agent_spec, AgentSpec\n ), f'Expected make to produce an instance of AgentSpec, got: {agent_spec}'\n return agent_spec\n\n\ndef make_agent(locator: str, **kwargs):\n \"\"\"Create an Agent from the given agent spec locator.\n\n In order to load a registered AgentSpec it needs to be reachable from a\n directory contained in the PYTHONPATH.\n\n Args:\n locator:\n A string in the format of 'path.to.file:locator-name' where the path\n is in the form `{PYTHONPATH}[n]/path/to/file.py`\n kwargs:\n Additional arguments to be passed to the constructed class.\n Returns:\n Tuple[Agent, AgentInterface]: The agent and its interface.\n \"\"\"\n agent_spec = make(locator, 
**kwargs)\n return agent_spec.build_agent(), agent_spec.interface\n", "step-5": "# Copyright (C) 2020. Huawei Technologies Co., Ltd. All rights reserved.\n#\n# Permission is hereby granted, free of charge, to any person obtaining a copy\n# of this software and associated documentation files (the \"Software\"), to deal\n# in the Software without restriction, including without limitation the rights\n# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n# copies of the Software, and to permit persons to whom the Software is\n# furnished to do so, subject to the following conditions:\n#\n# The above copyright notice and this permission notice shall be included in\n# all copies or substantial portions of the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n# FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n# THE SOFTWARE.\nfrom smarts.core.utils.class_factory import ClassRegister\n\nagent_registry = ClassRegister()\n\n\ndef register(locator: str, entry_point, **kwargs):\n \"\"\"Register an AgentSpec with the zoo.\n\n In order to load a registered AgentSpec it needs to be reachable from a\n directory contained in the PYTHONPATH.\n\n Args:\n locator:\n A string in the format of 'locator-name'\n entry_point:\n A callable that returns an AgentSpec or an AgentSpec object\n\n For example:\n\n .. 
code-block:: python\n\n register(\n locator=\"motion-planner-agent-v0\",\n entry_point=lambda **kwargs: AgentSpec(\n interface=AgentInterface(waypoint_paths=True, action=ActionSpaceType.TargetPose),\n agent_builder=MotionPlannerAgent,\n ),\n )\n \"\"\"\n\n agent_registry.register(name=locator, entry_point=entry_point, **kwargs)\n\n\ndef make(locator: str, **kwargs):\n \"\"\"Create an AgentSpec from the given locator.\n\n In order to load a registered AgentSpec it needs to be reachable from a\n directory contained in the PYTHONPATH.\n\n Args:\n locator:\n A string in the format of 'path.to.file:locator-name' where the path\n is in the form `{PYTHONPATH}[n]/path/to/file.py`\n kwargs:\n Additional arguments to be passed to the constructed class.\n Returns:\n AgentSpec: The agent specifications needed to instantiate and configure an agent.\n \"\"\"\n\n from smarts.zoo.agent_spec import AgentSpec\n\n agent_spec = agent_registry.make(locator, **kwargs)\n assert isinstance(\n agent_spec, AgentSpec\n ), f\"Expected make to produce an instance of AgentSpec, got: {agent_spec}\"\n\n return agent_spec\n\n\ndef make_agent(locator: str, **kwargs):\n \"\"\"Create an Agent from the given agent spec locator.\n\n In order to load a registered AgentSpec it needs to be reachable from a\n directory contained in the PYTHONPATH.\n\n Args:\n locator:\n A string in the format of 'path.to.file:locator-name' where the path\n is in the form `{PYTHONPATH}[n]/path/to/file.py`\n kwargs:\n Additional arguments to be passed to the constructed class.\n Returns:\n Tuple[Agent, AgentInterface]: The agent and its interface.\n \"\"\"\n\n agent_spec = make(locator, **kwargs)\n\n return agent_spec.build_agent(), agent_spec.interface\n", "step-ids": [ 2, 3, 4, 5, 6 ] }
[ 2, 3, 4, 5, 6 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> {'name': 'ldap_user', 'summary': '', 'description': '域账号用户管理,登录及查询用户信息', 'author': '', 'website': '', 'source': {'git': 'https://github.com/LeiQiao/Parasite-Plugins.git', 'branch': 'master'}, 'category': '', 'version': '0.1', 'api': {'/user/token': 'user_api.gen_token', '/user/captcha': 'user_api.gen_captcha', '/user/login': {'POST': 'user_api.login'}, '/user/search': 'user_api.search_users'}, 'depends': ['base', 'base_api_wrapper', 'redis_client', 'i18n']} <|reserved_special_token_1|> # noinspection PyStatementEffect { 'name': 'ldap_user', 'summary': '', 'description': '域账号用户管理,登录及查询用户信息', 'author': '', 'website': '', 'source': {'git': 'https://github.com/LeiQiao/Parasite-Plugins.git', 'branch': 'master'}, 'category': '', 'version': '0.1', 'api': { '/user/token': 'user_api.gen_token', '/user/captcha': 'user_api.gen_captcha', '/user/login': { 'POST': 'user_api.login' }, '/user/search': 'user_api.search_users' }, # any plugin necessary for this one to work correctly 'depends': ['base', 'base_api_wrapper', 'redis_client', 'i18n'] }
flexible
{ "blob_id": "b95619f3f52ff3747e38ecc153123962d0122a4d", "index": 387, "step-1": "<mask token>\n", "step-2": "{'name': 'ldap_user', 'summary': '', 'description': '域账号用户管理,登录及查询用户信息',\n 'author': '', 'website': '', 'source': {'git':\n 'https://github.com/LeiQiao/Parasite-Plugins.git', 'branch': 'master'},\n 'category': '', 'version': '0.1', 'api': {'/user/token':\n 'user_api.gen_token', '/user/captcha': 'user_api.gen_captcha',\n '/user/login': {'POST': 'user_api.login'}, '/user/search':\n 'user_api.search_users'}, 'depends': ['base', 'base_api_wrapper',\n 'redis_client', 'i18n']}\n", "step-3": "# noinspection PyStatementEffect\n{\n 'name': 'ldap_user',\n 'summary': '',\n 'description': '域账号用户管理,登录及查询用户信息',\n 'author': '',\n 'website': '',\n 'source': {'git': 'https://github.com/LeiQiao/Parasite-Plugins.git', 'branch': 'master'},\n\n 'category': '',\n 'version': '0.1',\n\n 'api': {\n '/user/token': 'user_api.gen_token',\n '/user/captcha': 'user_api.gen_captcha',\n '/user/login': {\n 'POST': 'user_api.login'\n },\n '/user/search': 'user_api.search_users'\n },\n\n # any plugin necessary for this one to work correctly\n 'depends': ['base', 'base_api_wrapper', 'redis_client', 'i18n']\n}\n", "step-4": null, "step-5": null, "step-ids": [ 0, 1, 2 ] }
[ 0, 1, 2 ]
<|reserved_special_token_0|> def isPrime(n): if n <= 1: return False if n <= 3: return True if n % 2 == 0 or n % 3 == 0: return False i = 5 while i * i <= n: if n % i == 0 or n % (i + 2) == 0: return False i = i + 6 return True <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> def isPrime(n): if n <= 1: return False if n <= 3: return True if n % 2 == 0 or n % 3 == 0: return False i = 5 while i * i <= n: if n % i == 0 or n % (i + 2) == 0: return False i = i + 6 return True def primeList(n1, n2): l = [] for n in range(n1, n2 + 1): if isPrime(n): l.append(n) return l <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> def isPrime(n): if n <= 1: return False if n <= 3: return True if n % 2 == 0 or n % 3 == 0: return False i = 5 while i * i <= n: if n % i == 0 or n % (i + 2) == 0: return False i = i + 6 return True def primeList(n1, n2): l = [] for n in range(n1, n2 + 1): if isPrime(n): l.append(n) return l <|reserved_special_token_0|> for i in range(l): for j in range(l): if i == j: continue l2.append(str(l1[i]) + str(l1[j])) <|reserved_special_token_0|> for i in l3: if str(i) in l2: l4.append(i) <|reserved_special_token_0|> for i in range(2, count): f = x + y x = y y = f print(y) <|reserved_special_token_1|> <|reserved_special_token_0|> def isPrime(n): if n <= 1: return False if n <= 3: return True if n % 2 == 0 or n % 3 == 0: return False i = 5 while i * i <= n: if n % i == 0 or n % (i + 2) == 0: return False i = i + 6 return True def primeList(n1, n2): l = [] for n in range(n1, n2 + 1): if isPrime(n): l.append(n) return l n1, n2 = map(int, input().split()) l1 = primeList(n1, n2) l2 = list() l = len(l1) for i in range(l): for j in range(l): if i == j: continue l2.append(str(l1[i]) + str(l1[j])) l3 = primeList(int(l2[0]), int(l2[-1])) l4 = [] for i in l3: if str(i) in l2: l4.append(i) x = min(l4) y = max(l4) count = len(l4) for i in range(2, count): f = x + y x = y y = f print(y) 
<|reserved_special_token_1|> ''' Problem Description Given two numbers n1 and n2 1. Find prime numbers between n1 and n2, then 2. Make all possible unique combinations of numbers from the prime numbers list you found in step 1. 3. From this new list, again find all prime numbers. 4. Find smallest (a) and largest (b) number from the 2nd generated list, also count of this list. 5. Consider smallest and largest number as the 1st and 2nd number to generate Fibonacci series respectively till the count (number of primes in the 2nd list). 6. Print the last number of a Fibonacci series as an output Constraints 2 <= n1, n2 <= 100 n2 - n1 >= 35 Input Format One line containing two space separated integers n1 and n2. Output Last number of a generated Fibonacci series. Timeout 1 Test Case Example 1 Input : 2 40 Output : 13158006689 Explanation : 1st prime list = [2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37] Combination of all the primes = [23, 25, 27, 211, 213, 217, 219, 223, 229, 231, 32, 35, 37, 311, 313, 319, 323, 329, 331, 337, 52, 53, 57, 511, 513, 517, 519, 523, 529, 531, 537, 72, 73, 75, 711, 713, 717, 719, 723, 729, 731, 737, 112, 113, 115, 117, 1113, 1117, 1119, 1123, 1129, 1131, 1137, 132, 133, 135, 137, 1311, 1317, 1319, 1323, 1329, 1331, 1337, 172, 173, 175, 177, 1711, 1713, 1719, 1723, 1729, 1731, 1737, 192, 193, 195, 197, 1911, 1913, 1917, 1923, 1929, 1931, 1937, 232, 233, 235, 237, 2311, 2313, 2317, 2319, 2329, 2331, 2337, 292, 293, 295, 297, 2911, 2913, 2917, 2919, 2923, 2931, 2937, 312, 315, 317, 3111, 3113, 3117, 3119, 3123, 3129, 3137, 372, 373, 375, 377, 3711, 3713, 3717, 3719, 3723, 3729, 3731] 2nd prime list=[193, 3137, 197, 2311, 3719, 73, 137, 331, 523, 1931, 719, 337, 211, 23, 1117, 223, 1123, 229, 37, 293, 2917, 1319, 1129, 233, 173, 3119, 113, 53, 373, 311, 313, 1913, 1723, 317] smallest (a) = 23 largest (b) = 3719 Therefore, the last number of a Fibonacci series i.e. 
34th Fibonacci number in the series that has 23 and 3719 as the first 2 numbers is 13158006689 Example 2 Input : 30 70 Output : 2027041 Explanation 1st prime list=[31, 37, 41, 43, 47, 53, 59, 61, 67] 2nd prime list generated form combination of 1st prime list = [3137, 5953, 5347, 6761, 3761, 4337, 6737, 6131, 3767, 4759, 4153, 3167, 4159, 6143] smallest prime in 2nd list=3137 largest prime in 2nd list=6761 Therefore, the last number of a Fibonacci series i.e. 14th Fibonacci number in the series that has 3137 and 6761 as the first 2 numbers is 2027041 ''' # test cases passed , private cases failed # https://www.rookieslab.com/posts/fastest-way-to-check-if-a-number-is-prime-or-not # seive of Eratosthenes method # N = 100 # is_prime = [1]*N # is_prime[0] = 0 # is_prime[1] = 0 # https://www.geeksforgeeks.org/python-program-to-check-whether-a-number-is-prime-or-not/ def isPrime(n): # use to find if number is prime in 2nd list # Corner cases if (n <= 1) : return False if (n <= 3) : return True # This is checked so that we can skip # middle five numbers in below loop if (n % 2 == 0 or n % 3 == 0) : return False i = 5 while(i * i <= n) : if (n % i == 0 or n % (i + 2) == 0) : return False i = i + 6 return True def primeList(n1, n2): l = [] for n in range(n1, n2+1): if isPrime(n): l.append(n) return l n1, n2 = map(int, input().split()) l1 = primeList(n1,n2) # print(l1) - check if first list of prime numbers matches #combining l2 = list() l = len(l1) for i in range(l): for j in range(l): if i == j: continue l2.append(str(l1[i])+str(l1[j])) l3 = primeList(int(l2[0]),int(l2[-1])) # list of primes from the second list l4 = [] for i in l3: if str(i) in l2: l4.append(i) # print(l4) - check if secin list of prime numbers matches x = min(l4) y = max(l4) count = len(l4) # print(x,y,count) - check if smallest, largest prime and count match for i in range(2,count): f = x + y x = y y = f print(y)
flexible
{ "blob_id": "fe5050fdf010ce1c4d458b8a52ac92485a7d8cea", "index": 5706, "step-1": "<mask token>\n\n\ndef isPrime(n):\n if n <= 1:\n return False\n if n <= 3:\n return True\n if n % 2 == 0 or n % 3 == 0:\n return False\n i = 5\n while i * i <= n:\n if n % i == 0 or n % (i + 2) == 0:\n return False\n i = i + 6\n return True\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\ndef isPrime(n):\n if n <= 1:\n return False\n if n <= 3:\n return True\n if n % 2 == 0 or n % 3 == 0:\n return False\n i = 5\n while i * i <= n:\n if n % i == 0 or n % (i + 2) == 0:\n return False\n i = i + 6\n return True\n\n\ndef primeList(n1, n2):\n l = []\n for n in range(n1, n2 + 1):\n if isPrime(n):\n l.append(n)\n return l\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\ndef isPrime(n):\n if n <= 1:\n return False\n if n <= 3:\n return True\n if n % 2 == 0 or n % 3 == 0:\n return False\n i = 5\n while i * i <= n:\n if n % i == 0 or n % (i + 2) == 0:\n return False\n i = i + 6\n return True\n\n\ndef primeList(n1, n2):\n l = []\n for n in range(n1, n2 + 1):\n if isPrime(n):\n l.append(n)\n return l\n\n\n<mask token>\nfor i in range(l):\n for j in range(l):\n if i == j:\n continue\n l2.append(str(l1[i]) + str(l1[j]))\n<mask token>\nfor i in l3:\n if str(i) in l2:\n l4.append(i)\n<mask token>\nfor i in range(2, count):\n f = x + y\n x = y\n y = f\nprint(y)\n", "step-4": "<mask token>\n\n\ndef isPrime(n):\n if n <= 1:\n return False\n if n <= 3:\n return True\n if n % 2 == 0 or n % 3 == 0:\n return False\n i = 5\n while i * i <= n:\n if n % i == 0 or n % (i + 2) == 0:\n return False\n i = i + 6\n return True\n\n\ndef primeList(n1, n2):\n l = []\n for n in range(n1, n2 + 1):\n if isPrime(n):\n l.append(n)\n return l\n\n\nn1, n2 = map(int, input().split())\nl1 = primeList(n1, n2)\nl2 = list()\nl = len(l1)\nfor i in range(l):\n for j in range(l):\n if i == j:\n continue\n l2.append(str(l1[i]) + str(l1[j]))\nl3 = primeList(int(l2[0]), int(l2[-1]))\nl4 = []\nfor i in l3:\n if str(i) in l2:\n 
l4.append(i)\nx = min(l4)\ny = max(l4)\ncount = len(l4)\nfor i in range(2, count):\n f = x + y\n x = y\n y = f\nprint(y)\n", "step-5": "'''\nProblem Description\nGiven two numbers n1 and n2\n\n1. Find prime numbers between n1 and n2, then\n\n2. Make all possible unique combinations of numbers from the prime \nnumbers list you found in step 1. \n\n3. From this new list, again find all prime numbers.\n\n4. Find smallest (a) and largest (b) number from the 2nd generated \nlist, also count of this list.\n\n5. Consider smallest and largest number as the 1st and 2nd number \nto generate Fibonacci series respectively till the count \n(number of primes in the 2nd list).\n\n6. Print the last number of a Fibonacci series as an output\n\nConstraints\n2 <= n1, n2 <= 100\n\nn2 - n1 >= 35\n\nInput Format\nOne line containing two space separated integers n1 and n2.\n\nOutput\nLast number of a generated Fibonacci series.\n\nTimeout\n1\n\n\nTest Case\nExample 1\nInput : 2 40\nOutput : 13158006689\n\nExplanation :\n\n1st prime list = [2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37]\n\nCombination of all the primes = [23, 25, 27, 211, 213, 217, 219, \n223, 229, 231, 32, 35, 37, 311, 313, 319, 323, 329, 331, 337, 52, \n53, 57, 511, 513, 517, 519, 523, 529, 531, 537, 72, 73, 75, 711, \n713, 717, 719, 723, 729, 731, 737, 112, 113, 115, 117, 1113, 1117, \n1119, 1123, 1129, 1131, 1137, 132, 133, 135, 137, 1311, 1317, 1319, \n1323, 1329, 1331, 1337, 172, 173, 175, 177, 1711, 1713, 1719, 1723, \n1729, 1731, 1737, 192, 193, 195, 197, 1911, 1913, 1917, 1923, 1929, \n1931, 1937, 232, 233, 235, 237, 2311, 2313, 2317, 2319, 2329, 2331, \n2337, 292, 293, 295, 297, 2911, 2913, 2917, 2919, 2923, 2931, 2937, \n312, 315, 317, 3111, 3113, 3117, 3119, 3123, 3129, 3137, 372, 373, \n375, 377, 3711, 3713, 3717, 3719, 3723, 3729, 3731]\n\n2nd prime list=[193, 3137, 197, 2311, 3719, 73, 137, 331, 523, \n1931, 719, 337, 211, 23, 1117, 223, 1123, 229, 37, 293, 2917, \n1319, 1129, 233, 173, 3119, 113, 53, 373, 
311, 313, 1913, 1723, \n317]\n\nsmallest (a) = 23\n\nlargest (b) = 3719\n\nTherefore, the last number of a Fibonacci series i.e. 34th \nFibonacci number in the series that has 23 and 3719 as the first \n2 numbers is 13158006689\n\nExample 2\nInput : 30 70\nOutput : 2027041 \n\nExplanation\n\n1st prime list=[31, 37, 41, 43, 47, 53, 59, 61, 67]\n\n2nd prime list generated form combination of 1st prime list = [3137, \n5953, 5347, 6761, 3761, 4337, 6737, 6131, 3767, 4759, 4153, 3167, \n4159, 6143]\n\nsmallest prime in 2nd list=3137\nlargest prime in 2nd list=6761\n\nTherefore, the last number of a Fibonacci series i.e. 14th \nFibonacci number in the series that has 3137 and 6761 as the first \n2 numbers is 2027041\n'''\n\n# test cases passed , private cases failed\n\n# https://www.rookieslab.com/posts/fastest-way-to-check-if-a-number-is-prime-or-not\n# seive of Eratosthenes method\n\n# N = 100\n# is_prime = [1]*N\n# is_prime[0] = 0\n# is_prime[1] = 0\n\n# https://www.geeksforgeeks.org/python-program-to-check-whether-a-number-is-prime-or-not/\ndef isPrime(n): # use to find if number is prime in 2nd list\n \n # Corner cases \n if (n <= 1) : \n return False\n if (n <= 3) : \n return True\n \n # This is checked so that we can skip \n # middle five numbers in below loop \n if (n % 2 == 0 or n % 3 == 0) : \n return False\n \n i = 5\n while(i * i <= n) : \n if (n % i == 0 or n % (i + 2) == 0) : \n return False\n i = i + 6\n \n return True\n\ndef primeList(n1, n2):\n l = []\n for n in range(n1, n2+1):\n if isPrime(n):\n l.append(n)\n return l\n\nn1, n2 = map(int, input().split())\nl1 = primeList(n1,n2)\n# print(l1) - check if first list of prime numbers matches\n\n#combining\nl2 = list()\nl = len(l1)\nfor i in range(l):\n for j in range(l):\n if i == j:\n continue\n l2.append(str(l1[i])+str(l1[j]))\n\nl3 = primeList(int(l2[0]),int(l2[-1])) \n# list of primes from the second list\nl4 = []\nfor i in l3:\n if str(i) in l2:\n l4.append(i)\n# print(l4) - check if secin list of 
prime numbers matches\n\nx = min(l4)\ny = max(l4)\ncount = len(l4)\n# print(x,y,count) - check if smallest, largest prime and count match\nfor i in range(2,count):\n f = x + y\n x = y\n y = f\nprint(y)", "step-ids": [ 1, 2, 3, 4, 5 ] }
[ 1, 2, 3, 4, 5 ]
import FWCore.ParameterSet.Config as cms from RecoTracker.MeasurementDet.UpdaterService_cfi import * from RecoTracker.MeasurementDet.MeasurementTrackerESProducer_cfi import *
normal
{ "blob_id": "e79505e802a06f091bbb12708c45e04c4e80da60", "index": 7618, "step-1": "<mask token>\n", "step-2": "import FWCore.ParameterSet.Config as cms\nfrom RecoTracker.MeasurementDet.UpdaterService_cfi import *\nfrom RecoTracker.MeasurementDet.MeasurementTrackerESProducer_cfi import *\n", "step-3": null, "step-4": null, "step-5": null, "step-ids": [ 0, 1 ] }
[ 0, 1 ]
# This script is for character creation. print ("Welcome to the character wizard creation!") # Here you will select your race from the list. race = ["human", "ork", "elf"] print race race = raw_input("Please choose your race: ") print "You have choosen %r" %race # Here you will select your gender. gender = ["male", "female"] print gender gender = raw_input("Please choose your gender: ") print "You have choosen %r" %gender character = {'race': race, 'gender': gender}
normal
{ "blob_id": "243016b14f503a09147f434e7bec31dc204fafdf", "index": 1158, "step-1": "# This script is for character creation.\r\nprint (\"Welcome to the character wizard creation!\")\r\n\r\n# Here you will select your race from the list.\r\nrace = [\"human\", \"ork\", \"elf\"]\r\nprint race\r\nrace = raw_input(\"Please choose your race: \")\r\nprint \"You have choosen %r\" %race\r\n\r\n# Here you will select your gender.\r\ngender = [\"male\", \"female\"]\r\nprint gender\r\ngender = raw_input(\"Please choose your gender: \")\r\nprint \"You have choosen %r\" %gender\r\n\r\ncharacter = {'race': race, 'gender': gender}\r\n", "step-2": null, "step-3": null, "step-4": null, "step-5": null, "step-ids": [ 0 ] }
[ 0 ]
import shlex class MockSOLR(object): class MockHits(list): @property def hits(self): return len(self) @property def docs(self): return self def __init__(self): self.db = {} def add(self, objects): for o in objects: o['text'] = ''.join(o['text']) self.db[o['id']] = o def commit(self): pass def search(self, q, fq=None, **kw): if isinstance(q, unicode): q = q.encode('latin-1') # Parse query preds = [] q_parts = shlex.split(q) if fq: q_parts += fq for part in q_parts: if part == '&&': continue if ':' in part: field, value = part.split(':', 1) preds.append((field, value)) else: preds.append(('text', part)) result = self.MockHits() for obj in self.db.values(): for field, value in preds: neg = False if field[0] == '!': neg = True field = field[1:] if field == 'text' or field.endswith('_t'): if (value not in str(obj.get(field, ''))) ^ neg: break else: if (value != str(obj.get(field, ''))) ^ neg: break else: result.append(obj) return result def delete(self, *args, **kwargs): if kwargs.get('q', None) == '*:*': self.db = {} elif kwargs.get('id', None): del self.db[kwargs['id']] elif kwargs.get('q', None): for doc in self.search(kwargs['q']): self.delete(id=doc['id'])
normal
{ "blob_id": "4774c1f4eafc0132bab0073b60c4bcad6b69380d", "index": 9068, "step-1": "<mask token>\n\n\nclass MockSOLR(object):\n\n\n class MockHits(list):\n\n @property\n def hits(self):\n return len(self)\n\n @property\n def docs(self):\n return self\n <mask token>\n <mask token>\n <mask token>\n\n def search(self, q, fq=None, **kw):\n if isinstance(q, unicode):\n q = q.encode('latin-1')\n preds = []\n q_parts = shlex.split(q)\n if fq:\n q_parts += fq\n for part in q_parts:\n if part == '&&':\n continue\n if ':' in part:\n field, value = part.split(':', 1)\n preds.append((field, value))\n else:\n preds.append(('text', part))\n result = self.MockHits()\n for obj in self.db.values():\n for field, value in preds:\n neg = False\n if field[0] == '!':\n neg = True\n field = field[1:]\n if field == 'text' or field.endswith('_t'):\n if (value not in str(obj.get(field, ''))) ^ neg:\n break\n elif (value != str(obj.get(field, ''))) ^ neg:\n break\n else:\n result.append(obj)\n return result\n\n def delete(self, *args, **kwargs):\n if kwargs.get('q', None) == '*:*':\n self.db = {}\n elif kwargs.get('id', None):\n del self.db[kwargs['id']]\n elif kwargs.get('q', None):\n for doc in self.search(kwargs['q']):\n self.delete(id=doc['id'])\n", "step-2": "<mask token>\n\n\nclass MockSOLR(object):\n\n\n class MockHits(list):\n\n @property\n def hits(self):\n return len(self)\n\n @property\n def docs(self):\n return self\n\n def __init__(self):\n self.db = {}\n\n def add(self, objects):\n for o in objects:\n o['text'] = ''.join(o['text'])\n self.db[o['id']] = o\n <mask token>\n\n def search(self, q, fq=None, **kw):\n if isinstance(q, unicode):\n q = q.encode('latin-1')\n preds = []\n q_parts = shlex.split(q)\n if fq:\n q_parts += fq\n for part in q_parts:\n if part == '&&':\n continue\n if ':' in part:\n field, value = part.split(':', 1)\n preds.append((field, value))\n else:\n preds.append(('text', part))\n result = self.MockHits()\n for obj in self.db.values():\n for field, value in 
preds:\n neg = False\n if field[0] == '!':\n neg = True\n field = field[1:]\n if field == 'text' or field.endswith('_t'):\n if (value not in str(obj.get(field, ''))) ^ neg:\n break\n elif (value != str(obj.get(field, ''))) ^ neg:\n break\n else:\n result.append(obj)\n return result\n\n def delete(self, *args, **kwargs):\n if kwargs.get('q', None) == '*:*':\n self.db = {}\n elif kwargs.get('id', None):\n del self.db[kwargs['id']]\n elif kwargs.get('q', None):\n for doc in self.search(kwargs['q']):\n self.delete(id=doc['id'])\n", "step-3": "<mask token>\n\n\nclass MockSOLR(object):\n\n\n class MockHits(list):\n\n @property\n def hits(self):\n return len(self)\n\n @property\n def docs(self):\n return self\n\n def __init__(self):\n self.db = {}\n\n def add(self, objects):\n for o in objects:\n o['text'] = ''.join(o['text'])\n self.db[o['id']] = o\n\n def commit(self):\n pass\n\n def search(self, q, fq=None, **kw):\n if isinstance(q, unicode):\n q = q.encode('latin-1')\n preds = []\n q_parts = shlex.split(q)\n if fq:\n q_parts += fq\n for part in q_parts:\n if part == '&&':\n continue\n if ':' in part:\n field, value = part.split(':', 1)\n preds.append((field, value))\n else:\n preds.append(('text', part))\n result = self.MockHits()\n for obj in self.db.values():\n for field, value in preds:\n neg = False\n if field[0] == '!':\n neg = True\n field = field[1:]\n if field == 'text' or field.endswith('_t'):\n if (value not in str(obj.get(field, ''))) ^ neg:\n break\n elif (value != str(obj.get(field, ''))) ^ neg:\n break\n else:\n result.append(obj)\n return result\n\n def delete(self, *args, **kwargs):\n if kwargs.get('q', None) == '*:*':\n self.db = {}\n elif kwargs.get('id', None):\n del self.db[kwargs['id']]\n elif kwargs.get('q', None):\n for doc in self.search(kwargs['q']):\n self.delete(id=doc['id'])\n", "step-4": "import shlex\n\n\nclass MockSOLR(object):\n\n\n class MockHits(list):\n\n @property\n def hits(self):\n return len(self)\n\n @property\n def 
docs(self):\n return self\n\n def __init__(self):\n self.db = {}\n\n def add(self, objects):\n for o in objects:\n o['text'] = ''.join(o['text'])\n self.db[o['id']] = o\n\n def commit(self):\n pass\n\n def search(self, q, fq=None, **kw):\n if isinstance(q, unicode):\n q = q.encode('latin-1')\n preds = []\n q_parts = shlex.split(q)\n if fq:\n q_parts += fq\n for part in q_parts:\n if part == '&&':\n continue\n if ':' in part:\n field, value = part.split(':', 1)\n preds.append((field, value))\n else:\n preds.append(('text', part))\n result = self.MockHits()\n for obj in self.db.values():\n for field, value in preds:\n neg = False\n if field[0] == '!':\n neg = True\n field = field[1:]\n if field == 'text' or field.endswith('_t'):\n if (value not in str(obj.get(field, ''))) ^ neg:\n break\n elif (value != str(obj.get(field, ''))) ^ neg:\n break\n else:\n result.append(obj)\n return result\n\n def delete(self, *args, **kwargs):\n if kwargs.get('q', None) == '*:*':\n self.db = {}\n elif kwargs.get('id', None):\n del self.db[kwargs['id']]\n elif kwargs.get('q', None):\n for doc in self.search(kwargs['q']):\n self.delete(id=doc['id'])\n", "step-5": "import shlex\n\n\nclass MockSOLR(object):\n\n class MockHits(list):\n @property\n def hits(self):\n return len(self)\n\n @property\n def docs(self):\n return self\n\n def __init__(self):\n self.db = {}\n\n def add(self, objects):\n for o in objects:\n o['text'] = ''.join(o['text'])\n self.db[o['id']] = o\n\n def commit(self):\n pass\n\n def search(self, q, fq=None, **kw):\n if isinstance(q, unicode):\n q = q.encode('latin-1')\n # Parse query\n preds = []\n q_parts = shlex.split(q)\n if fq:\n q_parts += fq\n for part in q_parts:\n if part == '&&':\n continue\n if ':' in part:\n field, value = part.split(':', 1)\n preds.append((field, value))\n else:\n preds.append(('text', part))\n result = self.MockHits()\n for obj in self.db.values():\n for field, value in preds:\n neg = False\n if field[0] == '!':\n neg = True\n field = 
field[1:]\n if field == 'text' or field.endswith('_t'):\n if (value not in str(obj.get(field, ''))) ^ neg:\n break\n else:\n if (value != str(obj.get(field, ''))) ^ neg:\n break\n else:\n result.append(obj)\n return result\n\n def delete(self, *args, **kwargs):\n if kwargs.get('q', None) == '*:*':\n self.db = {}\n elif kwargs.get('id', None):\n del self.db[kwargs['id']]\n elif kwargs.get('q', None):\n for doc in self.search(kwargs['q']):\n self.delete(id=doc['id'])\n", "step-ids": [ 3, 5, 6, 7, 8 ] }
[ 3, 5, 6, 7, 8 ]
from share_settings import Settings import urllib.request,json import pprint as p s = Settings() prefix = "http://finance.google.com/finance?client=ig&output=json&q=" def get(symbol,exchange): url = prefix+"%s:%s"%(exchange,symbol) u = urllib.request.urlopen(url) #translates url to string c = u.read().decode('utf-8') #slices string to remove characters at start/end of string con=(c[5:-2]) #removes '\' from the text cont=con.replace("\\","") content = json.loads(cont) result = (content['l']) return result def get_lp(s): """gets latest prices from google""" sl = [] for stock in s.symbols: #creates a list of latest stock prices quote = get(stock,"LON") #changes string to integer and removes ',' x = (quote.replace(',','')) x = float(x) sl.append(x) return sl #print(get_lp(s))
normal
{ "blob_id": "7247ef463998f6738c21ad8efa988a32f7fb99c0", "index": 4760, "step-1": "<mask token>\n\n\ndef get_lp(s):\n \"\"\"gets latest prices from google\"\"\"\n sl = []\n for stock in s.symbols:\n quote = get(stock, 'LON')\n x = quote.replace(',', '')\n x = float(x)\n sl.append(x)\n return sl\n", "step-2": "<mask token>\n\n\ndef get(symbol, exchange):\n url = prefix + '%s:%s' % (exchange, symbol)\n u = urllib.request.urlopen(url)\n c = u.read().decode('utf-8')\n con = c[5:-2]\n cont = con.replace('\\\\', '')\n content = json.loads(cont)\n result = content['l']\n return result\n\n\ndef get_lp(s):\n \"\"\"gets latest prices from google\"\"\"\n sl = []\n for stock in s.symbols:\n quote = get(stock, 'LON')\n x = quote.replace(',', '')\n x = float(x)\n sl.append(x)\n return sl\n", "step-3": "<mask token>\ns = Settings()\nprefix = 'http://finance.google.com/finance?client=ig&output=json&q='\n\n\ndef get(symbol, exchange):\n url = prefix + '%s:%s' % (exchange, symbol)\n u = urllib.request.urlopen(url)\n c = u.read().decode('utf-8')\n con = c[5:-2]\n cont = con.replace('\\\\', '')\n content = json.loads(cont)\n result = content['l']\n return result\n\n\ndef get_lp(s):\n \"\"\"gets latest prices from google\"\"\"\n sl = []\n for stock in s.symbols:\n quote = get(stock, 'LON')\n x = quote.replace(',', '')\n x = float(x)\n sl.append(x)\n return sl\n", "step-4": "from share_settings import Settings\nimport urllib.request, json\nimport pprint as p\ns = Settings()\nprefix = 'http://finance.google.com/finance?client=ig&output=json&q='\n\n\ndef get(symbol, exchange):\n url = prefix + '%s:%s' % (exchange, symbol)\n u = urllib.request.urlopen(url)\n c = u.read().decode('utf-8')\n con = c[5:-2]\n cont = con.replace('\\\\', '')\n content = json.loads(cont)\n result = content['l']\n return result\n\n\ndef get_lp(s):\n \"\"\"gets latest prices from google\"\"\"\n sl = []\n for stock in s.symbols:\n quote = get(stock, 'LON')\n x = quote.replace(',', '')\n x = float(x)\n sl.append(x)\n 
return sl\n", "step-5": "from share_settings import Settings\nimport urllib.request,json\nimport pprint as p\ns = Settings()\n\nprefix = \"http://finance.google.com/finance?client=ig&output=json&q=\"\n \ndef get(symbol,exchange):\n url = prefix+\"%s:%s\"%(exchange,symbol)\n u = urllib.request.urlopen(url)\n #translates url to string\n c = u.read().decode('utf-8')\n #slices string to remove characters at start/end of string\n con=(c[5:-2])\n #removes '\\' from the text\n cont=con.replace(\"\\\\\",\"\")\n content = json.loads(cont)\n result = (content['l'])\n return result\n\ndef get_lp(s):\n \"\"\"gets latest prices from google\"\"\"\n sl = [] \n for stock in s.symbols: \n #creates a list of latest stock prices\n quote = get(stock,\"LON\")\n #changes string to integer and removes ','\n x = (quote.replace(',',''))\n x = float(x)\n sl.append(x)\n return sl\n\n#print(get_lp(s))", "step-ids": [ 1, 2, 3, 4, 5 ] }
[ 1, 2, 3, 4, 5 ]
from flask import jsonify, request, render_template, redirect, session, flash
from init import app
from init import mysql


# Renders the reports page for a logged-in user.
@app.route('/reportes')
def reportes():
    """Render views/reportes.html for the session user, or redirect to /login.

    Returns a JSON error message if anything unexpected fails.
    """
    cur = None  # guard: mysql.connect() may raise before a cursor exists
    try:
        cur = mysql.connect().cursor()
        if 'usuario' in session:
            return render_template('views/reportes.html', id=session['id'])
        else:
            return redirect('/login')
    except Exception as e:
        print(e)
        return jsonify('Ha ocurrido un error')
    finally:
        # Only close if the cursor was actually created; an unconditional
        # cur.close() here raised NameError when connect() failed.
        if cur is not None:
            cur.close()


# Purchases report (AXIOS endpoint): totals per product bought by the
# session user within an inclusive date range.
@app.route('/reporte_compras_api', methods=['POST'])
def compras():
    """Return per-product purchase aggregates for the logged-in buyer.

    Expects JSON body: {"fechaInicio": ..., "fechaFin": ...}.
    Returns a list of {id, descripcion, costo envio, precio, cantidad, total}.
    """
    cur = None
    try:
        cur = mysql.connect().cursor()
        if not 'usuario' in session:
            return jsonify('Debes registrarte')
        else:
            _json = request.get_json(force=True)
            _id = session['id']
            _fecha1 = _json['fechaInicio']
            _fecha2 = _json['fechaFin']

            data = []
            query = "SELECT p.id_prod, p.descripcion, SUM(t.cantidad), p.precio, SUM(c.total), p.costoenvio FROM productos t LEFT JOIN tbl_productos p ON p.id_prod = t.id_prod LEFT JOIN tbl_compras c ON c.id_comp = t.id_comp WHERE c.id_comprador = %s AND c.fecha BETWEEN %s AND %s GROUP BY p.id_prod"
            values = (_id, _fecha1, _fecha2)
            cur.execute(query, values)
            rows = cur.fetchall()
            for row in rows:
                # SUM(t.cantidad) comes back as Decimal/str; normalize to int.
                data.append({'id': row[0], 'descripcion': row[1], 'costo envio': row[5], 'precio': row[3], 'cantidad': int(float(row[2])), 'total': row[4]})

            res = jsonify(data)
            res.status_code = 200
            return res
    except Exception as e:
        print(e)
        return jsonify('Ha ocurrido un error')
    finally:
        if cur is not None:
            cur.close()


# Sales report (AXIOS endpoint): per-product sales of a store account.
@app.route('/reporte_ventas_api', methods=['POST'])
def ventas():
    """Return per-product sales aggregates for a store ('T') account.

    Expects JSON body: {"fechaInicio": ..., "fechaFin": ...}.
    Non-store sessions get a JSON message instead.
    """
    cur = None  # previously unbound when the session was not a store,
                # which made the finally clause raise NameError
    try:
        if 'usuario' in session and session['tipo_usuario'] == 'T':
            cur = mysql.connect().cursor()

            _json = request.get_json(force=True)
            _id = session['id']
            _fecha1 = _json['fechaInicio']
            _fecha2 = _json['fechaFin']

            data = []
            query = "SELECT DISTINCT p.id_prod, p.descripcion, p.stock, DATE_FORMAT(p.publicacion, %s), p.precio, p.tiempoenvio, p.costoenvio, SUM(t.cantidad), SUM(c.total) FROM productos t LEFT JOIN tbl_productos p ON p.id_prod = t.id_prod LEFT JOIN tbl_compras c ON c.id_comp = t.id_comp WHERE p.usr_id = %s AND c.fecha BETWEEN %s AND %s GROUP BY p.id_prod"
            # First parameter is the DATE_FORMAT pattern, bound like any value.
            values = ("%d %M %Y", _id, _fecha1, _fecha2)
            cur.execute(query, values)
            rows = cur.fetchall()
            for row in rows:
                data.append({'id': row[0], 'descripcion': row[1], 'stock': row[2], 'publicacion': row[3], 'precio': row[4], 'tiempo envio': row[5], 'costo envio': row[6], 'cantidad': float(row[7]), 'total': row[8]})

            return jsonify(data)
        else:
            return jsonify('Debes registrarte como tienda')

    except Exception as e:
        print(e)
        return jsonify('Ha ocurrido un error')
    finally:
        if cur is not None:
            cur.close()


# Invoices (AXIOS endpoint): full invoice list for buyer or store.
@app.route('/reporte_facturas_api')
def facturas():
    """Return the session user's invoices with payment, counterparty,
    shipping address and line items.

    The counterparty column depends on the account type: buyers ('C') see
    the store name, stores see the buyer name.
    """
    cur = None
    try:
        cur = mysql.connect().cursor()

        facturas = []
        data = {}
        # BUG FIX: the original compared strings with `is` (identity), which
        # only works by CPython interning accident; use equality instead.
        # NOTE(review): the join `d.id_dire = d.id_dire` is always true and
        # looks like it should reference a key on t — confirm intended join.
        if session['tipo_usuario'] == 'C':
            cur.execute("SELECT t.id_comp, DATE_FORMAT(t.fecha, %s), t.total, p.nombrepropietario, p.numero, u.nombre, d.numcasillero, d.provincia FROM tbl_compras t LEFT JOIN tbl_metodosdepago p ON t.id_pago = p.id_pago LEFT JOIN tbl_usuarios u ON u.id_usr = t.id_tienda LEFT JOIN tbl_direccionesdeenvio d ON d.id_dire = d.id_dire WHERE t.id_comprador = %s", ("%d %M %Y", session['id'],))
        else:
            cur.execute("SELECT t.id_comp, DATE_FORMAT(t.fecha, %s), t.total, p.nombrepropietario, p.numero, u.nombre, d.numcasillero, d.provincia FROM tbl_compras t LEFT JOIN tbl_metodosdepago p ON t.id_pago = p.id_pago LEFT JOIN tbl_usuarios u ON u.id_usr = t.id_comprador LEFT JOIN tbl_direccionesdeenvio d ON d.id_dire = d.id_dire WHERE t.id_tienda = %s", ("%d %M %Y", session['id'],))

        rows = cur.fetchall()
        for row in rows:
            if session['tipo_usuario'] == 'C':
                data = {'id': row[0], 'fecha': row[1], 'total': row[2], 'metodopago': {'propietario': row[3], 'numero': row[4]}, 'tienda': row[5], 'direccionenvio': {'casillero': row[6], 'provincia': row[7]}, 'productos': []}
            else:
                data = {'id': row[0], 'fecha': row[1], 'total': row[2], 'metodopago': {'propietario': row[3], 'numero': row[4]}, 'comprador': row[5], 'direccionenvio': {'casillero': row[6], 'provincia': row[7]}, 'productos': []}
            # Attach the line items of this invoice.
            cur.execute("SELECT p.descripcion, p.tiempoenvio, p.costoenvio, t.cantidad, p.precio FROM productos t LEFT JOIN tbl_productos p ON p.id_prod = t.id_prod WHERE t.id_comp = %s", (data['id'],))
            prods = cur.fetchall()
            for prod in prods:
                data['productos'].append({'descripcion': prod[0], 'tiempo envio': prod[1], 'costo envio': prod[2], 'cantidad': prod[3], 'precio': prod[4]})
            facturas.append(data)

        return jsonify(facturas)
    except Exception as e:
        print(e)
        return jsonify('Ha ocurrido un error')
    finally:
        if cur is not None:
            cur.close()


# Subscriptions (AXIOS endpoint): followed stores plus wish-list products.
@app.route('/reporte_suscripciones_api')
def suscripciones():
    """Return [wish-list products, followed stores] for the session buyer."""
    cur = None
    try:
        cur = mysql.connect().cursor()

        data = []
        tiendas = []
        cur.execute("SELECT u.id_usr, u.nombre, u.email, u.foto FROM seguir t LEFT JOIN tbl_usuarios u ON u.id_usr = t.id_tienda WHERE t.id_comprador =%s ", (session['id'],))
        rows = cur.fetchall()
        for row in rows:
            tiendas.append({'id': row[0], 'nombre': row[1], 'foto': row[3], 'email': row[2]})

        productos = []
        cur.execute("SELECT u.descripcion, c.descripcion, u.precio, b.nombre, u.id_prod FROM listadeseos t LEFT JOIN tbl_productos u ON u.id_prod = t.id_producto LEFT JOIN tbl_categoriasproductos c ON u.id_categoria = c.id_catp LEFT JOIN tbl_usuarios b ON u.usr_id = b.id_usr WHERE t.usr_id =%s ", (session['id'],))
        rows = cur.fetchall()
        for row in rows:
            productos.append({'id': row[4], 'descripcion': row[0], 'categoria': row[1], 'precio': row[2], 'tienda': row[3]})

        # Response shape kept as-is for the frontend: [productos, tiendas].
        data.append(productos)
        data.append(tiendas)
        return jsonify(data)
    except Exception as e:
        print(e)
        return jsonify('Ha ocurrido un error')
    finally:
        if cur is not None:
            cur.close()


# Offers (AXIOS endpoint): products filtered by category, max price, dates.
@app.route('/reporte_ofertas_api', methods=['POST'])
def ofertas():
    """Return products matching the requested category/price/date filter.

    Expects JSON body: {"fechaInicio", "fechaFin", "precio", "categoria"}.
    Returns an empty list when nothing matches (previously returned None,
    which made Flask raise a 500).
    """
    cur = None
    try:
        cur = mysql.connect().cursor()
        if not 'usuario' in session:
            return jsonify('Debes iniciar sesion')
        else:
            _json = request.get_json(force=True)
            _fecha1 = _json['fechaInicio']
            _fecha2 = _json['fechaFin']
            _precio = _json['precio']
            _categoria = _json['categoria']
            query = "SELECT p.id_prod, p.descripcion, c.descripcion, p.precio, DATE_FORMAT(p.publicacion, %s) FROM tbl_productos p LEFT JOIN tbl_categoriasproductos c ON p.id_categoria = c.id_catp WHERE p.id_categoria = %s AND p.precio <= %s AND p.publicacion BETWEEN %s AND %s "
            values = ("%d %M %Y", _categoria, _precio, _fecha1, _fecha2)
            cur.execute(query, values)
            productos = []
            rows = cur.fetchall()
            for row in rows:
                productos.append({'id': row[0], 'descripcion': row[1], 'categoria': row[2], 'precio': row[3], 'publicacion': row[4]})
            # Always respond with JSON, even when the result set is empty.
            return jsonify(productos)
    except Exception as e:
        print(e)
        return jsonify('Ha ocurrido un error')
    finally:
        if cur is not None:
            cur.close()
normal
{ "blob_id": "77995aab723fb118be3f986b8cd93f349690baca", "index": 2090, "step-1": "<mask token>\n\n\n@app.route('/reportes')\ndef reportes():\n try:\n cur = mysql.connect().cursor()\n if 'usuario' in session:\n return render_template('views/reportes.html', id=session['id'])\n else:\n return redirect('/login')\n except Exception as e:\n print(e)\n return jsonify('Ha ocurrido un error')\n finally:\n cur.close()\n\n\n@app.route('/reporte_compras_api', methods=['POST'])\ndef compras():\n try:\n cur = mysql.connect().cursor()\n if not 'usuario' in session:\n return jsonify('Debes registrarte')\n else:\n _json = request.get_json(force=True)\n _id = session['id']\n _fecha1 = _json['fechaInicio']\n _fecha2 = _json['fechaFin']\n data = []\n query = (\n 'SELECT p.id_prod, p.descripcion, SUM(t.cantidad), p.precio, SUM(c.total), p.costoenvio FROM productos t LEFT JOIN tbl_productos p ON p.id_prod = t.id_prod LEFT JOIN tbl_compras c ON c.id_comp = t.id_comp WHERE c.id_comprador = %s AND c.fecha BETWEEN %s AND %s GROUP BY p.id_prod'\n )\n values = _id, _fecha1, _fecha2\n cur.execute(query, values)\n rows = cur.fetchall()\n for row in rows:\n data.append({'id': row[0], 'descripcion': row[1],\n 'costo envio': row[5], 'precio': row[3], 'cantidad':\n int(float(row[2])), 'total': row[4]})\n res = jsonify(data)\n res.status_code = 200\n return res\n except Exception as e:\n print(e)\n return jsonify('Ha ocurrido un error')\n finally:\n cur.close()\n\n\n@app.route('/reporte_ventas_api', methods=['POST'])\ndef ventas():\n try:\n if 'usuario' in session and session['tipo_usuario'] == 'T':\n cur = mysql.connect().cursor()\n _json = request.get_json(force=True)\n _id = session['id']\n _fecha1 = _json['fechaInicio']\n _fecha2 = _json['fechaFin']\n data = []\n query = (\n 'SELECT DISTINCT p.id_prod, p.descripcion, p.stock, DATE_FORMAT(p.publicacion, %s), p.precio, p.tiempoenvio, p.costoenvio, SUM(t.cantidad), SUM(c.total) FROM productos t LEFT JOIN tbl_productos p ON p.id_prod = t.id_prod 
LEFT JOIN tbl_compras c ON c.id_comp = t.id_comp WHERE p.usr_id = %s AND c.fecha BETWEEN %s AND %s GROUP BY p.id_prod'\n )\n values = '%d %M %Y', _id, _fecha1, _fecha2\n cur.execute(query, values)\n rows = cur.fetchall()\n for row in rows:\n data.append({'id': row[0], 'descripcion': row[1], 'stock':\n row[2], 'publicacion': row[3], 'precio': row[4],\n 'tiempo envio': row[5], 'costo envio': row[6],\n 'cantidad': float(row[7]), 'total': row[8]})\n return jsonify(data)\n else:\n return jsonify('Debes registrarte como tienda')\n except Exception as e:\n print(e)\n return jsonify('Ha ocurrido un error')\n finally:\n cur.close()\n\n\n@app.route('/reporte_facturas_api')\ndef facturas():\n try:\n cur = mysql.connect().cursor()\n facturas = []\n data = {}\n if session['tipo_usuario'] is 'C':\n cur.execute(\n 'SELECT t.id_comp, DATE_FORMAT(t.fecha, %s), t.total, p.nombrepropietario, p.numero, u.nombre, d.numcasillero, d.provincia FROM tbl_compras t LEFT JOIN tbl_metodosdepago p ON t.id_pago = p.id_pago LEFT JOIN tbl_usuarios u ON u.id_usr = t.id_tienda LEFT JOIN tbl_direccionesdeenvio d ON d.id_dire = d.id_dire WHERE t.id_comprador = %s'\n , ('%d %M %Y', session['id']))\n else:\n cur.execute(\n 'SELECT t.id_comp, DATE_FORMAT(t.fecha, %s), t.total, p.nombrepropietario, p.numero, u.nombre, d.numcasillero, d.provincia FROM tbl_compras t LEFT JOIN tbl_metodosdepago p ON t.id_pago = p.id_pago LEFT JOIN tbl_usuarios u ON u.id_usr = t.id_comprador LEFT JOIN tbl_direccionesdeenvio d ON d.id_dire = d.id_dire WHERE t.id_tienda = %s'\n , ('%d %M %Y', session['id']))\n rows = cur.fetchall()\n for row in rows:\n if session['tipo_usuario'] is 'C':\n data = {'id': row[0], 'fecha': row[1], 'total': row[2],\n 'metodopago': {'propietario': row[3], 'numero': row[4]},\n 'tienda': row[5], 'direccionenvio': {'casillero': row[6\n ], 'provincia': row[7]}, 'productos': []}\n else:\n data = {'id': row[0], 'fecha': row[1], 'total': row[2],\n 'metodopago': {'propietario': row[3], 'numero': row[4]},\n 
'comprador': row[5], 'direccionenvio': {'casillero':\n row[6], 'provincia': row[7]}, 'productos': []}\n cur.execute(\n 'SELECT p.descripcion, p.tiempoenvio, p.costoenvio, t.cantidad, p.precio FROM productos t LEFT JOIN tbl_productos p ON p.id_prod = t.id_prod WHERE t.id_comp = %s'\n , (data['id'],))\n prods = cur.fetchall()\n for prod in prods:\n data['productos'].append({'descripcion': prod[0],\n 'tiempo envio': prod[1], 'costo envio': prod[2],\n 'cantidad': prod[3], 'precio': prod[4]})\n facturas.append(data)\n return jsonify(facturas)\n except Exception as e:\n print(e)\n return jsonify('Ha ocurrido un error')\n finally:\n cur.close()\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\n@app.route('/reportes')\ndef reportes():\n try:\n cur = mysql.connect().cursor()\n if 'usuario' in session:\n return render_template('views/reportes.html', id=session['id'])\n else:\n return redirect('/login')\n except Exception as e:\n print(e)\n return jsonify('Ha ocurrido un error')\n finally:\n cur.close()\n\n\n@app.route('/reporte_compras_api', methods=['POST'])\ndef compras():\n try:\n cur = mysql.connect().cursor()\n if not 'usuario' in session:\n return jsonify('Debes registrarte')\n else:\n _json = request.get_json(force=True)\n _id = session['id']\n _fecha1 = _json['fechaInicio']\n _fecha2 = _json['fechaFin']\n data = []\n query = (\n 'SELECT p.id_prod, p.descripcion, SUM(t.cantidad), p.precio, SUM(c.total), p.costoenvio FROM productos t LEFT JOIN tbl_productos p ON p.id_prod = t.id_prod LEFT JOIN tbl_compras c ON c.id_comp = t.id_comp WHERE c.id_comprador = %s AND c.fecha BETWEEN %s AND %s GROUP BY p.id_prod'\n )\n values = _id, _fecha1, _fecha2\n cur.execute(query, values)\n rows = cur.fetchall()\n for row in rows:\n data.append({'id': row[0], 'descripcion': row[1],\n 'costo envio': row[5], 'precio': row[3], 'cantidad':\n int(float(row[2])), 'total': row[4]})\n res = jsonify(data)\n res.status_code = 200\n return res\n except Exception as e:\n print(e)\n return 
jsonify('Ha ocurrido un error')\n finally:\n cur.close()\n\n\n@app.route('/reporte_ventas_api', methods=['POST'])\ndef ventas():\n try:\n if 'usuario' in session and session['tipo_usuario'] == 'T':\n cur = mysql.connect().cursor()\n _json = request.get_json(force=True)\n _id = session['id']\n _fecha1 = _json['fechaInicio']\n _fecha2 = _json['fechaFin']\n data = []\n query = (\n 'SELECT DISTINCT p.id_prod, p.descripcion, p.stock, DATE_FORMAT(p.publicacion, %s), p.precio, p.tiempoenvio, p.costoenvio, SUM(t.cantidad), SUM(c.total) FROM productos t LEFT JOIN tbl_productos p ON p.id_prod = t.id_prod LEFT JOIN tbl_compras c ON c.id_comp = t.id_comp WHERE p.usr_id = %s AND c.fecha BETWEEN %s AND %s GROUP BY p.id_prod'\n )\n values = '%d %M %Y', _id, _fecha1, _fecha2\n cur.execute(query, values)\n rows = cur.fetchall()\n for row in rows:\n data.append({'id': row[0], 'descripcion': row[1], 'stock':\n row[2], 'publicacion': row[3], 'precio': row[4],\n 'tiempo envio': row[5], 'costo envio': row[6],\n 'cantidad': float(row[7]), 'total': row[8]})\n return jsonify(data)\n else:\n return jsonify('Debes registrarte como tienda')\n except Exception as e:\n print(e)\n return jsonify('Ha ocurrido un error')\n finally:\n cur.close()\n\n\n@app.route('/reporte_facturas_api')\ndef facturas():\n try:\n cur = mysql.connect().cursor()\n facturas = []\n data = {}\n if session['tipo_usuario'] is 'C':\n cur.execute(\n 'SELECT t.id_comp, DATE_FORMAT(t.fecha, %s), t.total, p.nombrepropietario, p.numero, u.nombre, d.numcasillero, d.provincia FROM tbl_compras t LEFT JOIN tbl_metodosdepago p ON t.id_pago = p.id_pago LEFT JOIN tbl_usuarios u ON u.id_usr = t.id_tienda LEFT JOIN tbl_direccionesdeenvio d ON d.id_dire = d.id_dire WHERE t.id_comprador = %s'\n , ('%d %M %Y', session['id']))\n else:\n cur.execute(\n 'SELECT t.id_comp, DATE_FORMAT(t.fecha, %s), t.total, p.nombrepropietario, p.numero, u.nombre, d.numcasillero, d.provincia FROM tbl_compras t LEFT JOIN tbl_metodosdepago p ON t.id_pago = 
p.id_pago LEFT JOIN tbl_usuarios u ON u.id_usr = t.id_comprador LEFT JOIN tbl_direccionesdeenvio d ON d.id_dire = d.id_dire WHERE t.id_tienda = %s'\n , ('%d %M %Y', session['id']))\n rows = cur.fetchall()\n for row in rows:\n if session['tipo_usuario'] is 'C':\n data = {'id': row[0], 'fecha': row[1], 'total': row[2],\n 'metodopago': {'propietario': row[3], 'numero': row[4]},\n 'tienda': row[5], 'direccionenvio': {'casillero': row[6\n ], 'provincia': row[7]}, 'productos': []}\n else:\n data = {'id': row[0], 'fecha': row[1], 'total': row[2],\n 'metodopago': {'propietario': row[3], 'numero': row[4]},\n 'comprador': row[5], 'direccionenvio': {'casillero':\n row[6], 'provincia': row[7]}, 'productos': []}\n cur.execute(\n 'SELECT p.descripcion, p.tiempoenvio, p.costoenvio, t.cantidad, p.precio FROM productos t LEFT JOIN tbl_productos p ON p.id_prod = t.id_prod WHERE t.id_comp = %s'\n , (data['id'],))\n prods = cur.fetchall()\n for prod in prods:\n data['productos'].append({'descripcion': prod[0],\n 'tiempo envio': prod[1], 'costo envio': prod[2],\n 'cantidad': prod[3], 'precio': prod[4]})\n facturas.append(data)\n return jsonify(facturas)\n except Exception as e:\n print(e)\n return jsonify('Ha ocurrido un error')\n finally:\n cur.close()\n\n\n@app.route('/reporte_suscripciones_api')\ndef suscripciones():\n try:\n cur = mysql.connect().cursor()\n data = []\n tiendas = []\n cur.execute(\n 'SELECT u.id_usr, u.nombre, u.email, u.foto FROM seguir t LEFT JOIN tbl_usuarios u ON u.id_usr = t.id_tienda WHERE t.id_comprador =%s '\n , (session['id'],))\n rows = cur.fetchall()\n for row in rows:\n tiendas.append({'id': row[0], 'nombre': row[1], 'foto': row[3],\n 'email': row[2]})\n productos = []\n cur.execute(\n 'SELECT u.descripcion, c.descripcion, u.precio, b.nombre, u.id_prod FROM listadeseos t LEFT JOIN tbl_productos u ON u.id_prod = t.id_producto LEFT JOIN tbl_categoriasproductos c ON u.id_categoria = c.id_catp LEFT JOIN tbl_usuarios b ON u.usr_id = b.id_usr WHERE t.usr_id 
=%s '\n , (session['id'],))\n rows = cur.fetchall()\n for row in rows:\n productos.append({'id': row[4], 'descripcion': row[0],\n 'categoria': row[1], 'precio': row[2], 'tienda': row[3]})\n data.append(productos)\n data.append(tiendas)\n return jsonify(data)\n except Exception as e:\n print(e)\n return jsonify('Ha ocurrido un error')\n finally:\n cur.close()\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\n@app.route('/reportes')\ndef reportes():\n try:\n cur = mysql.connect().cursor()\n if 'usuario' in session:\n return render_template('views/reportes.html', id=session['id'])\n else:\n return redirect('/login')\n except Exception as e:\n print(e)\n return jsonify('Ha ocurrido un error')\n finally:\n cur.close()\n\n\n@app.route('/reporte_compras_api', methods=['POST'])\ndef compras():\n try:\n cur = mysql.connect().cursor()\n if not 'usuario' in session:\n return jsonify('Debes registrarte')\n else:\n _json = request.get_json(force=True)\n _id = session['id']\n _fecha1 = _json['fechaInicio']\n _fecha2 = _json['fechaFin']\n data = []\n query = (\n 'SELECT p.id_prod, p.descripcion, SUM(t.cantidad), p.precio, SUM(c.total), p.costoenvio FROM productos t LEFT JOIN tbl_productos p ON p.id_prod = t.id_prod LEFT JOIN tbl_compras c ON c.id_comp = t.id_comp WHERE c.id_comprador = %s AND c.fecha BETWEEN %s AND %s GROUP BY p.id_prod'\n )\n values = _id, _fecha1, _fecha2\n cur.execute(query, values)\n rows = cur.fetchall()\n for row in rows:\n data.append({'id': row[0], 'descripcion': row[1],\n 'costo envio': row[5], 'precio': row[3], 'cantidad':\n int(float(row[2])), 'total': row[4]})\n res = jsonify(data)\n res.status_code = 200\n return res\n except Exception as e:\n print(e)\n return jsonify('Ha ocurrido un error')\n finally:\n cur.close()\n\n\n@app.route('/reporte_ventas_api', methods=['POST'])\ndef ventas():\n try:\n if 'usuario' in session and session['tipo_usuario'] == 'T':\n cur = mysql.connect().cursor()\n _json = request.get_json(force=True)\n _id = session['id']\n 
_fecha1 = _json['fechaInicio']\n _fecha2 = _json['fechaFin']\n data = []\n query = (\n 'SELECT DISTINCT p.id_prod, p.descripcion, p.stock, DATE_FORMAT(p.publicacion, %s), p.precio, p.tiempoenvio, p.costoenvio, SUM(t.cantidad), SUM(c.total) FROM productos t LEFT JOIN tbl_productos p ON p.id_prod = t.id_prod LEFT JOIN tbl_compras c ON c.id_comp = t.id_comp WHERE p.usr_id = %s AND c.fecha BETWEEN %s AND %s GROUP BY p.id_prod'\n )\n values = '%d %M %Y', _id, _fecha1, _fecha2\n cur.execute(query, values)\n rows = cur.fetchall()\n for row in rows:\n data.append({'id': row[0], 'descripcion': row[1], 'stock':\n row[2], 'publicacion': row[3], 'precio': row[4],\n 'tiempo envio': row[5], 'costo envio': row[6],\n 'cantidad': float(row[7]), 'total': row[8]})\n return jsonify(data)\n else:\n return jsonify('Debes registrarte como tienda')\n except Exception as e:\n print(e)\n return jsonify('Ha ocurrido un error')\n finally:\n cur.close()\n\n\n@app.route('/reporte_facturas_api')\ndef facturas():\n try:\n cur = mysql.connect().cursor()\n facturas = []\n data = {}\n if session['tipo_usuario'] is 'C':\n cur.execute(\n 'SELECT t.id_comp, DATE_FORMAT(t.fecha, %s), t.total, p.nombrepropietario, p.numero, u.nombre, d.numcasillero, d.provincia FROM tbl_compras t LEFT JOIN tbl_metodosdepago p ON t.id_pago = p.id_pago LEFT JOIN tbl_usuarios u ON u.id_usr = t.id_tienda LEFT JOIN tbl_direccionesdeenvio d ON d.id_dire = d.id_dire WHERE t.id_comprador = %s'\n , ('%d %M %Y', session['id']))\n else:\n cur.execute(\n 'SELECT t.id_comp, DATE_FORMAT(t.fecha, %s), t.total, p.nombrepropietario, p.numero, u.nombre, d.numcasillero, d.provincia FROM tbl_compras t LEFT JOIN tbl_metodosdepago p ON t.id_pago = p.id_pago LEFT JOIN tbl_usuarios u ON u.id_usr = t.id_comprador LEFT JOIN tbl_direccionesdeenvio d ON d.id_dire = d.id_dire WHERE t.id_tienda = %s'\n , ('%d %M %Y', session['id']))\n rows = cur.fetchall()\n for row in rows:\n if session['tipo_usuario'] is 'C':\n data = {'id': row[0], 'fecha': 
row[1], 'total': row[2],\n 'metodopago': {'propietario': row[3], 'numero': row[4]},\n 'tienda': row[5], 'direccionenvio': {'casillero': row[6\n ], 'provincia': row[7]}, 'productos': []}\n else:\n data = {'id': row[0], 'fecha': row[1], 'total': row[2],\n 'metodopago': {'propietario': row[3], 'numero': row[4]},\n 'comprador': row[5], 'direccionenvio': {'casillero':\n row[6], 'provincia': row[7]}, 'productos': []}\n cur.execute(\n 'SELECT p.descripcion, p.tiempoenvio, p.costoenvio, t.cantidad, p.precio FROM productos t LEFT JOIN tbl_productos p ON p.id_prod = t.id_prod WHERE t.id_comp = %s'\n , (data['id'],))\n prods = cur.fetchall()\n for prod in prods:\n data['productos'].append({'descripcion': prod[0],\n 'tiempo envio': prod[1], 'costo envio': prod[2],\n 'cantidad': prod[3], 'precio': prod[4]})\n facturas.append(data)\n return jsonify(facturas)\n except Exception as e:\n print(e)\n return jsonify('Ha ocurrido un error')\n finally:\n cur.close()\n\n\n@app.route('/reporte_suscripciones_api')\ndef suscripciones():\n try:\n cur = mysql.connect().cursor()\n data = []\n tiendas = []\n cur.execute(\n 'SELECT u.id_usr, u.nombre, u.email, u.foto FROM seguir t LEFT JOIN tbl_usuarios u ON u.id_usr = t.id_tienda WHERE t.id_comprador =%s '\n , (session['id'],))\n rows = cur.fetchall()\n for row in rows:\n tiendas.append({'id': row[0], 'nombre': row[1], 'foto': row[3],\n 'email': row[2]})\n productos = []\n cur.execute(\n 'SELECT u.descripcion, c.descripcion, u.precio, b.nombre, u.id_prod FROM listadeseos t LEFT JOIN tbl_productos u ON u.id_prod = t.id_producto LEFT JOIN tbl_categoriasproductos c ON u.id_categoria = c.id_catp LEFT JOIN tbl_usuarios b ON u.usr_id = b.id_usr WHERE t.usr_id =%s '\n , (session['id'],))\n rows = cur.fetchall()\n for row in rows:\n productos.append({'id': row[4], 'descripcion': row[0],\n 'categoria': row[1], 'precio': row[2], 'tienda': row[3]})\n data.append(productos)\n data.append(tiendas)\n return jsonify(data)\n except Exception as e:\n print(e)\n 
return jsonify('Ha ocurrido un error')\n finally:\n cur.close()\n\n\n@app.route('/reporte_ofertas_api', methods=['POST'])\ndef ofertas():\n try:\n cur = mysql.connect().cursor()\n if not 'usuario' in session:\n return jsonify('Debes iniciar sesion')\n else:\n _json = request.get_json(force=True)\n _fecha1 = _json['fechaInicio']\n _fecha2 = _json['fechaFin']\n _precio = _json['precio']\n _categoria = _json['categoria']\n query = (\n 'SELECT p.id_prod, p.descripcion, c.descripcion, p.precio, DATE_FORMAT(p.publicacion, %s) FROM tbl_productos p LEFT JOIN tbl_categoriasproductos c ON p.id_categoria = c.id_catp WHERE p.id_categoria = %s AND p.precio <= %s AND p.publicacion BETWEEN %s AND %s '\n )\n values = '%d %M %Y', _categoria, _precio, _fecha1, _fecha2\n cur.execute(query, values)\n productos = []\n rows = cur.fetchall()\n if rows:\n for row in rows:\n productos.append({'id': row[0], 'descripcion': row[1],\n 'categoria': row[2], 'precio': row[3],\n 'publicacion': row[4]})\n return jsonify(productos)\n except Exception as e:\n print(e)\n return jsonify('Ha ocurrido un error')\n finally:\n cur.close()\n", "step-4": "from flask import jsonify, request, render_template, redirect, session, flash\nfrom init import app\nfrom init import mysql\n\n\n@app.route('/reportes')\ndef reportes():\n try:\n cur = mysql.connect().cursor()\n if 'usuario' in session:\n return render_template('views/reportes.html', id=session['id'])\n else:\n return redirect('/login')\n except Exception as e:\n print(e)\n return jsonify('Ha ocurrido un error')\n finally:\n cur.close()\n\n\n@app.route('/reporte_compras_api', methods=['POST'])\ndef compras():\n try:\n cur = mysql.connect().cursor()\n if not 'usuario' in session:\n return jsonify('Debes registrarte')\n else:\n _json = request.get_json(force=True)\n _id = session['id']\n _fecha1 = _json['fechaInicio']\n _fecha2 = _json['fechaFin']\n data = []\n query = (\n 'SELECT p.id_prod, p.descripcion, SUM(t.cantidad), p.precio, SUM(c.total), p.costoenvio 
FROM productos t LEFT JOIN tbl_productos p ON p.id_prod = t.id_prod LEFT JOIN tbl_compras c ON c.id_comp = t.id_comp WHERE c.id_comprador = %s AND c.fecha BETWEEN %s AND %s GROUP BY p.id_prod'\n )\n values = _id, _fecha1, _fecha2\n cur.execute(query, values)\n rows = cur.fetchall()\n for row in rows:\n data.append({'id': row[0], 'descripcion': row[1],\n 'costo envio': row[5], 'precio': row[3], 'cantidad':\n int(float(row[2])), 'total': row[4]})\n res = jsonify(data)\n res.status_code = 200\n return res\n except Exception as e:\n print(e)\n return jsonify('Ha ocurrido un error')\n finally:\n cur.close()\n\n\n@app.route('/reporte_ventas_api', methods=['POST'])\ndef ventas():\n try:\n if 'usuario' in session and session['tipo_usuario'] == 'T':\n cur = mysql.connect().cursor()\n _json = request.get_json(force=True)\n _id = session['id']\n _fecha1 = _json['fechaInicio']\n _fecha2 = _json['fechaFin']\n data = []\n query = (\n 'SELECT DISTINCT p.id_prod, p.descripcion, p.stock, DATE_FORMAT(p.publicacion, %s), p.precio, p.tiempoenvio, p.costoenvio, SUM(t.cantidad), SUM(c.total) FROM productos t LEFT JOIN tbl_productos p ON p.id_prod = t.id_prod LEFT JOIN tbl_compras c ON c.id_comp = t.id_comp WHERE p.usr_id = %s AND c.fecha BETWEEN %s AND %s GROUP BY p.id_prod'\n )\n values = '%d %M %Y', _id, _fecha1, _fecha2\n cur.execute(query, values)\n rows = cur.fetchall()\n for row in rows:\n data.append({'id': row[0], 'descripcion': row[1], 'stock':\n row[2], 'publicacion': row[3], 'precio': row[4],\n 'tiempo envio': row[5], 'costo envio': row[6],\n 'cantidad': float(row[7]), 'total': row[8]})\n return jsonify(data)\n else:\n return jsonify('Debes registrarte como tienda')\n except Exception as e:\n print(e)\n return jsonify('Ha ocurrido un error')\n finally:\n cur.close()\n\n\n@app.route('/reporte_facturas_api')\ndef facturas():\n try:\n cur = mysql.connect().cursor()\n facturas = []\n data = {}\n if session['tipo_usuario'] is 'C':\n cur.execute(\n 'SELECT t.id_comp, 
DATE_FORMAT(t.fecha, %s), t.total, p.nombrepropietario, p.numero, u.nombre, d.numcasillero, d.provincia FROM tbl_compras t LEFT JOIN tbl_metodosdepago p ON t.id_pago = p.id_pago LEFT JOIN tbl_usuarios u ON u.id_usr = t.id_tienda LEFT JOIN tbl_direccionesdeenvio d ON d.id_dire = d.id_dire WHERE t.id_comprador = %s'\n , ('%d %M %Y', session['id']))\n else:\n cur.execute(\n 'SELECT t.id_comp, DATE_FORMAT(t.fecha, %s), t.total, p.nombrepropietario, p.numero, u.nombre, d.numcasillero, d.provincia FROM tbl_compras t LEFT JOIN tbl_metodosdepago p ON t.id_pago = p.id_pago LEFT JOIN tbl_usuarios u ON u.id_usr = t.id_comprador LEFT JOIN tbl_direccionesdeenvio d ON d.id_dire = d.id_dire WHERE t.id_tienda = %s'\n , ('%d %M %Y', session['id']))\n rows = cur.fetchall()\n for row in rows:\n if session['tipo_usuario'] is 'C':\n data = {'id': row[0], 'fecha': row[1], 'total': row[2],\n 'metodopago': {'propietario': row[3], 'numero': row[4]},\n 'tienda': row[5], 'direccionenvio': {'casillero': row[6\n ], 'provincia': row[7]}, 'productos': []}\n else:\n data = {'id': row[0], 'fecha': row[1], 'total': row[2],\n 'metodopago': {'propietario': row[3], 'numero': row[4]},\n 'comprador': row[5], 'direccionenvio': {'casillero':\n row[6], 'provincia': row[7]}, 'productos': []}\n cur.execute(\n 'SELECT p.descripcion, p.tiempoenvio, p.costoenvio, t.cantidad, p.precio FROM productos t LEFT JOIN tbl_productos p ON p.id_prod = t.id_prod WHERE t.id_comp = %s'\n , (data['id'],))\n prods = cur.fetchall()\n for prod in prods:\n data['productos'].append({'descripcion': prod[0],\n 'tiempo envio': prod[1], 'costo envio': prod[2],\n 'cantidad': prod[3], 'precio': prod[4]})\n facturas.append(data)\n return jsonify(facturas)\n except Exception as e:\n print(e)\n return jsonify('Ha ocurrido un error')\n finally:\n cur.close()\n\n\n@app.route('/reporte_suscripciones_api')\ndef suscripciones():\n try:\n cur = mysql.connect().cursor()\n data = []\n tiendas = []\n cur.execute(\n 'SELECT u.id_usr, u.nombre, 
u.email, u.foto FROM seguir t LEFT JOIN tbl_usuarios u ON u.id_usr = t.id_tienda WHERE t.id_comprador =%s '\n , (session['id'],))\n rows = cur.fetchall()\n for row in rows:\n tiendas.append({'id': row[0], 'nombre': row[1], 'foto': row[3],\n 'email': row[2]})\n productos = []\n cur.execute(\n 'SELECT u.descripcion, c.descripcion, u.precio, b.nombre, u.id_prod FROM listadeseos t LEFT JOIN tbl_productos u ON u.id_prod = t.id_producto LEFT JOIN tbl_categoriasproductos c ON u.id_categoria = c.id_catp LEFT JOIN tbl_usuarios b ON u.usr_id = b.id_usr WHERE t.usr_id =%s '\n , (session['id'],))\n rows = cur.fetchall()\n for row in rows:\n productos.append({'id': row[4], 'descripcion': row[0],\n 'categoria': row[1], 'precio': row[2], 'tienda': row[3]})\n data.append(productos)\n data.append(tiendas)\n return jsonify(data)\n except Exception as e:\n print(e)\n return jsonify('Ha ocurrido un error')\n finally:\n cur.close()\n\n\n@app.route('/reporte_ofertas_api', methods=['POST'])\ndef ofertas():\n try:\n cur = mysql.connect().cursor()\n if not 'usuario' in session:\n return jsonify('Debes iniciar sesion')\n else:\n _json = request.get_json(force=True)\n _fecha1 = _json['fechaInicio']\n _fecha2 = _json['fechaFin']\n _precio = _json['precio']\n _categoria = _json['categoria']\n query = (\n 'SELECT p.id_prod, p.descripcion, c.descripcion, p.precio, DATE_FORMAT(p.publicacion, %s) FROM tbl_productos p LEFT JOIN tbl_categoriasproductos c ON p.id_categoria = c.id_catp WHERE p.id_categoria = %s AND p.precio <= %s AND p.publicacion BETWEEN %s AND %s '\n )\n values = '%d %M %Y', _categoria, _precio, _fecha1, _fecha2\n cur.execute(query, values)\n productos = []\n rows = cur.fetchall()\n if rows:\n for row in rows:\n productos.append({'id': row[0], 'descripcion': row[1],\n 'categoria': row[2], 'precio': row[3],\n 'publicacion': row[4]})\n return jsonify(productos)\n except Exception as e:\n print(e)\n return jsonify('Ha ocurrido un error')\n finally:\n cur.close()\n", "step-5": "from 
flask import jsonify, request, render_template, redirect, session, flash\nfrom init import app\nfrom init import mysql\n\n#Devuelve la pagina de reportes\n@app.route('/reportes')\ndef reportes():\n try:\n cur = mysql.connect().cursor()\n if 'usuario' in session:\n return render_template('views/reportes.html', id=session['id'])\n else:\n return redirect('/login')\n except Exception as e:\n print(e)\n return jsonify('Ha ocurrido un error')\n finally:\n cur.close()\n\n#Se accede a los reportes de compras AXIOS\n@app.route('/reporte_compras_api', methods=['POST'])\ndef compras():\n try:\n cur = mysql.connect().cursor()\n\n if not 'usuario' in session:\n return jsonify('Debes registrarte')\n else:\n _json = request.get_json(force=True)\n # _id = session['id']\n _id = session['id']\n _fecha1 = _json['fechaInicio']\n _fecha2 = _json['fechaFin']\n\n data = []\n query = \"SELECT p.id_prod, p.descripcion, SUM(t.cantidad), p.precio, SUM(c.total), p.costoenvio FROM productos t LEFT JOIN tbl_productos p ON p.id_prod = t.id_prod LEFT JOIN tbl_compras c ON c.id_comp = t.id_comp WHERE c.id_comprador = %s AND c.fecha BETWEEN %s AND %s GROUP BY p.id_prod\"\n values = (_id, _fecha1, _fecha2)\n cur.execute(query, values)\n rows = cur.fetchall()\n for row in rows:\n data.append({'id': row[0], 'descripcion': row[1], 'costo envio': row[5] ,'precio':row[3],'cantidad': int(float(row[2])),'total': row[4]})\n\n res = jsonify(data)\n res.status_code = 200\n return res\n\n except Exception as e:\n print(e)\n return jsonify('Ha ocurrido un error')\n finally:\n cur.close()\n\n#Se obtienen las ventas AXIOS\n@app.route('/reporte_ventas_api', methods=['POST'])\ndef ventas():\n try:\n if 'usuario' in session and session['tipo_usuario'] == 'T':\n cur = mysql.connect().cursor()\n \n _json = request.get_json(force=True)\n _id = session['id']\n _fecha1 = _json['fechaInicio']\n _fecha2 = _json['fechaFin']\n\n data = []\n query = \"SELECT DISTINCT p.id_prod, p.descripcion, p.stock, 
DATE_FORMAT(p.publicacion, %s), p.precio, p.tiempoenvio, p.costoenvio, SUM(t.cantidad), SUM(c.total) FROM productos t LEFT JOIN tbl_productos p ON p.id_prod = t.id_prod LEFT JOIN tbl_compras c ON c.id_comp = t.id_comp WHERE p.usr_id = %s AND c.fecha BETWEEN %s AND %s GROUP BY p.id_prod\"\n values = (\"%d %M %Y\",_id, _fecha1, _fecha2)\n cur.execute(query, values)\n rows = cur.fetchall()\n for row in rows:\n data.append({'id': row[0], 'descripcion': row[1], 'stock':row[2], 'publicacion':row[3], 'precio':row[4], 'tiempo envio': row[5], 'costo envio': row[6],'cantidad': float(row[7]), 'total': row[8]})\n\n return jsonify(data)\n else:\n return jsonify('Debes registrarte como tienda')\n \n except Exception as e:\n print(e)\n return jsonify('Ha ocurrido un error')\n finally:\n cur.close()\n\n#Se otienen las facturas AXIOS\n@app.route('/reporte_facturas_api')\ndef facturas():\n try:\n cur = mysql.connect().cursor()\n\n facturas = []\n data = {}\n #Verifica si es un comprador o tienda\n if session['tipo_usuario'] is 'C':\n cur.execute(\"SELECT t.id_comp, DATE_FORMAT(t.fecha, %s), t.total, p.nombrepropietario, p.numero, u.nombre, d.numcasillero, d.provincia FROM tbl_compras t LEFT JOIN tbl_metodosdepago p ON t.id_pago = p.id_pago LEFT JOIN tbl_usuarios u ON u.id_usr = t.id_tienda LEFT JOIN tbl_direccionesdeenvio d ON d.id_dire = d.id_dire WHERE t.id_comprador = %s\", (\"%d %M %Y\",session['id'],))\n else:\n cur.execute(\"SELECT t.id_comp, DATE_FORMAT(t.fecha, %s), t.total, p.nombrepropietario, p.numero, u.nombre, d.numcasillero, d.provincia FROM tbl_compras t LEFT JOIN tbl_metodosdepago p ON t.id_pago = p.id_pago LEFT JOIN tbl_usuarios u ON u.id_usr = t.id_comprador LEFT JOIN tbl_direccionesdeenvio d ON d.id_dire = d.id_dire WHERE t.id_tienda = %s\", (\"%d %M %Y\",session['id'],))\n \n rows = cur.fetchall()\n for row in rows:\n if session['tipo_usuario'] is 'C':\n data = {'id': row[0], 'fecha':row[1], 'total':row[2], 'metodopago':{'propietario': row[3], 'numero': row[4]}, 
'tienda': row[5], 'direccionenvio': {'casillero':row[6], 'provincia':row[7]}, 'productos':[]}\n else:\n data = {'id': row[0], 'fecha':row[1], 'total':row[2], 'metodopago':{'propietario': row[3], 'numero': row[4]}, 'comprador': row[5], 'direccionenvio': {'casillero':row[6], 'provincia':row[7]}, 'productos':[]}\n\n cur.execute(\"SELECT p.descripcion, p.tiempoenvio, p.costoenvio, t.cantidad, p.precio FROM productos t LEFT JOIN tbl_productos p ON p.id_prod = t.id_prod WHERE t.id_comp = %s\", (data['id'], ))\n prods = cur.fetchall()\n for prod in prods:\n data['productos'].append({'descripcion': prod[0], 'tiempo envio': prod[1], 'costo envio':prod[2], 'cantidad':prod[3], 'precio':prod[4]})\n \n facturas.append(data)\n\n return jsonify(facturas)\n except Exception as e:\n print(e)\n return jsonify('Ha ocurrido un error')\n finally:\n cur.close()\n\n#Retornar las suscripciones del comprador AXIOS\n@app.route('/reporte_suscripciones_api')\ndef suscripciones():\n try:\n cur = mysql.connect().cursor()\n data = []\n tiendas = []\n cur.execute(\"SELECT u.id_usr, u.nombre, u.email, u.foto FROM seguir t LEFT JOIN tbl_usuarios u ON u.id_usr = t.id_tienda WHERE t.id_comprador =%s \", (session['id'],))\n rows = cur.fetchall()\n for row in rows:\n tiendas.append({'id': row[0], 'nombre':row[1], 'foto': row[3], 'email':row[2]})\n\n productos = []\n cur.execute(\"SELECT u.descripcion, c.descripcion, u.precio, b.nombre, u.id_prod FROM listadeseos t LEFT JOIN tbl_productos u ON u.id_prod = t.id_producto LEFT JOIN tbl_categoriasproductos c ON u.id_categoria = c.id_catp LEFT JOIN tbl_usuarios b ON u.usr_id = b.id_usr WHERE t.usr_id =%s \", (session['id'],))\n rows = cur.fetchall()\n for row in rows:\n productos.append({'id':row[4],'descripcion': row[0], 'categoria':row[1], 'precio':row[2], 'tienda': row[3]})\n\n\n data.append(productos)\n data.append(tiendas)\n return jsonify(data)\n\n except Exception as e:\n print(e)\n return jsonify('Ha ocurrido un error')\n finally:\n 
cur.close()\n\n#Retorna productos con filtro establecido AXIOS\n@app.route('/reporte_ofertas_api', methods=['POST'])\ndef ofertas():\n try:\n cur = mysql.connect().cursor()\n if not 'usuario' in session:\n return jsonify('Debes iniciar sesion');\n else:\n _json = request.get_json(force=True)\n _fecha1 = _json['fechaInicio']\n _fecha2 = _json['fechaFin']\n _precio= _json['precio']\n _categoria = _json['categoria']\n\n query = \"SELECT p.id_prod, p.descripcion, c.descripcion, p.precio, DATE_FORMAT(p.publicacion, %s) FROM tbl_productos p LEFT JOIN tbl_categoriasproductos c ON p.id_categoria = c.id_catp WHERE p.id_categoria = %s AND p.precio <= %s AND p.publicacion BETWEEN %s AND %s \"\n values =(\"%d %M %Y\", _categoria, _precio, _fecha1, _fecha2)\n cur.execute(query, values)\n\n productos = []\n rows = cur.fetchall()\n if rows:\n for row in rows:\n productos.append({'id':row[0],'descripcion': row[1], 'categoria':row[2], 'precio': row[3], 'publicacion':row[4]})\n\n return jsonify(productos)\n except Exception as e:\n print(e)\n return jsonify('Ha ocurrido un error')\n finally:\n cur.close()", "step-ids": [ 4, 5, 6, 7, 8 ] }
[ 4, 5, 6, 7, 8 ]
N = int(input()) K = int(input()) xs = list(map(int, input().split())) dist = 0 for x in xs: dist += min(x, K - x) print(dist * 2)
normal
{ "blob_id": "a65ab0faf08c13f007a132fb92f358a35834fdb7", "index": 2556, "step-1": "<mask token>\n", "step-2": "<mask token>\nfor x in xs:\n dist += min(x, K - x)\nprint(dist * 2)\n", "step-3": "N = int(input())\nK = int(input())\nxs = list(map(int, input().split()))\ndist = 0\nfor x in xs:\n dist += min(x, K - x)\nprint(dist * 2)\n", "step-4": null, "step-5": null, "step-ids": [ 0, 1, 2 ] }
[ 0, 1, 2 ]
<|reserved_special_token_0|> @router.get('/{prefix_id}') def redirect_to_board(project: Project=Depends(get_project_by_prefix)): return RedirectResponse(url=project.notion_board_url) <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> @router.get('/{prefix_id}') def redirect_to_board(project: Project=Depends(get_project_by_prefix)): return RedirectResponse(url=project.notion_board_url) @router.get('/{prefix_id}/{ticket_id}') def redirect_to_ticket(ticket_id: str, project: Project=Depends( get_project_by_prefix)): ticket = project.query_ticket(ticket_id=ticket_id) notion_url = setting.notion_base_url + ticket.id.replace('-', '') return RedirectResponse(url=notion_url) <|reserved_special_token_1|> <|reserved_special_token_0|> router = APIRouter(prefix='/go') @router.get('/{prefix_id}') def redirect_to_board(project: Project=Depends(get_project_by_prefix)): return RedirectResponse(url=project.notion_board_url) @router.get('/{prefix_id}/{ticket_id}') def redirect_to_ticket(ticket_id: str, project: Project=Depends( get_project_by_prefix)): ticket = project.query_ticket(ticket_id=ticket_id) notion_url = setting.notion_base_url + ticket.id.replace('-', '') return RedirectResponse(url=notion_url) <|reserved_special_token_1|> from fastapi import APIRouter, Depends from fastapi.responses import RedirectResponse import app.setting as setting from app.dependencies import get_project_by_prefix from app.entities.project import Project router = APIRouter(prefix='/go') @router.get('/{prefix_id}') def redirect_to_board(project: Project=Depends(get_project_by_prefix)): return RedirectResponse(url=project.notion_board_url) @router.get('/{prefix_id}/{ticket_id}') def redirect_to_ticket(ticket_id: str, project: Project=Depends( get_project_by_prefix)): ticket = project.query_ticket(ticket_id=ticket_id) notion_url = setting.notion_base_url + ticket.id.replace('-', '') return RedirectResponse(url=notion_url) <|reserved_special_token_1|> from fastapi 
import APIRouter, Depends from fastapi.responses import RedirectResponse import app.setting as setting from app.dependencies import get_project_by_prefix from app.entities.project import Project router = APIRouter( prefix="/go", ) @router.get("/{prefix_id}") def redirect_to_board(project: Project = Depends(get_project_by_prefix)): return RedirectResponse(url=project.notion_board_url) @router.get("/{prefix_id}/{ticket_id}") def redirect_to_ticket( ticket_id: str, project: Project = Depends(get_project_by_prefix) ): ticket = project.query_ticket(ticket_id=ticket_id) notion_url = setting.notion_base_url + ticket.id.replace("-", "") return RedirectResponse(url=notion_url)
flexible
{ "blob_id": "49b295c3e323695779eb32181193ef88b678b34d", "index": 6340, "step-1": "<mask token>\n\n\n@router.get('/{prefix_id}')\ndef redirect_to_board(project: Project=Depends(get_project_by_prefix)):\n return RedirectResponse(url=project.notion_board_url)\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\n@router.get('/{prefix_id}')\ndef redirect_to_board(project: Project=Depends(get_project_by_prefix)):\n return RedirectResponse(url=project.notion_board_url)\n\n\n@router.get('/{prefix_id}/{ticket_id}')\ndef redirect_to_ticket(ticket_id: str, project: Project=Depends(\n get_project_by_prefix)):\n ticket = project.query_ticket(ticket_id=ticket_id)\n notion_url = setting.notion_base_url + ticket.id.replace('-', '')\n return RedirectResponse(url=notion_url)\n", "step-3": "<mask token>\nrouter = APIRouter(prefix='/go')\n\n\n@router.get('/{prefix_id}')\ndef redirect_to_board(project: Project=Depends(get_project_by_prefix)):\n return RedirectResponse(url=project.notion_board_url)\n\n\n@router.get('/{prefix_id}/{ticket_id}')\ndef redirect_to_ticket(ticket_id: str, project: Project=Depends(\n get_project_by_prefix)):\n ticket = project.query_ticket(ticket_id=ticket_id)\n notion_url = setting.notion_base_url + ticket.id.replace('-', '')\n return RedirectResponse(url=notion_url)\n", "step-4": "from fastapi import APIRouter, Depends\nfrom fastapi.responses import RedirectResponse\nimport app.setting as setting\nfrom app.dependencies import get_project_by_prefix\nfrom app.entities.project import Project\nrouter = APIRouter(prefix='/go')\n\n\n@router.get('/{prefix_id}')\ndef redirect_to_board(project: Project=Depends(get_project_by_prefix)):\n return RedirectResponse(url=project.notion_board_url)\n\n\n@router.get('/{prefix_id}/{ticket_id}')\ndef redirect_to_ticket(ticket_id: str, project: Project=Depends(\n get_project_by_prefix)):\n ticket = project.query_ticket(ticket_id=ticket_id)\n notion_url = setting.notion_base_url + ticket.id.replace('-', '')\n return 
RedirectResponse(url=notion_url)\n", "step-5": "from fastapi import APIRouter, Depends\nfrom fastapi.responses import RedirectResponse\n\nimport app.setting as setting\nfrom app.dependencies import get_project_by_prefix\nfrom app.entities.project import Project\n\n\nrouter = APIRouter(\n prefix=\"/go\",\n)\n\n\n@router.get(\"/{prefix_id}\")\ndef redirect_to_board(project: Project = Depends(get_project_by_prefix)):\n return RedirectResponse(url=project.notion_board_url)\n\n\n@router.get(\"/{prefix_id}/{ticket_id}\")\ndef redirect_to_ticket(\n ticket_id: str, project: Project = Depends(get_project_by_prefix)\n):\n ticket = project.query_ticket(ticket_id=ticket_id)\n notion_url = setting.notion_base_url + ticket.id.replace(\"-\", \"\")\n return RedirectResponse(url=notion_url)\n", "step-ids": [ 1, 2, 3, 4, 5 ] }
[ 1, 2, 3, 4, 5 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> class Solution(object): <|reserved_special_token_0|> <|reserved_special_token_1|> class Solution(object): def moveZeroes(self, nums): """ 给定一个数组 nums,编写一个函数将所有 0 移动到数组的末尾,同时保持非零元素的相对顺序。 --- 输入: [0,1,0,3,12] 输出: [1,3,12,0,0] --- 思路; :type nums: List[int] :rtype: void Do not return anything, modify nums in-place instead. """ num = nums.count(0) while 0 in nums: nums.remove(0) for i in range(num): nums.append(0) def moveZeroes1(self, nums): n = len(nums) i = 0 j = 0 while i < n: if nums[i] != 0: nums[j], nums[i] = nums[i], nums[j] j += 1 i += 1
flexible
{ "blob_id": "ece80a7765674f9d2991029bb86486b616a90f58", "index": 3944, "step-1": "<mask token>\n", "step-2": "class Solution(object):\n <mask token>\n", "step-3": "class Solution(object):\n\n def moveZeroes(self, nums):\n \"\"\"\n\t\t给定一个数组 nums,编写一个函数将所有 0 移动到数组的末尾,同时保持非零元素的相对顺序。\n\t\t---\n\t\t输入: [0,1,0,3,12]\n\t\t输出: [1,3,12,0,0]\n\t\t---\n\t\t思路;\n\n\t\t:type nums: List[int]\n\t\t:rtype: void Do not return anything, modify nums in-place instead.\n\t\t\"\"\"\n num = nums.count(0)\n while 0 in nums:\n nums.remove(0)\n for i in range(num):\n nums.append(0)\n\n def moveZeroes1(self, nums):\n n = len(nums)\n i = 0\n j = 0\n while i < n:\n if nums[i] != 0:\n nums[j], nums[i] = nums[i], nums[j]\n j += 1\n i += 1\n", "step-4": null, "step-5": null, "step-ids": [ 0, 1, 2 ] }
[ 0, 1, 2 ]
<|reserved_special_token_0|> def stationarity_test(mylynxts): from statsmodels.tsa.stattools import adfuller print('Results of Dickey-Fuller Test:') df_test = adfuller(mylynxts, autolag='AIC') df_output = pd.Series(df_test[0:4], index=['Test Statistic', 'p-value', '#lags_used', 'Number of Observation Used']) print(df_output) <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> def stationarity_test(mylynxts): from statsmodels.tsa.stattools import adfuller print('Results of Dickey-Fuller Test:') df_test = adfuller(mylynxts, autolag='AIC') df_output = pd.Series(df_test[0:4], index=['Test Statistic', 'p-value', '#lags_used', 'Number of Observation Used']) print(df_output) stationarity_test(mylynxts) <|reserved_special_token_0|> plt.plot(mylynxts) plt.plot(results_AR.fittedvalues, color='red') <|reserved_special_token_0|> results_AR.summary() <|reserved_special_token_0|> plt.figure(figsize=(10, 6)) plt.hist(results_AR.resid, bins='auto', density=True, rwidth=0.85, label= 'residuals') <|reserved_special_token_0|> plt.plot(x, p, 'm', linewidth=2) plt.grid(axis='y', alpha=0.2) plt.xlabel('Residuals') plt.ylabel('Density') plt.title('Residuals 2,0,0 vs Normal Distribution - Mean =' + str(round(mu, 2)) + ', std =' + str(round(std, 2))) plt.show() <|reserved_special_token_1|> <|reserved_special_token_0|> rcParams['figure.figsize'] = 15, 6 mylynx_df = pd.read_csv('LYNXdata.csv', header=0, names=['year', 'trappings'], index_col=0) mylynxts = pd.Series(mylynx_df['trappings'].values, index=pd.DatetimeIndex( data=tuple(pd.date_range(31 / 12 / 1821, periods=114, freq='A-DEC')), freq='A-DEC')) def stationarity_test(mylynxts): from statsmodels.tsa.stattools import adfuller print('Results of Dickey-Fuller Test:') df_test = adfuller(mylynxts, autolag='AIC') df_output = pd.Series(df_test[0:4], index=['Test Statistic', 'p-value', '#lags_used', 'Number of Observation Used']) print(df_output) stationarity_test(mylynxts) model = ARIMA(mylynxts, order=(3, 
0, 0)) results_AR = model.fit() plt.plot(mylynxts) plt.plot(results_AR.fittedvalues, color='red') <|reserved_special_token_0|> results_AR.summary() fig = plt.figure(figsize=(12, 8)) ax1 = fig.add_subplot(211) fig = plot_acf(results_AR.resid, lags=20, ax=ax1) <|reserved_special_token_0|> plt.figure(figsize=(10, 6)) plt.hist(results_AR.resid, bins='auto', density=True, rwidth=0.85, label= 'residuals') mu, std = norm.fit(results_AR.resid) xmin, xmax = plt.xlim() x = np.linspace(xmin, xmax, 100) p = norm.pdf(x, mu, std) plt.plot(x, p, 'm', linewidth=2) plt.grid(axis='y', alpha=0.2) plt.xlabel('Residuals') plt.ylabel('Density') plt.title('Residuals 2,0,0 vs Normal Distribution - Mean =' + str(round(mu, 2)) + ', std =' + str(round(std, 2))) plt.show() <|reserved_special_token_1|> <|reserved_special_token_0|> import matplotlib.pylab as plt import pandas as pd import numpy as np import statsmodels as sm from statsmodels.graphics.tsaplots import plot_acf, plot_pacf from statsmodels.tsa.stattools import acf, pacf from statsmodels.tsa.arima_model import ARIMA from matplotlib.pylab import rcParams rcParams['figure.figsize'] = 15, 6 mylynx_df = pd.read_csv('LYNXdata.csv', header=0, names=['year', 'trappings'], index_col=0) mylynxts = pd.Series(mylynx_df['trappings'].values, index=pd.DatetimeIndex( data=tuple(pd.date_range(31 / 12 / 1821, periods=114, freq='A-DEC')), freq='A-DEC')) def stationarity_test(mylynxts): from statsmodels.tsa.stattools import adfuller print('Results of Dickey-Fuller Test:') df_test = adfuller(mylynxts, autolag='AIC') df_output = pd.Series(df_test[0:4], index=['Test Statistic', 'p-value', '#lags_used', 'Number of Observation Used']) print(df_output) stationarity_test(mylynxts) model = ARIMA(mylynxts, order=(3, 0, 0)) results_AR = model.fit() plt.plot(mylynxts) plt.plot(results_AR.fittedvalues, color='red') <|reserved_special_token_0|> results_AR.summary() fig = plt.figure(figsize=(12, 8)) ax1 = fig.add_subplot(211) fig = plot_acf(results_AR.resid, 
lags=20, ax=ax1) from scipy.stats import norm plt.figure(figsize=(10, 6)) plt.hist(results_AR.resid, bins='auto', density=True, rwidth=0.85, label= 'residuals') mu, std = norm.fit(results_AR.resid) xmin, xmax = plt.xlim() x = np.linspace(xmin, xmax, 100) p = norm.pdf(x, mu, std) plt.plot(x, p, 'm', linewidth=2) plt.grid(axis='y', alpha=0.2) plt.xlabel('Residuals') plt.ylabel('Density') plt.title('Residuals 2,0,0 vs Normal Distribution - Mean =' + str(round(mu, 2)) + ', std =' + str(round(std, 2))) plt.show() <|reserved_special_token_1|> #!/usr/bin/env python3 # -*- coding: utf-8 -*- """ Created on Fri May 24 18:46:26 2019 @author: kiran """ import matplotlib.pylab as plt import pandas as pd import numpy as np import statsmodels as sm from statsmodels.graphics.tsaplots import plot_acf, plot_pacf from statsmodels.tsa.stattools import acf, pacf from statsmodels.tsa.arima_model import ARIMA from matplotlib.pylab import rcParams rcParams['figure.figsize'] = 15,6 #importing library and preparing dataset mylynx_df = pd.read_csv('LYNXdata.csv', header = 0, names = ['year','trappings'], index_col=0) mylynxts = pd.Series(mylynx_df['trappings'].values, index = pd.DatetimeIndex(data=(tuple(pd.date_range(31/12/1821, periods = 114, freq = 'A-DEC'))), freq= 'A-DEC')) #Dickey-fuller test def stationarity_test(mylynxts): from statsmodels.tsa.stattools import adfuller print('Results of Dickey-Fuller Test:') df_test = adfuller(mylynxts, autolag='AIC') df_output = pd.Series(df_test[0:4], index=['Test Statistic','p-value','#lags_used','Number of Observation Used']) print(df_output) stationarity_test(mylynxts) #Arima Model model = ARIMA(mylynxts, order=(3,0,0)) results_AR = model.fit() plt.plot(mylynxts) plt.plot(results_AR.fittedvalues, color='red') ''' information criteria and resdiuals need to be checked. 
''' #information summary results_AR.summary() #residual plot fig = plt.figure(figsize=(12,8)) ax1 = fig.add_subplot(211) fig = plot_acf(results_AR.resid, lags=20, ax = ax1) #importing function for nomral distribution from scipy.stats import norm plt.figure(figsize=(10,6)) plt.hist(results_AR.resid, bins='auto', density=True, rwidth=0.85, label='residuals') #density true - norm.dist line curve mu,std = norm.fit(results_AR.resid) xmin,xmax = plt.xlim() x = np.linspace(xmin,xmax,100) p = norm.pdf(x,mu,std) plt.plot(x,p,'m',linewidth=2) plt.grid(axis='y',alpha=0.2) plt.xlabel('Residuals') plt.ylabel('Density') plt.title('Residuals 2,0,0 vs Normal Distribution - Mean ='+ str(round(mu,2))+', std ='+str(round(std,2))) plt.show()
flexible
{ "blob_id": "8e28135da60f8e11459697c4ae9c63e60c437d7a", "index": 9501, "step-1": "<mask token>\n\n\ndef stationarity_test(mylynxts):\n from statsmodels.tsa.stattools import adfuller\n print('Results of Dickey-Fuller Test:')\n df_test = adfuller(mylynxts, autolag='AIC')\n df_output = pd.Series(df_test[0:4], index=['Test Statistic', 'p-value',\n '#lags_used', 'Number of Observation Used'])\n print(df_output)\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\ndef stationarity_test(mylynxts):\n from statsmodels.tsa.stattools import adfuller\n print('Results of Dickey-Fuller Test:')\n df_test = adfuller(mylynxts, autolag='AIC')\n df_output = pd.Series(df_test[0:4], index=['Test Statistic', 'p-value',\n '#lags_used', 'Number of Observation Used'])\n print(df_output)\n\n\nstationarity_test(mylynxts)\n<mask token>\nplt.plot(mylynxts)\nplt.plot(results_AR.fittedvalues, color='red')\n<mask token>\nresults_AR.summary()\n<mask token>\nplt.figure(figsize=(10, 6))\nplt.hist(results_AR.resid, bins='auto', density=True, rwidth=0.85, label=\n 'residuals')\n<mask token>\nplt.plot(x, p, 'm', linewidth=2)\nplt.grid(axis='y', alpha=0.2)\nplt.xlabel('Residuals')\nplt.ylabel('Density')\nplt.title('Residuals 2,0,0 vs Normal Distribution - Mean =' + str(round(mu,\n 2)) + ', std =' + str(round(std, 2)))\nplt.show()\n", "step-3": "<mask token>\nrcParams['figure.figsize'] = 15, 6\nmylynx_df = pd.read_csv('LYNXdata.csv', header=0, names=['year',\n 'trappings'], index_col=0)\nmylynxts = pd.Series(mylynx_df['trappings'].values, index=pd.DatetimeIndex(\n data=tuple(pd.date_range(31 / 12 / 1821, periods=114, freq='A-DEC')),\n freq='A-DEC'))\n\n\ndef stationarity_test(mylynxts):\n from statsmodels.tsa.stattools import adfuller\n print('Results of Dickey-Fuller Test:')\n df_test = adfuller(mylynxts, autolag='AIC')\n df_output = pd.Series(df_test[0:4], index=['Test Statistic', 'p-value',\n '#lags_used', 'Number of Observation Used'])\n print(df_output)\n\n\nstationarity_test(mylynxts)\nmodel = 
ARIMA(mylynxts, order=(3, 0, 0))\nresults_AR = model.fit()\nplt.plot(mylynxts)\nplt.plot(results_AR.fittedvalues, color='red')\n<mask token>\nresults_AR.summary()\nfig = plt.figure(figsize=(12, 8))\nax1 = fig.add_subplot(211)\nfig = plot_acf(results_AR.resid, lags=20, ax=ax1)\n<mask token>\nplt.figure(figsize=(10, 6))\nplt.hist(results_AR.resid, bins='auto', density=True, rwidth=0.85, label=\n 'residuals')\nmu, std = norm.fit(results_AR.resid)\nxmin, xmax = plt.xlim()\nx = np.linspace(xmin, xmax, 100)\np = norm.pdf(x, mu, std)\nplt.plot(x, p, 'm', linewidth=2)\nplt.grid(axis='y', alpha=0.2)\nplt.xlabel('Residuals')\nplt.ylabel('Density')\nplt.title('Residuals 2,0,0 vs Normal Distribution - Mean =' + str(round(mu,\n 2)) + ', std =' + str(round(std, 2)))\nplt.show()\n", "step-4": "<mask token>\nimport matplotlib.pylab as plt\nimport pandas as pd\nimport numpy as np\nimport statsmodels as sm\nfrom statsmodels.graphics.tsaplots import plot_acf, plot_pacf\nfrom statsmodels.tsa.stattools import acf, pacf\nfrom statsmodels.tsa.arima_model import ARIMA\nfrom matplotlib.pylab import rcParams\nrcParams['figure.figsize'] = 15, 6\nmylynx_df = pd.read_csv('LYNXdata.csv', header=0, names=['year',\n 'trappings'], index_col=0)\nmylynxts = pd.Series(mylynx_df['trappings'].values, index=pd.DatetimeIndex(\n data=tuple(pd.date_range(31 / 12 / 1821, periods=114, freq='A-DEC')),\n freq='A-DEC'))\n\n\ndef stationarity_test(mylynxts):\n from statsmodels.tsa.stattools import adfuller\n print('Results of Dickey-Fuller Test:')\n df_test = adfuller(mylynxts, autolag='AIC')\n df_output = pd.Series(df_test[0:4], index=['Test Statistic', 'p-value',\n '#lags_used', 'Number of Observation Used'])\n print(df_output)\n\n\nstationarity_test(mylynxts)\nmodel = ARIMA(mylynxts, order=(3, 0, 0))\nresults_AR = model.fit()\nplt.plot(mylynxts)\nplt.plot(results_AR.fittedvalues, color='red')\n<mask token>\nresults_AR.summary()\nfig = plt.figure(figsize=(12, 8))\nax1 = fig.add_subplot(211)\nfig = 
plot_acf(results_AR.resid, lags=20, ax=ax1)\nfrom scipy.stats import norm\nplt.figure(figsize=(10, 6))\nplt.hist(results_AR.resid, bins='auto', density=True, rwidth=0.85, label=\n 'residuals')\nmu, std = norm.fit(results_AR.resid)\nxmin, xmax = plt.xlim()\nx = np.linspace(xmin, xmax, 100)\np = norm.pdf(x, mu, std)\nplt.plot(x, p, 'm', linewidth=2)\nplt.grid(axis='y', alpha=0.2)\nplt.xlabel('Residuals')\nplt.ylabel('Density')\nplt.title('Residuals 2,0,0 vs Normal Distribution - Mean =' + str(round(mu,\n 2)) + ', std =' + str(round(std, 2)))\nplt.show()\n", "step-5": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Fri May 24 18:46:26 2019\n@author: kiran\n\"\"\"\nimport matplotlib.pylab as plt\nimport pandas as pd\nimport numpy as np\nimport statsmodels as sm\nfrom statsmodels.graphics.tsaplots import plot_acf, plot_pacf\nfrom statsmodels.tsa.stattools import acf, pacf\nfrom statsmodels.tsa.arima_model import ARIMA\nfrom matplotlib.pylab import rcParams\nrcParams['figure.figsize'] = 15,6\n\n#importing library and preparing dataset\nmylynx_df = pd.read_csv('LYNXdata.csv', header = 0, names = ['year','trappings'], index_col=0)\nmylynxts = pd.Series(mylynx_df['trappings'].values, index = pd.DatetimeIndex(data=(tuple(pd.date_range(31/12/1821, periods = 114, freq = 'A-DEC'))), freq= 'A-DEC'))\n\n#Dickey-fuller test\ndef stationarity_test(mylynxts):\n from statsmodels.tsa.stattools import adfuller\n print('Results of Dickey-Fuller Test:')\n df_test = adfuller(mylynxts, autolag='AIC')\n df_output = pd.Series(df_test[0:4], index=['Test Statistic','p-value','#lags_used','Number of Observation Used'])\n print(df_output)\nstationarity_test(mylynxts)\n\n#Arima Model\nmodel = ARIMA(mylynxts, order=(3,0,0))\nresults_AR = model.fit()\nplt.plot(mylynxts)\nplt.plot(results_AR.fittedvalues, color='red')\n\n'''\ninformation criteria and resdiuals need to be checked.\n'''\n#information summary\nresults_AR.summary()\n\n\n#residual plot\nfig = 
plt.figure(figsize=(12,8))\nax1 = fig.add_subplot(211)\nfig = plot_acf(results_AR.resid, lags=20, ax = ax1)\n\n#importing function for nomral distribution\nfrom scipy.stats import norm\nplt.figure(figsize=(10,6))\nplt.hist(results_AR.resid, bins='auto', density=True, rwidth=0.85, label='residuals') #density true - norm.dist line curve\nmu,std = norm.fit(results_AR.resid)\nxmin,xmax = plt.xlim()\nx = np.linspace(xmin,xmax,100)\np = norm.pdf(x,mu,std)\nplt.plot(x,p,'m',linewidth=2)\nplt.grid(axis='y',alpha=0.2)\nplt.xlabel('Residuals')\nplt.ylabel('Density')\nplt.title('Residuals 2,0,0 vs Normal Distribution - Mean ='+ str(round(mu,2))+', std ='+str(round(std,2)))\nplt.show()\n", "step-ids": [ 1, 2, 3, 4, 5 ] }
[ 1, 2, 3, 4, 5 ]
import os import stat from optparse import OptionParser from bbpgsql.configuration import get_config_from_filename_and_set_up_logging from bbpgsql.configuration.general import get_data_dir from subprocess import check_output import sys VERSION = '' class BadArgumentException(Exception): def __init__(self, msg): self.msg = msg def __str__(self): return self.msg class TooManyArgumentsException(Exception): def __init__(self, msg): self.msg = msg def __str__(self): return self.msg class NotEnoughArgumentsException(Exception): def __init__(self, msg): self.msg = msg def __str__(self): return self.msg class UsedArchivepgsqlAsArchiveWAL(Exception): def __init__(self, msg): self.msg = msg def __str__(self): return self.msg def get_version(): # override "version" with a constant string for release version = VERSION or check_output(['git', 'describe']).strip() return ' '.join(['%prog', version]) def create_common_parser(**kwargs): kwargs['version'] = get_version() parser = OptionParser(**kwargs) parser.add_option('-c', '--config', dest='config_file', help='configuration file', default='/etc/bbpgsql.ini') parser.add_option('--dry-run', dest='dry_run', help='test run - do not actually modify any files', action='store_true', default=False) return parser def common_parse_args(args=None): parser = create_common_parser() options, args = parser.parse_args(args) return parser, options, args def common_validate_options_and_args(options=None, args=None): if not os.path.exists(options.config_file): raise Exception("File %s does not exist" % (options.config_file)) if not os.access(options.config_file, os.R_OK): raise Exception("No read access for %s" % (options.config_file)) config_stats = os.stat(options.config_file) if ((config_stats.st_mode & stat.S_IRWXG) | (config_stats.st_mode & stat.S_IRWXO)): raise Exception("File %s has open group or other permissions" % (options.config_file)) return True def non_destructive_minimal_parse_and_validate_args(args=None): args = args or sys.argv[:] 
parser, options, args = common_parse_args(args) common_validate_options_and_args(options, args) return options, args def archivewal_parse_args(args=None): archivewal_usage = ' '.join([ os.path.basename(sys.argv[0]), '[options]', '<path_to_wal_file_to_archive>']) parser = create_common_parser(usage=archivewal_usage) options, args = parser.parse_args(args) return parser, options, args def is_relative_path(wal_path): return not os.path.isabs(wal_path) def wal_file_exists(config, wal_path): return os.path.isfile(get_wal_filename(config, wal_path)) def get_wal_filename(config, wal_path): data_dir = get_data_dir(config) return os.path.join(data_dir, wal_path) def is_valid_file(config, wal_path): return is_relative_path(wal_path) and wal_file_exists(config, wal_path) def archivewal_validate_options_and_args(options=None, args=None): args = args or [] if not common_validate_options_and_args(options, args): return False config = get_config_from_filename_and_set_up_logging(options.config_file) if len(args) != 1 or not is_valid_file(config, args[0]): raise Exception('A relative path to a WAL file to be archived' \ ' must be provided!') return True def archivepgsql_parse_args(args=None): archivepgsql_usage = ' '.join([ os.path.basename(sys.argv[0]), '[options]']) parser = create_common_parser(usage=archivepgsql_usage) options, args = parser.parse_args(args) return parser, options, args def archivepgsql_validate_options_and_args(options=None, args=None): if not common_validate_options_and_args(options, args): return False if args: if args[0].startswith('pg_xlog'): raise UsedArchivepgsqlAsArchiveWAL('archivepgsql was called with' \ ' a WAL file path as an argument. This is' \ ' probably due to configuring archivepgsql' \ ' as the archive_command in the PGSQL' \ ' configuration instead of archivewal.') raise TooManyArgumentsException('archivepgsql should not be called' \ ' with any arguments. 
Are you using it as the' \ ' archive_command instead of archivewal?') return True def restorewal_parse_args(args=None): restorewal_usage = ' '.join([ os.path.basename(sys.argv[0]), '[options]', '<name_of_wal_file_to_restore>', '<path_to_write_restored_file>', ]) parser = create_common_parser(usage=restorewal_usage) options, args = parser.parse_args(args) return parser, options, args def restorewal_validate_options_and_args(options=None, args=None): args = args or [] if not common_validate_options_and_args(options, args): return False nargs = len(args) if nargs != 2: raise Exception('restorewal must be given the name of the WAL' \ ' file to retrieve and the destination path to' \ ' restore to.') return True def storagestats_parse_args(args=None): storagestats_usage = ' '.join([ os.path.basename(sys.argv[0]), '[options]']) parser = create_common_parser(usage=storagestats_usage) options, args = parser.parse_args(args) return parser, options, args def storagestats_validate_options_and_args(options=None, args=None): if not common_validate_options_and_args(options, args): return False if args: raise TooManyArgumentsException('storagestats takes no arguments') return True
normal
{ "blob_id": "eed79a3895975a0475c0b192bd8a42e80def2e78", "index": 2502, "step-1": "<mask token>\n\n\nclass BadArgumentException(Exception):\n\n def __init__(self, msg):\n self.msg = msg\n\n def __str__(self):\n return self.msg\n\n\nclass TooManyArgumentsException(Exception):\n\n def __init__(self, msg):\n self.msg = msg\n\n def __str__(self):\n return self.msg\n\n\nclass NotEnoughArgumentsException(Exception):\n\n def __init__(self, msg):\n self.msg = msg\n\n def __str__(self):\n return self.msg\n\n\nclass UsedArchivepgsqlAsArchiveWAL(Exception):\n\n def __init__(self, msg):\n self.msg = msg\n\n def __str__(self):\n return self.msg\n\n\ndef get_version():\n version = VERSION or check_output(['git', 'describe']).strip()\n return ' '.join(['%prog', version])\n\n\ndef create_common_parser(**kwargs):\n kwargs['version'] = get_version()\n parser = OptionParser(**kwargs)\n parser.add_option('-c', '--config', dest='config_file', help=\n 'configuration file', default='/etc/bbpgsql.ini')\n parser.add_option('--dry-run', dest='dry_run', help=\n 'test run - do not actually modify any files', action='store_true',\n default=False)\n return parser\n\n\ndef common_parse_args(args=None):\n parser = create_common_parser()\n options, args = parser.parse_args(args)\n return parser, options, args\n\n\ndef common_validate_options_and_args(options=None, args=None):\n if not os.path.exists(options.config_file):\n raise Exception('File %s does not exist' % options.config_file)\n if not os.access(options.config_file, os.R_OK):\n raise Exception('No read access for %s' % options.config_file)\n config_stats = os.stat(options.config_file)\n if (config_stats.st_mode & stat.S_IRWXG | config_stats.st_mode & stat.\n S_IRWXO):\n raise Exception('File %s has open group or other permissions' %\n options.config_file)\n return True\n\n\ndef non_destructive_minimal_parse_and_validate_args(args=None):\n args = args or sys.argv[:]\n parser, options, args = common_parse_args(args)\n 
common_validate_options_and_args(options, args)\n return options, args\n\n\n<mask token>\n\n\ndef wal_file_exists(config, wal_path):\n return os.path.isfile(get_wal_filename(config, wal_path))\n\n\ndef get_wal_filename(config, wal_path):\n data_dir = get_data_dir(config)\n return os.path.join(data_dir, wal_path)\n\n\ndef is_valid_file(config, wal_path):\n return is_relative_path(wal_path) and wal_file_exists(config, wal_path)\n\n\n<mask token>\n\n\ndef archivepgsql_parse_args(args=None):\n archivepgsql_usage = ' '.join([os.path.basename(sys.argv[0]), '[options]'])\n parser = create_common_parser(usage=archivepgsql_usage)\n options, args = parser.parse_args(args)\n return parser, options, args\n\n\ndef archivepgsql_validate_options_and_args(options=None, args=None):\n if not common_validate_options_and_args(options, args):\n return False\n if args:\n if args[0].startswith('pg_xlog'):\n raise UsedArchivepgsqlAsArchiveWAL(\n 'archivepgsql was called with a WAL file path as an argument. This is probably due to configuring archivepgsql as the archive_command in the PGSQL configuration instead of archivewal.'\n )\n raise TooManyArgumentsException(\n 'archivepgsql should not be called with any arguments. 
Are you using it as the archive_command instead of archivewal?'\n )\n return True\n\n\ndef restorewal_parse_args(args=None):\n restorewal_usage = ' '.join([os.path.basename(sys.argv[0]), '[options]',\n '<name_of_wal_file_to_restore>', '<path_to_write_restored_file>'])\n parser = create_common_parser(usage=restorewal_usage)\n options, args = parser.parse_args(args)\n return parser, options, args\n\n\ndef restorewal_validate_options_and_args(options=None, args=None):\n args = args or []\n if not common_validate_options_and_args(options, args):\n return False\n nargs = len(args)\n if nargs != 2:\n raise Exception(\n 'restorewal must be given the name of the WAL file to retrieve and the destination path to restore to.'\n )\n return True\n\n\ndef storagestats_parse_args(args=None):\n storagestats_usage = ' '.join([os.path.basename(sys.argv[0]), '[options]'])\n parser = create_common_parser(usage=storagestats_usage)\n options, args = parser.parse_args(args)\n return parser, options, args\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\nclass BadArgumentException(Exception):\n\n def __init__(self, msg):\n self.msg = msg\n\n def __str__(self):\n return self.msg\n\n\nclass TooManyArgumentsException(Exception):\n\n def __init__(self, msg):\n self.msg = msg\n\n def __str__(self):\n return self.msg\n\n\nclass NotEnoughArgumentsException(Exception):\n\n def __init__(self, msg):\n self.msg = msg\n\n def __str__(self):\n return self.msg\n\n\nclass UsedArchivepgsqlAsArchiveWAL(Exception):\n\n def __init__(self, msg):\n self.msg = msg\n\n def __str__(self):\n return self.msg\n\n\ndef get_version():\n version = VERSION or check_output(['git', 'describe']).strip()\n return ' '.join(['%prog', version])\n\n\ndef create_common_parser(**kwargs):\n kwargs['version'] = get_version()\n parser = OptionParser(**kwargs)\n parser.add_option('-c', '--config', dest='config_file', help=\n 'configuration file', default='/etc/bbpgsql.ini')\n parser.add_option('--dry-run', dest='dry_run', help=\n 
'test run - do not actually modify any files', action='store_true',\n default=False)\n return parser\n\n\ndef common_parse_args(args=None):\n parser = create_common_parser()\n options, args = parser.parse_args(args)\n return parser, options, args\n\n\ndef common_validate_options_and_args(options=None, args=None):\n if not os.path.exists(options.config_file):\n raise Exception('File %s does not exist' % options.config_file)\n if not os.access(options.config_file, os.R_OK):\n raise Exception('No read access for %s' % options.config_file)\n config_stats = os.stat(options.config_file)\n if (config_stats.st_mode & stat.S_IRWXG | config_stats.st_mode & stat.\n S_IRWXO):\n raise Exception('File %s has open group or other permissions' %\n options.config_file)\n return True\n\n\ndef non_destructive_minimal_parse_and_validate_args(args=None):\n args = args or sys.argv[:]\n parser, options, args = common_parse_args(args)\n common_validate_options_and_args(options, args)\n return options, args\n\n\n<mask token>\n\n\ndef wal_file_exists(config, wal_path):\n return os.path.isfile(get_wal_filename(config, wal_path))\n\n\ndef get_wal_filename(config, wal_path):\n data_dir = get_data_dir(config)\n return os.path.join(data_dir, wal_path)\n\n\ndef is_valid_file(config, wal_path):\n return is_relative_path(wal_path) and wal_file_exists(config, wal_path)\n\n\n<mask token>\n\n\ndef archivepgsql_parse_args(args=None):\n archivepgsql_usage = ' '.join([os.path.basename(sys.argv[0]), '[options]'])\n parser = create_common_parser(usage=archivepgsql_usage)\n options, args = parser.parse_args(args)\n return parser, options, args\n\n\ndef archivepgsql_validate_options_and_args(options=None, args=None):\n if not common_validate_options_and_args(options, args):\n return False\n if args:\n if args[0].startswith('pg_xlog'):\n raise UsedArchivepgsqlAsArchiveWAL(\n 'archivepgsql was called with a WAL file path as an argument. 
This is probably due to configuring archivepgsql as the archive_command in the PGSQL configuration instead of archivewal.'\n )\n raise TooManyArgumentsException(\n 'archivepgsql should not be called with any arguments. Are you using it as the archive_command instead of archivewal?'\n )\n return True\n\n\ndef restorewal_parse_args(args=None):\n restorewal_usage = ' '.join([os.path.basename(sys.argv[0]), '[options]',\n '<name_of_wal_file_to_restore>', '<path_to_write_restored_file>'])\n parser = create_common_parser(usage=restorewal_usage)\n options, args = parser.parse_args(args)\n return parser, options, args\n\n\ndef restorewal_validate_options_and_args(options=None, args=None):\n args = args or []\n if not common_validate_options_and_args(options, args):\n return False\n nargs = len(args)\n if nargs != 2:\n raise Exception(\n 'restorewal must be given the name of the WAL file to retrieve and the destination path to restore to.'\n )\n return True\n\n\ndef storagestats_parse_args(args=None):\n storagestats_usage = ' '.join([os.path.basename(sys.argv[0]), '[options]'])\n parser = create_common_parser(usage=storagestats_usage)\n options, args = parser.parse_args(args)\n return parser, options, args\n\n\ndef storagestats_validate_options_and_args(options=None, args=None):\n if not common_validate_options_and_args(options, args):\n return False\n if args:\n raise TooManyArgumentsException('storagestats takes no arguments')\n return True\n", "step-3": "<mask token>\n\n\nclass BadArgumentException(Exception):\n\n def __init__(self, msg):\n self.msg = msg\n\n def __str__(self):\n return self.msg\n\n\nclass TooManyArgumentsException(Exception):\n\n def __init__(self, msg):\n self.msg = msg\n\n def __str__(self):\n return self.msg\n\n\nclass NotEnoughArgumentsException(Exception):\n\n def __init__(self, msg):\n self.msg = msg\n\n def __str__(self):\n return self.msg\n\n\nclass UsedArchivepgsqlAsArchiveWAL(Exception):\n\n def __init__(self, msg):\n self.msg = msg\n\n def 
__str__(self):\n return self.msg\n\n\ndef get_version():\n version = VERSION or check_output(['git', 'describe']).strip()\n return ' '.join(['%prog', version])\n\n\ndef create_common_parser(**kwargs):\n kwargs['version'] = get_version()\n parser = OptionParser(**kwargs)\n parser.add_option('-c', '--config', dest='config_file', help=\n 'configuration file', default='/etc/bbpgsql.ini')\n parser.add_option('--dry-run', dest='dry_run', help=\n 'test run - do not actually modify any files', action='store_true',\n default=False)\n return parser\n\n\ndef common_parse_args(args=None):\n parser = create_common_parser()\n options, args = parser.parse_args(args)\n return parser, options, args\n\n\ndef common_validate_options_and_args(options=None, args=None):\n if not os.path.exists(options.config_file):\n raise Exception('File %s does not exist' % options.config_file)\n if not os.access(options.config_file, os.R_OK):\n raise Exception('No read access for %s' % options.config_file)\n config_stats = os.stat(options.config_file)\n if (config_stats.st_mode & stat.S_IRWXG | config_stats.st_mode & stat.\n S_IRWXO):\n raise Exception('File %s has open group or other permissions' %\n options.config_file)\n return True\n\n\ndef non_destructive_minimal_parse_and_validate_args(args=None):\n args = args or sys.argv[:]\n parser, options, args = common_parse_args(args)\n common_validate_options_and_args(options, args)\n return options, args\n\n\ndef archivewal_parse_args(args=None):\n archivewal_usage = ' '.join([os.path.basename(sys.argv[0]), '[options]',\n '<path_to_wal_file_to_archive>'])\n parser = create_common_parser(usage=archivewal_usage)\n options, args = parser.parse_args(args)\n return parser, options, args\n\n\ndef is_relative_path(wal_path):\n return not os.path.isabs(wal_path)\n\n\ndef wal_file_exists(config, wal_path):\n return os.path.isfile(get_wal_filename(config, wal_path))\n\n\ndef get_wal_filename(config, wal_path):\n data_dir = get_data_dir(config)\n return 
os.path.join(data_dir, wal_path)\n\n\ndef is_valid_file(config, wal_path):\n return is_relative_path(wal_path) and wal_file_exists(config, wal_path)\n\n\ndef archivewal_validate_options_and_args(options=None, args=None):\n args = args or []\n if not common_validate_options_and_args(options, args):\n return False\n config = get_config_from_filename_and_set_up_logging(options.config_file)\n if len(args) != 1 or not is_valid_file(config, args[0]):\n raise Exception(\n 'A relative path to a WAL file to be archived must be provided!')\n return True\n\n\ndef archivepgsql_parse_args(args=None):\n archivepgsql_usage = ' '.join([os.path.basename(sys.argv[0]), '[options]'])\n parser = create_common_parser(usage=archivepgsql_usage)\n options, args = parser.parse_args(args)\n return parser, options, args\n\n\ndef archivepgsql_validate_options_and_args(options=None, args=None):\n if not common_validate_options_and_args(options, args):\n return False\n if args:\n if args[0].startswith('pg_xlog'):\n raise UsedArchivepgsqlAsArchiveWAL(\n 'archivepgsql was called with a WAL file path as an argument. This is probably due to configuring archivepgsql as the archive_command in the PGSQL configuration instead of archivewal.'\n )\n raise TooManyArgumentsException(\n 'archivepgsql should not be called with any arguments. 
Are you using it as the archive_command instead of archivewal?'\n )\n return True\n\n\ndef restorewal_parse_args(args=None):\n restorewal_usage = ' '.join([os.path.basename(sys.argv[0]), '[options]',\n '<name_of_wal_file_to_restore>', '<path_to_write_restored_file>'])\n parser = create_common_parser(usage=restorewal_usage)\n options, args = parser.parse_args(args)\n return parser, options, args\n\n\ndef restorewal_validate_options_and_args(options=None, args=None):\n args = args or []\n if not common_validate_options_and_args(options, args):\n return False\n nargs = len(args)\n if nargs != 2:\n raise Exception(\n 'restorewal must be given the name of the WAL file to retrieve and the destination path to restore to.'\n )\n return True\n\n\ndef storagestats_parse_args(args=None):\n storagestats_usage = ' '.join([os.path.basename(sys.argv[0]), '[options]'])\n parser = create_common_parser(usage=storagestats_usage)\n options, args = parser.parse_args(args)\n return parser, options, args\n\n\ndef storagestats_validate_options_and_args(options=None, args=None):\n if not common_validate_options_and_args(options, args):\n return False\n if args:\n raise TooManyArgumentsException('storagestats takes no arguments')\n return True\n", "step-4": "import os\nimport stat\nfrom optparse import OptionParser\nfrom bbpgsql.configuration import get_config_from_filename_and_set_up_logging\nfrom bbpgsql.configuration.general import get_data_dir\nfrom subprocess import check_output\nimport sys\nVERSION = ''\n\n\nclass BadArgumentException(Exception):\n\n def __init__(self, msg):\n self.msg = msg\n\n def __str__(self):\n return self.msg\n\n\nclass TooManyArgumentsException(Exception):\n\n def __init__(self, msg):\n self.msg = msg\n\n def __str__(self):\n return self.msg\n\n\nclass NotEnoughArgumentsException(Exception):\n\n def __init__(self, msg):\n self.msg = msg\n\n def __str__(self):\n return self.msg\n\n\nclass UsedArchivepgsqlAsArchiveWAL(Exception):\n\n def __init__(self, msg):\n 
self.msg = msg\n\n def __str__(self):\n return self.msg\n\n\ndef get_version():\n version = VERSION or check_output(['git', 'describe']).strip()\n return ' '.join(['%prog', version])\n\n\ndef create_common_parser(**kwargs):\n kwargs['version'] = get_version()\n parser = OptionParser(**kwargs)\n parser.add_option('-c', '--config', dest='config_file', help=\n 'configuration file', default='/etc/bbpgsql.ini')\n parser.add_option('--dry-run', dest='dry_run', help=\n 'test run - do not actually modify any files', action='store_true',\n default=False)\n return parser\n\n\ndef common_parse_args(args=None):\n parser = create_common_parser()\n options, args = parser.parse_args(args)\n return parser, options, args\n\n\ndef common_validate_options_and_args(options=None, args=None):\n if not os.path.exists(options.config_file):\n raise Exception('File %s does not exist' % options.config_file)\n if not os.access(options.config_file, os.R_OK):\n raise Exception('No read access for %s' % options.config_file)\n config_stats = os.stat(options.config_file)\n if (config_stats.st_mode & stat.S_IRWXG | config_stats.st_mode & stat.\n S_IRWXO):\n raise Exception('File %s has open group or other permissions' %\n options.config_file)\n return True\n\n\ndef non_destructive_minimal_parse_and_validate_args(args=None):\n args = args or sys.argv[:]\n parser, options, args = common_parse_args(args)\n common_validate_options_and_args(options, args)\n return options, args\n\n\ndef archivewal_parse_args(args=None):\n archivewal_usage = ' '.join([os.path.basename(sys.argv[0]), '[options]',\n '<path_to_wal_file_to_archive>'])\n parser = create_common_parser(usage=archivewal_usage)\n options, args = parser.parse_args(args)\n return parser, options, args\n\n\ndef is_relative_path(wal_path):\n return not os.path.isabs(wal_path)\n\n\ndef wal_file_exists(config, wal_path):\n return os.path.isfile(get_wal_filename(config, wal_path))\n\n\ndef get_wal_filename(config, wal_path):\n data_dir = 
get_data_dir(config)\n return os.path.join(data_dir, wal_path)\n\n\ndef is_valid_file(config, wal_path):\n return is_relative_path(wal_path) and wal_file_exists(config, wal_path)\n\n\ndef archivewal_validate_options_and_args(options=None, args=None):\n args = args or []\n if not common_validate_options_and_args(options, args):\n return False\n config = get_config_from_filename_and_set_up_logging(options.config_file)\n if len(args) != 1 or not is_valid_file(config, args[0]):\n raise Exception(\n 'A relative path to a WAL file to be archived must be provided!')\n return True\n\n\ndef archivepgsql_parse_args(args=None):\n archivepgsql_usage = ' '.join([os.path.basename(sys.argv[0]), '[options]'])\n parser = create_common_parser(usage=archivepgsql_usage)\n options, args = parser.parse_args(args)\n return parser, options, args\n\n\ndef archivepgsql_validate_options_and_args(options=None, args=None):\n if not common_validate_options_and_args(options, args):\n return False\n if args:\n if args[0].startswith('pg_xlog'):\n raise UsedArchivepgsqlAsArchiveWAL(\n 'archivepgsql was called with a WAL file path as an argument. This is probably due to configuring archivepgsql as the archive_command in the PGSQL configuration instead of archivewal.'\n )\n raise TooManyArgumentsException(\n 'archivepgsql should not be called with any arguments. 
Are you using it as the archive_command instead of archivewal?'\n )\n return True\n\n\ndef restorewal_parse_args(args=None):\n restorewal_usage = ' '.join([os.path.basename(sys.argv[0]), '[options]',\n '<name_of_wal_file_to_restore>', '<path_to_write_restored_file>'])\n parser = create_common_parser(usage=restorewal_usage)\n options, args = parser.parse_args(args)\n return parser, options, args\n\n\ndef restorewal_validate_options_and_args(options=None, args=None):\n args = args or []\n if not common_validate_options_and_args(options, args):\n return False\n nargs = len(args)\n if nargs != 2:\n raise Exception(\n 'restorewal must be given the name of the WAL file to retrieve and the destination path to restore to.'\n )\n return True\n\n\ndef storagestats_parse_args(args=None):\n storagestats_usage = ' '.join([os.path.basename(sys.argv[0]), '[options]'])\n parser = create_common_parser(usage=storagestats_usage)\n options, args = parser.parse_args(args)\n return parser, options, args\n\n\ndef storagestats_validate_options_and_args(options=None, args=None):\n if not common_validate_options_and_args(options, args):\n return False\n if args:\n raise TooManyArgumentsException('storagestats takes no arguments')\n return True\n", "step-5": "import os\nimport stat\nfrom optparse import OptionParser\nfrom bbpgsql.configuration import get_config_from_filename_and_set_up_logging\nfrom bbpgsql.configuration.general import get_data_dir\nfrom subprocess import check_output\nimport sys\n\nVERSION = ''\n\n\nclass BadArgumentException(Exception):\n def __init__(self, msg):\n self.msg = msg\n\n def __str__(self):\n return self.msg\n\n\nclass TooManyArgumentsException(Exception):\n def __init__(self, msg):\n self.msg = msg\n\n def __str__(self):\n return self.msg\n\n\nclass NotEnoughArgumentsException(Exception):\n def __init__(self, msg):\n self.msg = msg\n\n def __str__(self):\n return self.msg\n\n\nclass UsedArchivepgsqlAsArchiveWAL(Exception):\n def __init__(self, msg):\n self.msg 
= msg\n\n def __str__(self):\n return self.msg\n\n\ndef get_version():\n # override \"version\" with a constant string for release\n version = VERSION or check_output(['git', 'describe']).strip()\n return ' '.join(['%prog', version])\n\n\ndef create_common_parser(**kwargs):\n kwargs['version'] = get_version()\n parser = OptionParser(**kwargs)\n\n parser.add_option('-c', '--config', dest='config_file',\n help='configuration file', default='/etc/bbpgsql.ini')\n\n parser.add_option('--dry-run', dest='dry_run',\n help='test run - do not actually modify any files',\n action='store_true',\n default=False)\n\n return parser\n\n\ndef common_parse_args(args=None):\n parser = create_common_parser()\n options, args = parser.parse_args(args)\n return parser, options, args\n\n\ndef common_validate_options_and_args(options=None, args=None):\n if not os.path.exists(options.config_file):\n raise Exception(\"File %s does not exist\" % (options.config_file))\n if not os.access(options.config_file, os.R_OK):\n raise Exception(\"No read access for %s\" % (options.config_file))\n config_stats = os.stat(options.config_file)\n if ((config_stats.st_mode & stat.S_IRWXG) |\n (config_stats.st_mode & stat.S_IRWXO)):\n raise Exception(\"File %s has open group or other permissions\" %\n (options.config_file))\n return True\n\n\ndef non_destructive_minimal_parse_and_validate_args(args=None):\n args = args or sys.argv[:]\n parser, options, args = common_parse_args(args)\n common_validate_options_and_args(options, args)\n return options, args\n\n\ndef archivewal_parse_args(args=None):\n archivewal_usage = ' '.join([\n os.path.basename(sys.argv[0]),\n '[options]',\n '<path_to_wal_file_to_archive>'])\n parser = create_common_parser(usage=archivewal_usage)\n options, args = parser.parse_args(args)\n return parser, options, args\n\n\ndef is_relative_path(wal_path):\n return not os.path.isabs(wal_path)\n\n\ndef wal_file_exists(config, wal_path):\n return os.path.isfile(get_wal_filename(config, 
wal_path))\n\n\ndef get_wal_filename(config, wal_path):\n data_dir = get_data_dir(config)\n return os.path.join(data_dir, wal_path)\n\n\ndef is_valid_file(config, wal_path):\n return is_relative_path(wal_path) and wal_file_exists(config, wal_path)\n\n\ndef archivewal_validate_options_and_args(options=None, args=None):\n args = args or []\n if not common_validate_options_and_args(options, args):\n return False\n config = get_config_from_filename_and_set_up_logging(options.config_file)\n if len(args) != 1 or not is_valid_file(config, args[0]):\n raise Exception('A relative path to a WAL file to be archived' \\\n ' must be provided!')\n return True\n\n\ndef archivepgsql_parse_args(args=None):\n archivepgsql_usage = ' '.join([\n os.path.basename(sys.argv[0]),\n '[options]'])\n parser = create_common_parser(usage=archivepgsql_usage)\n options, args = parser.parse_args(args)\n return parser, options, args\n\n\ndef archivepgsql_validate_options_and_args(options=None, args=None):\n if not common_validate_options_and_args(options, args):\n return False\n if args:\n if args[0].startswith('pg_xlog'):\n raise UsedArchivepgsqlAsArchiveWAL('archivepgsql was called with' \\\n ' a WAL file path as an argument. This is' \\\n ' probably due to configuring archivepgsql' \\\n ' as the archive_command in the PGSQL' \\\n ' configuration instead of archivewal.')\n raise TooManyArgumentsException('archivepgsql should not be called' \\\n ' with any arguments. 
Are you using it as the' \\\n ' archive_command instead of archivewal?')\n return True\n\n\ndef restorewal_parse_args(args=None):\n restorewal_usage = ' '.join([\n os.path.basename(sys.argv[0]),\n '[options]',\n '<name_of_wal_file_to_restore>',\n '<path_to_write_restored_file>',\n ])\n parser = create_common_parser(usage=restorewal_usage)\n options, args = parser.parse_args(args)\n return parser, options, args\n\n\ndef restorewal_validate_options_and_args(options=None, args=None):\n args = args or []\n if not common_validate_options_and_args(options, args):\n return False\n nargs = len(args)\n if nargs != 2:\n raise Exception('restorewal must be given the name of the WAL' \\\n ' file to retrieve and the destination path to' \\\n ' restore to.')\n return True\n\n\ndef storagestats_parse_args(args=None):\n storagestats_usage = ' '.join([\n os.path.basename(sys.argv[0]),\n '[options]'])\n parser = create_common_parser(usage=storagestats_usage)\n options, args = parser.parse_args(args)\n return parser, options, args\n\n\ndef storagestats_validate_options_and_args(options=None, args=None):\n if not common_validate_options_and_args(options, args):\n return False\n if args:\n raise TooManyArgumentsException('storagestats takes no arguments')\n return True\n", "step-ids": [ 25, 26, 29, 31, 32 ] }
[ 25, 26, 29, 31, 32 ]