diff --git "a/2562.jsonl" "b/2562.jsonl" new file mode 100644--- /dev/null +++ "b/2562.jsonl" @@ -0,0 +1,2117 @@ +{"seq_id":"13806399895","text":"import numpy as np\nimport tensorflow as tf\n\nfrom sklearn.utils import shuffle\n\nif tf.__version__.startswith('2'):\n tf.compat.v1.disable_eager_execution()\n\n\nclass autoencoder:\n def __init__(self, D, M, id=0, init_weights=None):\n self.D = D\n self.M = M\n self.id = str(id)\n self.W = None\n self.bh = None\n self.bo = None\n self._session = None\n self._initialize_variables(init_weights)\n self._prepare_operations()\n\n def _initialize_variables(self, init_weights):\n if init_weights is None:\n #W_init = (np.random.randn(D, self.M) / np.sqrt(D)).astype(np.float32)\n W_init = (np.random.randn(self.D, self.M)).astype(np.float32)\n bh_init = np.zeros(self.M).astype(np.float32)\n bo_init = np.zeros(self.D).astype(np.float32)\n else:\n W_init, bh_init, bo_init = init_weights\n\n self.W = tf.Variable(W_init)\n self.bh = tf.Variable(bh_init)\n self.bo = tf.Variable(bo_init)\n\n def _prepare_operations(self):\n self._x_input = tf.compat.v1.placeholder(tf.float32, shape=(None, self.D))\n self._z = self.forward_hidden(self._x_input) # for transform() later\n self._x_hat = self.forward_output(self._x_input)\n\n x_hat_logits = self.forward_logits(self._x_input)\n self._cost = tf.reduce_mean(\n input_tensor=tf.nn.sigmoid_cross_entropy_with_logits(\n labels=self._x_input,\n logits=x_hat_logits,\n )\n )\n\n self._train_op = tf.compat.v1.train.AdamOptimizer(1e-1).minimize(self._cost)\n # self.train_op = tf.train.MomentumOptimizer(1e-3, momentum=0.9).minimize(self.cost)\n\n def set_session(self, session):\n self._session = session\n\n def fit(self, X, n_epochs=1, batch_size=100):\n N, D = X.shape\n assert self.D == D\n n_batches = N // batch_size\n\n history = []\n self._session.run(tf.compat.v1.global_variables_initializer())\n for i in range(n_epochs):\n X = shuffle(X)\n for j in range(n_batches):\n batch = X[j * batch_size : (j * batch_size + batch_size)]\n _, c = self._session.run((self._train_op, self._cost), feed_dict={self._x_input: batch})\n if j % 100 == 0:\n print(f'autoencoder:{self.id} - cost after epoch:{i}, batch:{j} - {c}')\n history.append(c)\n return history\n\n def forward_hidden(self, x):\n _, D = x.shape\n assert self.D == D\n return tf.nn.sigmoid(tf.matmul(x, self.W) + self.bh)\n\n def forward_logits(self, x):\n z = self.forward_hidden(x)\n return tf.matmul(z, tf.transpose(a=self.W)) + self.bo\n\n def forward_output(self, x):\n return tf.nn.sigmoid(self.forward_logits(x))\n\n def transform(self, X):\n _, D = X.shape\n assert self.D == D\n return self._session.run(self._z, feed_dict={self._x_input: X})\n\n def predict(self, X):\n _, D = X.shape\n assert self.D == D\n return self._session.run(self._x_hat, feed_dict={self._x_input: X})\n\n","repo_name":"figielf/MLLab","sub_path":"mllib/autoencoders/autoencoder.py","file_name":"autoencoder.py","file_ext":"py","file_size_in_byte":3139,"program_lang":"python","lang":"en","doc_type":"code","stars":0,"dataset":"github-code","pt":"70"} +{"seq_id":"74282165987","text":"from flask import request\nfrom flask_restx import Namespace, Resource, abort, reqparse\nfrom typing import List\n\nfrom application.persons.service import PersonService\nfrom application.persons.model import person_model, entity_with_type_model\nfrom application.utilities.wrap_functions import user_token_required, admin_token_required\nfrom application.utilities.paginating import paginate_results\n\napi = Namespace(\"Persons\", 
description=\"persons related operations\")\nperson = api.model(\"Person\", person_model)\nentity_type_news = api.model(\"Entity_Type_News\", entity_with_type_model)\n\npersons_pagin_parser = reqparse.RequestParser()\npersons_pagin_parser.add_argument('start', location='args', type=int, help='The position to start getting results')\npersons_pagin_parser.add_argument('limit', location='args', type=int, help='Limit the number of news returned')\n@api.route(\"/\")\nclass PersonsCollection(Resource):\n @api.doc(responses={200: 'OK'}, parser= persons_pagin_parser)\n @user_token_required\n def get(self) -> List:\n \"\"\"Get all Persons\n Limit 1000 person entities\n \"\"\"\n return paginate_results(persons_pagin_parser, request.base_url, PersonService.get_all)\n\n\n @api.doc(responses={200: 'OK', 201: 'Created', 400: 'Bad Request'})\n @api.expect(person, validate=True)\n @admin_token_required\n def post(self):\n \"\"\"Create a new person\n Use this method to create a new person.\n * Send a JSON object with the detail in the request body.\n ```\n {\n \"name\": \"Person Name\",\n \"entityID\": \"Person ID\",\n \"des\": \"Person Description\"\n }\n ```\n \"\"\"\n new_person = request.json\n per = PersonService.get_by_id(new_person['entityID'])\n if not per:\n result = PersonService.create(new_person)\n return result[0], 201\n else:\n return {\"message\": \"Unable to create because the person with this id already exists\"}, 400\n\n\n@api.route(\"/\")\nclass PersonEntity(Resource):\n @api.doc(responses={200: 'OK', 404: 'Not Found'})\n @user_token_required\n def get(self, id):\n \"\"\"Get a specific Person\"\"\"\n result = PersonService.get_by_id(id)\n if not result:\n return {\"message\": \"The person does not exist\"}, 404\n else:\n return result[0]\n\n @api.doc(responses={200: 'OK', 404: 'Not Found', 400: 'Bad Request'})\n @api.expect(person)\n @admin_token_required\n def put(self, id):\n \"\"\" Update a person\n Use this method to change properties of a person.\n * Send a JSON object with new properties in the request body.\n ```\n {\n \"name\": \"New Person Name\",\n \"des\": \"New Person Description\",\n \"entityID\": \"Person ID\"\n }\n ```\n * Specify the ID of the category to modify in the request URL path.\n \"\"\"\n data = request.json\n if data[\"entityID\"] != id:\n return {\"message\": \"entityID property in the incoming json object and id parameter in the URL path are\"\n \"not matched\"}, 400\n per = PersonService.get_by_id(id)\n if not per:\n return {\"message\": \"The person does not exist\"}, 404\n else:\n return PersonService.update(data, id)\n\n @api.doc(responses={200: 'OK', 400: 'Bad Request'})\n @admin_token_required\n def delete(self, id):\n \"\"\"Delete a person\"\"\"\n is_referenced = PersonService.is_in_news(id)\n if is_referenced:\n return {\"message\": \"Unable to delete because the person with this id is being referenced\"}, 400\n else:\n PersonService.delete(id)\n return {\"message\": \"Successful\"}, 200\n\n@api.route(\"/search\")\nclass SearchPersonResource(Resource):\n @api.doc(responses={200: 'OK'}, parser=persons_pagin_parser)\n @user_token_required\n def post(self):\n if \"text\" in request.json:\n text_search = request.json[\"text\"]\n else:\n text_search = ' '\n return paginate_results(persons_pagin_parser, request.base_url, PersonService.search, text_search)\n\n@api.route(\"/merge_nodes\")\nclass MergeNodesResource(Resource):\n @api.expect(entity_type_news, validate=True)\n @admin_token_required\n def post(self):\n \"\"\"Merge entities having the Person type\n 
*Keep entityID property of one entity, combine for the rest properties and also merge relations\n \"\"\"\n set_entity_id = request.json[\"set_entity_id\"]\n return PersonService.merge_nodes(set_entity_id)\n\n\n\n\n\n\n\n\n\n","repo_name":"dinhphien/api_server","sub_path":"application/persons/controller.py","file_name":"controller.py","file_ext":"py","file_size_in_byte":4600,"program_lang":"python","lang":"en","doc_type":"code","stars":0,"dataset":"github-code","pt":"70"} +{"seq_id":"20294136672","text":"import streamlit as st\r\nfrom wordcloud import WordCloud, STOPWORDS\r\nfrom PIL import Image\r\nimport numpy as np\r\nimport matplotlib.pyplot as plt\r\n\r\nst.title(\"WORD CLOUD\")\r\n\r\ndef draw_wordcloud(background_color, max_words, img, width, height, text):\r\n wc = WordCloud(\r\n background_color=background_color,\r\n max_words=max_words,\r\n stopwords = set(STOPWORDS),\r\n mask = np.array(img)\r\n )\r\n\r\n fig = plt.figure()\r\n wc.generate(text)\r\n fig.set_figwidth(width) # set width\r\n fig.set_figheight(height) # set height\r\n plt.imshow(wc, interpolation='bilinear')\r\n plt.axis('off')\r\n st.pyplot()\r\n\r\n\r\ndef main():\r\n st.sidebar.title(\"🌟 Select your favourite Image 🌟\")\r\n\r\n img = st.sidebar.selectbox(\"Mask Images for Word Cloud\",(\"CLOUD\",\"INDIA\",\"SPY\",\"STAR\",\"UPVOTE\",\"LEAF\",\"BIRD\"))\r\n\r\n st.sidebar.header(\"Preview\")\r\n img1 = Image.open(\"./Images/cloud.png\")\r\n image = img1\r\n st.sidebar.image(img1, width=250, caption=\"CLOUD\")\r\n\r\n img2 = Image.open(\"./Images/india.png\")\r\n st.sidebar.image(img2, width=250, caption=\"INDIA\")\r\n\r\n img3 = Image.open(\"./Images/spy.png\")\r\n st.sidebar.image(img3, width=250, caption=\"SPY\")\r\n\r\n img4 = Image.open(\"./Images/star.png\")\r\n st.sidebar.image(img4, width=250, caption=\"STAR\")\r\n\r\n img5 = Image.open(\"./Images/upvote.png\")\r\n st.sidebar.image(img5, width=250, caption=\"UPVOTE\")\r\n\r\n img6 = Image.open(\"./Images/leaf.png\")\r\n st.sidebar.image(img6, width=250, caption=\"LEAF\")\r\n\r\n img7 = Image.open(\"./Images/bird.png\")\r\n st.sidebar.image(img7, width=250, caption=\"BIRD\")\r\n\r\n if img == \"INDIA\":\r\n image = img2\r\n\r\n elif img == \"SPY\":\r\n image = img3\r\n\r\n elif img == \"STAR\":\r\n image = img4\r\n\r\n elif img == \"UPVOTE\":\r\n image = img5\r\n\r\n elif img == \"LEAF\":\r\n image = img6\r\n\r\n elif img == \"BIRD\":\r\n image = img7\r\n\r\n else:\r\n image = img1\r\n\r\n st.header(\"Select Background Color\")\r\n bgc = st.radio(\"Background Color\", (\"White\",\"Black\",\"Yellow\",\"Red\",\"Blue\",\"Green\",\"Orange\",\"Violet\"), key=\"bgc\")\r\n\r\n st.header(\"Select Maximim Number of Words\")\r\n max_words = st.slider(\"Max Words\" , 1000, 3000 , key=\"max_words\")\r\n\r\n st.header(\"Set Width and Height\")\r\n width = st.slider(\"Width\" , 10, 50 , key=\"width\")\r\n height = st.slider(\"Height\" , 10, 60 , key=\"height\")\r\n\r\n msg = st.text_area(\"Enter the Text\",\"Type Here...\")\r\n\r\n if st.button(\"Submit\",key=\"b1\"):\r\n draw_wordcloud(bgc, max_words, image, width, height, msg)\r\n\r\n\r\n\r\nif __name__ == '__main__':\r\n main()\r\n","repo_name":"JamesBondOOO7/Word-Cloud-App","sub_path":"app.py","file_name":"app.py","file_ext":"py","file_size_in_byte":2561,"program_lang":"python","lang":"en","doc_type":"code","stars":0,"dataset":"github-code","pt":"70"} +{"seq_id":"70809300388","text":"\ndef create_class(name, response):\n\tklass = type(name, (object,), {})\n\tfor key, value in response.items():\n\t\tif type(value) == 
dict:\n\t\t\tsubklass = create_class(key, value)\n\t\t\tsetattr(klass, key, subklass)\n\t\telse:\n\t\t\tsetattr(klass, key, value)\n\treturn klass","repo_name":"AmauryVanEspen/stackexchangepy","sub_path":"stackexchangepy/model.py","file_name":"model.py","file_ext":"py","file_size_in_byte":256,"program_lang":"python","lang":"en","doc_type":"code","stars":0,"dataset":"github-code","pt":"70"} +{"seq_id":"9440688694","text":"def heapify(arr,i,n):\r\n largest= i\r\n left= (2*i)+1\r\n right= (2*i)+2\r\n if leftarr[i]:\r\n largest= left\r\n if rightarr[largest]:\r\n largest= right\r\n if largest!=i:\r\n arr[i],arr[largest]= arr[largest],arr[i]\r\n heapify(arr,largest,n)\r\ndef heap_sort(arr):\r\n n= len(arr)\r\n for i in range(n//2-1,-1,-1): # n//2-1 th node is the last node with child\r\n heapify(arr,i,n)\r\n # now heapifying all nodes from right to left\r\n for i in range(n-1,0,-1):\r\n arr[0],arr[i]= arr[i],arr[0]\r\n heapify(arr,0,i)\r\n\r\ndef selection(arr):\r\n swaps=0\r\n n=len(arr)\r\n for i in range(n-1):\r\n min= i\r\n for j in range(i+1,n):\r\n if arr[j]=0 and arr[j]>key: \r\n arr[j+1]=arr[j]\r\n j-=1\r\n swaps+=1\r\n arr[j+1]=key\r\n print(\"swaps in insertion sort =\",swaps)\r\ndef bubble(arr):\r\n n=len(arr)\r\n swaps=0\r\n for i in range(n-1):\r\n for j in range(n-1-i): # here i am assuming that last i elements are sorted because after every swap heaviest element reach to last\r\n if arr[j]>arr[j+1]:\r\n temp= arr[j]\r\n arr[j]=arr[j+1]\r\n arr[j+1]=temp\r\n swaps+=1\r\n print(\"swaps in bubble sort =\",swaps)\r\n\r\ndef bubble_best(arr):\r\n n=len(arr)\r\n swaps=0\r\n while True:\r\n flag=0\r\n for i in range(1,n):\r\n if arr[i-1]>arr[i]:\r\n arr[i],arr[i-1]=arr[i-1],arr[i]\r\n swaps+=1\r\n flag=i\r\n n=flag\r\n if n==0:\r\n break\r\n print(\"swaps in bubble_best sort =\",swaps)\r\nif __name__=='__main__':\r\n arr=[10,1,3,4,5,15,9]\r\n bubble(arr)\r\n print(arr)\r\n arr=[10,1,3,4,5,15,9]\r\n insertion(arr)\r\n print(arr)\r\n arr=[10,1,3,4,5,15,9]\r\n selection(arr)\r\n print(arr)\r\n arr=[10,1,3,4,5,15,9]\r\n bubble_best(arr)\r\n print(arr)\r\n arr=[10,1,3,4,5,15,9]\r\n heap_sort(arr)\r\n print(arr)","repo_name":"rishabhsharma2305/Python_programms","sub_path":"sorting.py","file_name":"sorting.py","file_ext":"py","file_size_in_byte":2273,"program_lang":"python","lang":"en","doc_type":"code","stars":0,"dataset":"github-code","pt":"70"} +{"seq_id":"26587590576","text":"name = \"ada lovelace\"\nprint(name.title())\n\n# Above we stored the value, \"ada lovelace\" in a variable named, 'name'\n# The dot (.) 
after name in name.title() tells Python to make the title() method\n# act on the variable, name.\n\n# CONCATENATING STRINGS/COMBINING STRINGS\n\n# While we program more and more, we'll find that it is useful for us to combine strings\n# for example, let's say we wanted to combine to seperate variables,\n# like first name and last name.\n\nfirst_name = \"ada\"\nlast_name = \"lovelace\"\nfull_name = first_name + \" \" + last_name\n\nprint(full_name)\n\n# or with a little more content and use of variables:\nmessage = \"Hello \" + full_name.title() + \"!\"\nprint(message)\n","repo_name":"joetechem/cs_python","sub_path":"cs_python/solutions/first_string.py","file_name":"first_string.py","file_ext":"py","file_size_in_byte":677,"program_lang":"python","lang":"en","doc_type":"code","stars":0,"dataset":"github-code","pt":"70"} +{"seq_id":"40542837292","text":"def PositionalEncoding_gpu(x_emb,x):\n #depth BxN\n x_new = x-x.mean(dim=2).view(x.size(0),x.size(1),1)\n #max_dist = torch.max(torch.sqrt(torch.sum(x_new**2,dim=1,keepdim=True)),dim=2,keepdim=True)[0]\n\n depth = torch.zeros(x.size(0),x.size(2)).type(torch.LongTensor).cuda()\n pe = torch.zeros_like(x_emb).cuda()\n distances = torch.sqrt(torch.sum(x_new**2,dim=1,keepdim=True)) # Bx1xN\n distances = (10*distances).type(torch.LongTensor)\n\n depth = distances.view(x.size(0),x.size(2))\n idx = torch.tensor(range(x_emb.size(1))).repeat(x_emb.size(0)*x_emb.size(2),1).view(x_emb.size(0),x_emb.size(1),x_emb.size(2)).cuda()\n depth = depth.repeat(1,x_emb.size(1)).view(x_emb.size(0),x_emb.size(1),x_emb.size(2)).type(torch.FloatTensor).cuda()\n temp= depth/(10000**(idx.type(torch.FloatTensor)/x_emb.size(1)).cuda())\n pe = torch.from_numpy(np.sin(temp.cpu().numpy())).cuda()\n return pe # BxNxD\n\ndef PositionalEncoding_cpu(x_emb,x):\n x_new = x-x.mean(dim=2).view(x.size(0),x.size(1),1)\n \n distances = torch.sqrt(torch.sum(x_new**2,dim=1,keepdim=True))\n depth = distances.view(x.size(0),x.size(2)) \n idx = torch.tensor(range(x_emb.size(1))).repeat(x_emb.size(0)*x_emb.size(2),1).view(x_emb.size(0),x_emb.size(1),x_emb.size(2))\n depth = depth.repeat(1,x_emb.size(1)).view(x_emb.size(0),x_emb.size(1),x_emb.size(2)).type(torch.FloatTensor)\n temp= depth/(10000**(idx.type(torch.FloatTensor)/x_emb.size(1)))\n pe = torch.from_numpy(np.sin(temp.numpy())).cuda()\n return pe\n\nclass PositionalEncoding_mlp(nn.Module):\n def __init__(self,num_points):\n super(PositionalEncoding_mlp, self).__init__()\n self.num_points = num_points\n self.nn = nn.Sequential(nn.Linear(3, 64),\n nn.BatchNorm1d(num_points),\n nn.Tanh(),\n nn.Linear(64, 128),\n nn.BatchNorm1d(num_points),\n nn.Tanh(),\n nn.Linear(128, 256),\n nn.BatchNorm1d(num_points),\n nn.Tanh(),\n nn.Linear(256, 512),\n nn.BatchNorm1d(num_points),\n nn.Tanh())\n \n\n def forward(self, x_emb, x):\n # Absolute PE\n x_new = x-x.mean(dim=2).view(x.size(0),x.size(1),1)\n \n r = torch.sqrt(torch.sum(x_new**2,dim=1,keepdim=True))\n r = r.view(x.size(0),x.size(2),1)\n \n azimuth = torch.atan(x_new.transpose(1,0)[1]/x_new.transpose(1,0)[0])#.view(x.size(0),x.size(2),1) BxN\n nan = (azimuth!=azimuth)\n pos = (x_new.transpose(1,0)[1]>0)\n neg = (x_new.transpose(1,0)[1]<=0)\n azimuth[nan*pos] = torch.tensor(math.pi/2).cuda()\n azimuth[nan*neg] = torch.tensor(3*math.pi/2).cuda()\n azimuth = azimuth.view(x.size(0),x.size(2),1)\n inclination = torch.acos(x_new.transpose(1,0)[2]/r.view(x.size(0),x.size(2)))#.view(x.size(0),x.size(2),1) #x_new: 8x1024, r: 8x1024x1\n inclination[inclination!=inclination]=torch.tensor(0).cuda()\n 
inclination = inclination.view(x.size(0),x.size(2),1)\n total = torch.cat((r,azimuth,inclination),dim=-1)\n pe = self.nn(total)\n return pe.transpose(2,1)\n \n \n","repo_name":"a18700/PointCloud","sub_path":"GTModules/PositionalEncoding.py","file_name":"PositionalEncoding.py","file_ext":"py","file_size_in_byte":3365,"program_lang":"python","lang":"en","doc_type":"code","stars":0,"dataset":"github-code","pt":"70"} +{"seq_id":"4086754793","text":"class Solution:\n def findMedianSortedArrays(self, nums1: List[int], nums2: List[int]) -> float:\n n1, n2 = len(nums1), len(nums2)\n if n1 > n2:\n return self.findMedianSortedArrays(nums2, nums1)\n k = (n1 + n2 + 1) // 2\n # 下标: 0 1 2 3 4 5 6 (7)\n # n1+n2为奇(7): k=4,在中位数3偏右\n # n1+n2为偶(8): k=4, 在中位数3,4,偏右\n # 综上,下标k是中位数下标偏右,这样就可以复用c1, 偶数的时候只需要计算出c2\n lo, hi = 0, n1\n while lo < hi:\n i = lo + ((hi - lo) >> 1)\n j = k - i\n if nums1[i] < nums2[j - 1]:\n lo = i + 1\n else:\n hi = i\n i, j = lo, k - lo\n # nums1, nums2可能到数组头\n c1 = max(nums1[i - 1] if i > 0 else float('-Inf'), nums2[j - 1] if j > 0 else float('-Inf'))\n if (n1 + n2) & 1:\n return c1\n # nums1, nums2可能到数组尾\n c2 = min(nums1[i] if i < n1 else float('Inf'), nums2[j] if j < n2 else float('Inf'))\n return (c1 + c2) / 2.0\n","repo_name":"Iruze/SolutionsOnLeetcodeForZZW","sub_path":"4_MedianofTwoSortedArrays/findMedianSortedArrays_2.py","file_name":"findMedianSortedArrays_2.py","file_ext":"py","file_size_in_byte":1102,"program_lang":"python","lang":"zh","doc_type":"code","stars":0,"dataset":"github-code","pt":"70"} +{"seq_id":"7673390883","text":"# cycle - цикл\n# цикл while\n\nx = int(input())\ncount = 0\ny = 1\n\nwhile count < x:\n count += 1\n y *= count\nelse:\n print(y)\n\nx = ''\n\nwhile len(x) < 5:\n y = input('Ввод данных: ')\n if y == 'o':\n continue # прерывает итерацию цикла\n if y == 'l':\n break # прерывает весь цикл\n\n x += y\nelse:\n print(x)\n","repo_name":"WebDevRun/python_lessons","sub_path":"6_cycle_while.py","file_name":"6_cycle_while.py","file_ext":"py","file_size_in_byte":391,"program_lang":"python","lang":"ru","doc_type":"code","stars":0,"dataset":"github-code","pt":"70"} +{"seq_id":"33845762110","text":"from flask import Flask, request\nfrom flask_restful import Api, Resource, abort, reqparse\n\napp = Flask(__name__)\napi = Api(app)\n\nlaunch_put_args = reqparse.RequestParser()\nlaunch_put_args.add_argument('vehicle_name', type=str, help='Name Of Launch Vehicle is REQUIRED', required=True)\nlaunch_put_args.add_argument('launch_date', type=str, help='Date of Vehicle Launch is REQUIRED', required=True)\nlaunch_put_args.add_argument('mission_status', type=str, help='Status of Mission is REQUIRED', required=True)\n\nlaunches = {\n \n}\n\ndef abort_check(launch_1d, status):\n if status == 404:\n if launch_1d not in launches:\n abort(404, message=\"Launch ID not found :(\")\n elif status == 406:\n if launch_1d in launches:\n abort(406, message=\"Launch ID already exists :(\")\n\n\n\nclass Krypton(Resource):\n \n def get(self, launch_id):\n abort_check(launch_id, 404)\n return launches[launch_id]\n \n def post(self, launch_id):\n abort_check(launch_id, 404)\n return launches[launch_id]\n\n def put(self, launch_id):\n abort_check(launch_id, 406)\n args = launch_put_args.parse_args()\n launches[launch_id] = args\n return launches[launch_id], 201\n \n def delete(self, launch_id):\n abort_check(launch_id, 404)\n del launches[launch_id]\n return '', 204\n\n\napi.add_resource(Krypton, '/krypton/launches/')\n\nif __name__ == '__main__':\n 
app.run(debug=True)","repo_name":"programmer2215/flask-api","sub_path":"main.py","file_name":"main.py","file_ext":"py","file_size_in_byte":1474,"program_lang":"python","lang":"en","doc_type":"code","stars":0,"dataset":"github-code","pt":"70"} +{"seq_id":"519930376","text":"# region Import\nimport os, pickle\nfrom collections import Counter, defaultdict\nfrom typing import Dict, List, Tuple, Set\n\nimport numpy as np\nimport pandas as pd\nimport matplotlib.pyplot as plt\nimport seaborn as sns\nfrom sklearn.utils import shuffle\n\nsns.set_style('white')\npd.set_option('display.expand_frame_repr', False)\n# endregion\n\nfolder = 'C:/Users/phan/OneDrive - adesso Group/DataSet/movielens-20m-dataset/'\nsmallRating = os.path.join(folder, 'verySmallRating.csv')\ndf = pd.read_csv(smallRating)\n\nN = df['userId'].max() + 1 # number of users\nM = df['movieId'].max() + 1 # number of movies\n\n# split into train and test\ndf = shuffle(df)\ncutoff = int(0.8 * len(df))\ndfTrain = df.iloc[:cutoff]\ndfTest = df.iloc[cutoff:]\n\n# create 3 dicts for lookup\nuser2movie: Dict[int, List[int]] = defaultdict(list)\nmovie2user: Dict[int, List[int]] = defaultdict(list)\nuserMovie2rating: Dict[Tuple[int, int], float] = defaultdict(float)\nmovie2title: Dict[int, str] = defaultdict(str)\n\nprint('calling map user2movie and movie2user...')\ncount = 0\n\n\ndef mapUser2movieAndMovie2User(row: pd.Series):\n global count\n count += 1\n if count % 50000 == 0:\n print(f'processed: {count / cutoff * 100:.3f} %')\n\n userId = int(row['userId'])\n movieId = int(row['movieId'])\n rating = int(row['rating'])\n title = str(row['title'])\n\n user2movie[userId].append(movieId)\n movie2user[movieId].append(userId)\n userMovie2rating[(userId, movieId)] = rating\n movie2title[movieId] = title\n\n\ndfTrain.apply(lambda row: mapUser2movieAndMovie2User(row), axis=1)\n\n# test ratings dict\nuserMovie2ratingTest: Dict[Tuple[int, int], float] = defaultdict(float)\ncount = 0\n\n\ndef mapUserMovie2ratingTest(row: pd.Series):\n global count\n count += 1\n if count % 50000 == 0:\n print(f'processed: {count / cutoff * 100:.3f} %')\n userId = int(row['userId'])\n movieId = int(row['movieId'])\n rating = int(row['rating'])\n title = str(row['title'])\n userMovie2ratingTest[(userId, movieId)] = rating\n movie2title[movieId] = title\n\n\ndfTest.apply(lambda row: mapUserMovie2ratingTest(row), axis=1)\n\n# save dicts to pickle files\nprint('writing dicts to pickle files...')\nwith open(file=os.path.join(folder, 'user2movie.json'), mode='wb') as f:\n pickle.dump(user2movie, f)\nwith open(file=os.path.join(folder, 'movie2user.json'), mode='wb') as f:\n pickle.dump(movie2user, f)\nwith open(file=os.path.join(folder, 'userMovie2rating.json'), mode='wb') as f:\n pickle.dump(userMovie2rating, f)\nwith open(file=os.path.join(folder, 'userMovie2ratingTest.json'), mode='wb') as f:\n pickle.dump(userMovie2ratingTest, f)\nwith open(file=os.path.join(folder, 'movie2title.json'), mode='wb') as f:\n pickle.dump(movie2title, f)\n","repo_name":"nghiemphan93/machineLearning","sub_path":"recommendersystemAnddeeplearning/preprocess2dict.py","file_name":"preprocess2dict.py","file_ext":"py","file_size_in_byte":2706,"program_lang":"python","lang":"en","doc_type":"code","stars":0,"dataset":"github-code","pt":"70"} +{"seq_id":"11243026264","text":"import numpy as np\r\nfrom past.builtins import xrange\r\ndef Consum_matrix(c_m, c_sigma, q_c, M, F, flag_family):\r\n c = np.zeros(M);\r\n for j in range(M):\r\n if j%F==flag_family:\r\n c[j] =np.abs(np.random.normal(c_m, 
c_sigma)*q_c)\r\n else: \r\n c[j] =np.abs(np.random.normal(c_m, c_sigma)*(1-q_c))\r\n c = c/np.sum(c)*M*c_m \r\n c[0] = np.abs(np.random.normal(c_m, c_sigma)) \r\n return c\r\n\r\n\r\n\r\n##########################################################################\r\ndef Consum_matrix_MA(p, S, M):\r\n c = np.zeros((S, M));\r\n for i in range(S):\r\n for j in range(M):\r\n if np.random.rand() < p:\r\n c[i,j]= 1.0;\r\n return c","repo_name":"Emergent-Behaviors-in-Biology/random-qp","sub_path":"Eco_function/C_matrix.py","file_name":"C_matrix.py","file_ext":"py","file_size_in_byte":732,"program_lang":"python","lang":"en","doc_type":"code","stars":0,"dataset":"github-code","pt":"70"} +{"seq_id":"35549785570","text":"import datetime\nfrom src.services import (get_maestro_detalle,get_replica_entity,get_entities,get_last_run_replica_log,get_sucursal_local,insert_or_update_entity,create_replica_log,\n get_sucursales_replica)\nfrom src.database import get_local_pool_connection,get_remote_pool_connection\n\n\n\ndef replica_movimiento_almacen(action):\n\n try:\n localDB = get_local_pool_connection()\n except Exception as e:\n print(e) \n sucursal = get_sucursal_local()\n replica_movimiento_almacen_sucursal(action,sucursal)\n\n\ndef replica_sucursal_pull_movimiento_almacen():\n action = 'PULL'\n try:\n localDB = get_local_pool_connection()\n except Exception as e:\n print(e)\n sucursales = get_sucursales_replica(localDB)\n cnx = localDB.get_conexion()\n cursor = cnx.cursor(dictionary=True, buffered=True)\n for sucursal_replica in sucursales:\n \n sql_sucursal = f\"select * from sucursal where nombre = '{sucursal_replica['server']}' \"\n cursor.execute(sql_sucursal)\n sucursal = cursor.fetchone()\n \n print(sucursal)\n print(\"*\"*100)\n print(sucursal_replica['server'])\n replica_movimiento_almacen_sucursal(action,sucursal)\n \n cnx.close()\n\n \n\ndef replica_movimiento_almacen_sucursal(action,sucursal):\n try:\n localDB = get_local_pool_connection()\n except Exception as e:\n print(e)\n try:\n remoteDB = get_remote_pool_connection()\n except Exception as e:\n print(e) \n table = 'movimiento_de_almacen'\n fecha = datetime.datetime.today()\n if action == 'PUSH':\n replica_push_movimiento_almacen(localDB, remoteDB, action,table,fecha,sucursal)\n if action == 'PULL':\n replica_pull_movimiento_almacen(localDB, remoteDB, action,table,fecha,sucursal)\n\n\n\ndef replica_pull_movimiento_almacen(localDB, remoteDB, action,table,fecha,sucursal):\n\n last_run = get_last_run_replica_log(remoteDB,fecha,table,sucursal['nombre'],action) \n query = f\"select * from {table} where (date_created >= '{last_run}' or last_updated >= '{last_run}' ) and sucursal_id = '{sucursal['id']}'\"\n entities = get_entities(remoteDB,query)\n #create_replica_log(remoteDB,action,sucursal['nombre'],table)\n for entity in entities:\n mov,partidas = get_movimiento(remoteDB,entity['id'])\n insert_or_update_entity(localDB,table,mov)\n for partida in partidas:\n inventario = get_replica_entity(remoteDB,'inventario',partida['inventario_id'])\n insert_or_update_entity(localDB,'inventario',inventario)\n insert_or_update_entity(localDB,'movimiento_de_almacen_det',partida)\n\n\ndef replica_push_movimiento_almacen(localDB, remoteDB, action,table,fecha,sucursal):\n\n last_run = get_last_run_replica_log(remoteDB,fecha,table,sucursal['nombre'],action) \n query = f\"select * from {table} where (date_created >= '{last_run}' or last_updated >= '{last_run}' ) and sucursal_id = '{sucursal['id']}'\"\n entities = get_entities(localDB,query)\n 
#create_replica_log(remoteDB,action,sucursal['nombre'],table)\n for entity in entities:\n mov,partidas = get_movimiento(localDB,entity['id'])\n insert_or_update_entity(remoteDB,table,mov)\n for partida in partidas:\n inventario = get_replica_entity(localDB,'inventario',partida['inventario_id'])\n insert_or_update_entity(remoteDB,'inventario',inventario)\n insert_or_update_entity(remoteDB,'movimiento_de_almacen_det',partida)\n\n\n\ndef get_movimiento(localDB,id):\n query_sol = f\"select * from movimiento_de_almacen where id = '{id}' \" \n query_partidas = f\"select * from movimiento_de_almacen_det where movimiento_de_almacen_id = '{id}' \" \n movimiento, partidas = get_maestro_detalle(localDB,query_sol,query_partidas)\n return movimiento, partidas ","repo_name":"lquintanillab06/replica-push-pull","sub_path":"src/operations/replica_movimiento_almacen.py","file_name":"replica_movimiento_almacen.py","file_ext":"py","file_size_in_byte":3844,"program_lang":"python","lang":"en","doc_type":"code","stars":0,"dataset":"github-code","pt":"70"} +{"seq_id":"13377353048","text":"import unittest\nimport bdd\nimport io\n\n\nSIMPLE_FEATURE = \"\"\" Feature: awesomeness\nScenario: Do some magic\nGiven some issue\nWhen this and that is done\nThen something cool happens\n\"\"\"\n\n\nclass TestParser(unittest.TestCase):\n \"\"\" Check if the parser works correctly \"\"\"\n def setUp(self):\n self.parser = bdd.Parser()\n\n def test_a_feature(self):\n feature = self.parser.parse(io.StringIO(SIMPLE_FEATURE))\n self.assertEqual(3, len(feature.scenarios[0].steps))\n\n def test_a_multiline_feature(self):\n feature = \"\"\" Feature: awesomeness\n also considering other cool stuff\n in case of insane nice stuff\n\n Scenario: Do some magic\n\n Given some issue\n And something else\n When this\n And that is done\n Then something cool happens\n And also something really cool happens\n But then nothing happens\n \"\"\"\n self.parser.parse(io.StringIO(feature))\n\n def test_multi_feature(self):\n \"\"\" Test the parsing of multiple features in a single source \"\"\"\n feature = \"\"\" Feature: awesomeness\n Scenario: Do some magic\n Given some issue\n Feature: this feature must be in a seperate file\n \"\"\"\n with self.assertRaises(SyntaxError):\n self.parser.parse(io.StringIO(feature))\n\n def test_invalid_tag(self):\n feature = \"\"\" Feature: awesomeness\n @slow this @is invalid\n Scenario: Do some magic\n Given some issue\n \"\"\"\n with self.assertRaises(SyntaxError):\n self.parser.parse(io.StringIO(feature))\n\n def test_and_as_first_step(self):\n feature = \"\"\" Feature: awesomeness\n Scenario: Do some magic\n And this cannot be\n \"\"\"\n with self.assertRaises(SyntaxError):\n self.parser.parse(io.StringIO(feature))\n\n\nclass TestUnittestGenerator(unittest.TestCase):\n def test_a_feature(self):\n parser = bdd.Parser()\n feature = parser.parse(io.StringIO(SIMPLE_FEATURE))\n env = bdd.Environment()\n env.make_test_case(feature)\n\n\nif __name__ == '__main__':\n unittest.main()\n","repo_name":"windelbouwman/bdd","sub_path":"test/test_bdd.py","file_name":"test_bdd.py","file_ext":"py","file_size_in_byte":2135,"program_lang":"python","lang":"en","doc_type":"code","stars":2,"dataset":"github-code","pt":"70"} +{"seq_id":"9605246753","text":"from dimacs import *\nfrom queue import PriorityQueue \nfrom dataclasses import dataclass, field\nimport sys\nimport os\n\n\n@dataclass\nclass FindUnion:\n rank: int = 0\n parent: \"FindUnion\" = field(init=False, repr=False)\n\n def __post_init__(self):\n self.parent = self\n\n def 
find(self):\n if self.parent is not self:\n self.parent = self.parent.find()\n return self.parent\n\n def union(self, other: \"FindUnion\"):\n self = self.find()\n other = other.find()\n\n if self.rank > other.rank:\n other.parent = self\n else:\n if self.rank == other.rank:\n other.rank += 1\n self.parent = other.parent\n\n def same_set(self, other):\n return self.find() is other.find()\n\n def __repr__(self):\n return hex(id(self.find())).upper()\n \n\ndef turist_guide(V, L):\n unions = [FindUnion() for _ in range(V)]\n L.sort(key=lambda x: x[2], reverse=True)\n\n m = L[0][2]\n \n for (u, v, w) in L: \n if not unions[u - 1].same_set(unions[v - 1]):\n unions[u - 1].union(unions[v - 1])\n m = min(m, w)\n\n if unions[0].same_set(unions[1]):\n return m;\n\n return None\n \n\ndef _test(graph):\n solution = readSolution(f\"graphs/{graph}\")\n (V, L) = loadWeightedGraph(f\"graphs/{graph}\")\n result = turist_guide(V, L)\n\n print(f\"graph={graph}\", f\"solution={solution}\", f\"result={result}\")\n\n assert int(solution) == result \n\nif __name__ == \"__main__\":\n graph = \"g1\"\n if len(sys.argv) > 1:\n _test(graph)\n else:\n for graph in os.listdir(\"graphs/\"):\n _test(graph)\n\n\n\n","repo_name":"bgrzesik/sem3-2020-ag","sub_path":"lab1/turist_guide_find_union.py","file_name":"turist_guide_find_union.py","file_ext":"py","file_size_in_byte":1661,"program_lang":"python","lang":"en","doc_type":"code","stars":0,"dataset":"github-code","pt":"70"} +{"seq_id":"23519793531","text":"import csv\nimport similarity\n\n\ndef normalize_actual_elapsed_time(actual_elapsed_time):\n max_value = similarity.get_max(actual_elapsed_time)\n min_value = similarity.get_min(actual_elapsed_time)\n for x in range(0, len(actual_elapsed_time)):\n time = actual_elapsed_time[x]\n actual_elapsed_time[x] = similarity.normalize_numerical(time, min_value, max_value)\n return actual_elapsed_time\n\n\ndef normalize_origin(origin):\n origin = similarity.normalize_categorical(origin)\n return origin\n\n\ndef normalize_distance(distance):\n for y in range(0, len(distance)):\n distance[y] = similarity.normalize_ordinal(distance[y])\n return distance\n\n\ndef normalize_cancelled(cancelled):\n for x in range(0, len(cancelled)):\n if cancelled[x] == \"NA\":\n cancelled[x] = 0\n else:\n cancelled[x] = int(cancelled[x])\n return cancelled\n\n\ndef normalize_csv():\n\n header = []\n with open('original_stats.csv', 'rb') as csvfile:\n reader = csv.reader(csvfile, delimiter=',', quotechar=' ', quoting=csv.QUOTE_MINIMAL)\n\n data = [[], [], [], []]\n\n row_count = 0\n for row in reader:\n\n if row_count != 0:\n for y in range(0, len(row)):\n item = row[y]\n data[y].append(item)\n else:\n header = row\n row_count += 1\n\n actual_elapsed_time = data[0]\n origin = data[1]\n distance = data[2]\n cancelled = data[3]\n\n actual_elapsed_time = normalize_actual_elapsed_time(actual_elapsed_time)\n origin = normalize_origin(origin)\n distance = normalize_distance(distance)\n cancelled = normalize_cancelled(cancelled)\n\n with open('normalized_stats.csv', 'wb') as csvfile:\n writer = csv.writer(csvfile, delimiter=',', quotechar=' ', quoting=csv.QUOTE_MINIMAL)\n writer.writerow(header)\n\n i = 0\n while i < 1000:\n row = [actual_elapsed_time[i], origin[i], distance[i], cancelled[i]]\n writer.writerow(row)\n i += 
1\n\n\n\n\n","repo_name":"taraewilliams/CS455DataMining","sub_path":"normalize_csv.py","file_name":"normalize_csv.py","file_ext":"py","file_size_in_byte":2060,"program_lang":"python","lang":"en","doc_type":"code","stars":0,"dataset":"github-code","pt":"70"} +{"seq_id":"30323443904","text":"import libraryTeun as te\n\nsliderOne = None\nplot = None\nindex = 0\ni = 0\nn_times = 2\n\n\ndef setup():\n global sliderOne, buttonOne, plot\n size(1500, 500)\n frameRate(30)\n plot = te.Plot(75,75,1360,360,1360,360)\n sliderOne = te.SlideBar(20, 10, 1460, 50, 1, 50)\n updateClass()\n background(100)\n\ndef draw():\n global n_times\n background(100)\n sliderOne.update()\n n_times = floor(sliderOne.getValue())*2\n updateClass()\n\n\ndef updateClass():\n global index, i\n index += 0.01\n i += 0.02\n #newCos = 4/PI* ((cos(1*PI*i)/1) - (cos(3*PI*i)/3) + (cos(5*PI*i)/5) - (cos(7*PI*i)/7) + (cos(9*PI*i)/9) - (cos(11*PI*i)/11) + (cos(13*PI*i)/13) - (cos(15*PI*i)/15) + (cos(17*PI*i)/17))\n\n plot.update(map(getCos(n_times, i), -1.5, 1.5, 0, 360))\n\n\ndef getCos(n, i):\n z = 1\n sums = []\n sum = 0\n for _ in range(n):\n sums.append(cos(z*PI*i)/z)\n z += 2\n for x in range(0, n, 2):\n sum += sums[x] - sums[x+1]\n return 4/PI*sum\n","repo_name":"Teun-Schuur/Processing","sub_path":"Python/fourier_series/fourier_series.pyde","file_name":"fourier_series.pyde","file_ext":"pyde","file_size_in_byte":983,"program_lang":"python","lang":"en","doc_type":"code","stars":0,"dataset":"github-code","pt":"70"} +{"seq_id":"20235674746","text":"from my_functions import change_working_dir\nfrom my_functions import get_records_from_text_file\nfrom my_functions import write_records_to_text_file\n\ndef task1():\n DATA_FILE = 'scores.txt'\n #DATA_FILE = 'does_not_exist.txt'\n LOG_FILE = 'log.txt'\n\n change_working_dir() # Ensure that the Python working directory is the directory where this file is\n\n file_records = get_records_from_text_file(DATA_FILE) #IOErrors handled in function call\n\n if file_records != None:\n valid_records = []\n for record in file_records:\n try:\n name = str(record[0])\n score = int(record[1])\n valid_records.append([name, score])\n except ValueError: # Handle ValueErrors for bad score values\n print(f'Bad score value for {record[0]}, ignored.')\n \n student_count = len(valid_records) # Calculate the number of students\n class_ave = sum(record[1] for record in valid_records) / student_count # Calculate the average of student scores\n \n # Print the output 'The class average is 79 for 3 students.'\n print(f'The class average is {class_ave:.0f} for {student_count} students.')\n\n write_records_to_text_file(LOG_FILE, valid_records) # Write the output to log.txt. 
use 'with' statement\n\ntask1()\n","repo_name":"noozip2241993/learning-python","sub_path":"csulb-is-640/homework/homework5/task1.py","file_name":"task1.py","file_ext":"py","file_size_in_byte":1321,"program_lang":"python","lang":"en","doc_type":"code","stars":0,"dataset":"github-code","pt":"70"} +{"seq_id":"17746908351","text":"import os\nfrom waf_common import setup_houdini\n\nHHOME = r'C:/Users/alex/Documents/houdini16.0'\nDSO_HOME = os.path.join(HHOME, 'DSO')\n\n\ndef configure(conf):\n conf.setup_houdini()\n\n\ndef build(ctx):\n ctx.shlib(source=\"sop_gpattern.cpp\",\n target='sop_gpattern',\n includes=['.', ctx.env.HFS_INC],\n defines=ctx.env.DEFINES,\n )\n ctx.install_files(DSO_HOME, ['sop_gpattern.dll'])\n","repo_name":"alexxbb/gpattern","sub_path":"wscript","file_name":"wscript","file_ext":"","file_size_in_byte":433,"program_lang":"python","lang":"en","doc_type":"code","stars":1,"dataset":"github-code","pt":"70"} +{"seq_id":"28449546703","text":"#!/usr/bin/env python\n# coding: utf-8\n\n# # Setup\n\n# First, let's import a few common modules, ensure MatplotLib plots figures inline and prepare a function to save the figures. We also check that Python 3.5 or later is installed (although Python 2.x may work, it is deprecated so we strongly recommend you use Python 3 instead), as well as Scikit-Learn ≥0.20.\n\n# In[1]:\n\n\n# Python ≥3.5 is required\nimport sys\nassert sys.version_info >= (3, 5)\n\n# Scikit-Learn ≥0.20 is required\nimport sklearn\nassert sklearn.__version__ >= \"0.20\"\n\n# Common imports\nimport numpy as np\nimport os\n\n# to make this notebook's output stable across runs\nnp.random.seed(42)\n\n# To plot pretty figures\nget_ipython().run_line_magic('matplotlib', 'inline')\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nmpl.rc('axes', labelsize=14)\nmpl.rc('xtick', labelsize=12)\nmpl.rc('ytick', labelsize=12)\n\n# Where to save the figures\nPROJECT_ROOT_DIR = \".\"\nCHAPTER_ID = \"unsupervised_learning\"\nIMAGES_PATH = os.path.join(PROJECT_ROOT_DIR, \"images\", CHAPTER_ID)\nos.makedirs(IMAGES_PATH, exist_ok=True)\n\ndef save_fig(fig_id, tight_layout=True, fig_extension=\"png\", resolution=300):\n path = os.path.join(IMAGES_PATH, fig_id + \".\" + fig_extension)\n print(\"Saving figure\", fig_id)\n if tight_layout:\n plt.tight_layout()\n plt.savefig(path, format=fig_extension, dpi=resolution)\n\n\n# ## Creating a Face Detection Model using clustering techniques on the Olivetti Faces Dataset\n\n# The classic Olivetti faces dataset contains 400 grayscale 64 × 64–pixel images of faces. Each image is flattened to a 1D vector of size 4,096. 40 different people were photographed (10 times each). We can load the dataset using the `sklearn.datasets.fetch_olivetti_faces()` function\n\n# In[2]:\n\n\nfrom sklearn.datasets import fetch_olivetti_faces\n\nolivetti = fetch_olivetti_faces()\n\n\n# In[3]:\n\n\nprint(olivetti.DESCR)\n\n\n# In[4]:\n\n\nolivetti.target\n\n\n# Splitting the dataset into a training set, a validation set, and a test set. Please note that the dataset is already scaled between 0 and 1. 
Further, we will be using stratified sampling here since our dataset is quite small which will ensure that there are the same number of images per person in each set.\n\n# In[5]:\n\n\nfrom sklearn.model_selection import StratifiedShuffleSplit\n\nstrat_split = StratifiedShuffleSplit(n_splits=1, test_size=40, random_state=42)\ntrain_valid_idx, test_idx = next(strat_split.split(olivetti.data, olivetti.target))\nX_train_valid = olivetti.data[train_valid_idx]\ny_train_valid = olivetti.target[train_valid_idx]\nX_test = olivetti.data[test_idx]\ny_test = olivetti.target[test_idx]\n\nstrat_split = StratifiedShuffleSplit(n_splits=1, test_size=80, random_state=43)\ntrain_idx, valid_idx = next(strat_split.split(X_train_valid, y_train_valid))\nX_train = X_train_valid[train_idx]\ny_train = y_train_valid[train_idx]\nX_valid = X_train_valid[valid_idx]\ny_valid = y_train_valid[valid_idx]\n\n\n# In[6]:\n\n\nprint(X_train.shape, y_train.shape)\nprint(X_valid.shape, y_valid.shape)\nprint(X_test.shape, y_test.shape)\n\n\n# As we saw above the dimensionality of the data is quite high so, we'll reduce the data's dimensionality using PCA:\n\n# In[7]:\n\n\nfrom sklearn.decomposition import PCA\n\npca = PCA(0.99) #99% variance should be explained by the model\nX_train_pca = pca.fit_transform(X_train)\nX_valid_pca = pca.transform(X_valid)\nX_test_pca = pca.transform(X_test)\n\npca.n_components_\n\n\n# ##### Quick fact: We use the fit_transform method on training data and transform method on test data because in fit_transform, we have the fit which calculates the mean and variance of the training data and transform uses this mean and variance to transform/scale the data. Using fit_transform on the test data will let the model know about the test data as well so this will be no surprise for the model. Thus, we use the transform method since this will use the training data mean and variance to transform the test data \n\n# Clustering images using K-Means\n\n# In[8]:\n\n\nfrom sklearn.cluster import KMeans\n\nk_range = range(5, 150, 5)\nkmeans_per_k = []\nfor k in k_range:\n print(\"k={}\".format(k))\n kmeans = KMeans(n_clusters=k, random_state=42).fit(X_train_pca)\n kmeans_per_k.append(kmeans)\n\n\n# In[9]:\n\n\nfrom sklearn.metrics import silhouette_score\n\nsilhouette_scores = [silhouette_score(X_train_pca, model.labels_)\n for model in kmeans_per_k]\nbest_index = np.argmax(silhouette_scores)\nbest_k = k_range[best_index]\nbest_score = silhouette_scores[best_index]\n\nplt.figure(figsize=(8, 3))\nplt.plot(k_range, silhouette_scores, \"bo-\")\nplt.xlabel(\"$k$\", fontsize=14)\nplt.ylabel(\"Silhouette score\", fontsize=14)\nplt.plot(best_k, best_score, \"rs\")\nplt.show()\n\n\n# In[10]:\n\n\nbest_k\n\n\n# It looks like the best number of clusters is quite high, at 120. An expectation around 40 would be more believable since there are 40 different people on the pictures. 
However, same person is clicked very differently using different angles, using specs which could have led to this high number of clusters.\n\n# In[11]:\n\n\ninertias = [model.inertia_ for model in kmeans_per_k]\nbest_inertia = inertias[best_index]\n\nplt.figure(figsize=(8, 3.5))\nplt.plot(k_range, inertias, \"bo-\")\nplt.xlabel(\"$k$\", fontsize=14)\nplt.ylabel(\"Inertia\", fontsize=14)\nplt.plot(best_k, best_inertia, \"rs\")\nplt.show()\n\n\n# Since we don't see an obvious elbow which would help us to give optimal number of clusters, so let's stick with k=120\n\n# In[12]:\n\n\nbest_model = kmeans_per_k[best_index]\n\n\n# *Visualizing the clusters to see if there are similar faces in the clusters*\n\n# In[13]:\n\n\ndef plot_faces(faces, labels, n_cols=5):\n faces = faces.reshape(-1, 64, 64)\n n_rows = (len(faces) - 1) // n_cols + 1\n plt.figure(figsize=(n_cols, n_rows * 1.1))\n for index, (face, label) in enumerate(zip(faces, labels)):\n plt.subplot(n_rows, n_cols, index + 1)\n plt.imshow(face, cmap=\"gray\")\n plt.axis(\"off\")\n plt.title(label)\n plt.show()\n\nfor cluster_id in np.unique(best_model.labels_):\n print(\"Cluster\", cluster_id)\n in_cluster = best_model.labels_==cluster_id\n faces = X_train[in_cluster]\n labels = y_train[in_cluster]\n plot_faces(faces, labels)\n\n\n# We see that some of clusters are useful: that is, they contain at least 2 pictures, all of the same person. However, the rest of the clusters have either one or more intruders, or they have just a single picture.\n# \n# Clustering images this way may be too imprecise to be directly useful when training a model (as we will see below), but it can be tremendously useful when labeling images in a new dataset: it will usually make labelling much faster.\n\n# ## Using Clustering as Preprocessing for Classification\n\n# *Training a classifier to predict which person is represented in each picture, and evaluate it on the validation set.*\n\n# In[14]:\n\n\nfrom sklearn.ensemble import RandomForestClassifier\n\nclf = RandomForestClassifier(n_estimators=150, random_state=42)\nclf.fit(X_train_pca, y_train)\nclf.score(X_valid_pca, y_valid)\n\n\n# *Using K-Means as a dimensionality reduction tool, and train a classifier on the reduced set.*\n\n# In[17]:\n\n\nX_train_reduced = best_model.transform(X_train_pca)\nX_valid_reduced = best_model.transform(X_valid_pca)\nX_test_reduced = best_model.transform(X_test_pca)\n\nclf = RandomForestClassifier(n_estimators=150, random_state=42)\nclf.fit(X_train_reduced, y_train)\n \nclf.score(X_valid_reduced, y_valid)\n\n\n# This did not help at all. Lets see if tuning the clusters will help or not.\n\n# *Searching for the number of clusters that allows the classifier to get the best performance*\n\n# Since we already have a validation set, we don't need K-fold cross-validation, and we're only exploring a single hyperparameter, so it's simpler to just run a loop manually:\n\n# In[18]:\n\n\nfrom sklearn.pipeline import Pipeline\n\nfor n_clusters in k_range:\n pipeline = Pipeline([\n (\"kmeans\", KMeans(n_clusters=n_clusters, random_state=42)),\n (\"forest_clf\", RandomForestClassifier(n_estimators=150, random_state=42))\n ])\n pipeline.fit(X_train_pca, y_train)\n print(n_clusters, pipeline.score(X_valid_pca, y_valid))\n\n\n# Even after tuning, we never get beyond 80% accuracy. 
Looks like the distances to the cluster centroids are not as informative as the original images.\n\n# *Now, we can try appending the features from the reduced set to the original features*\n\n# In[20]:\n\n\nX_train_extended = np.c_[X_train_pca, X_train_reduced]\nX_valid_extended = np.c_[X_valid_pca, X_valid_reduced]\nX_test_extended = np.c_[X_test_pca, X_test_reduced]\n\n\n# In[21]:\n\n\nclf = RandomForestClassifier(n_estimators=150, random_state=42)\nclf.fit(X_train_extended, y_train)\nclf.score(X_valid_extended, y_valid)\n\n\n# That's a bit better, but still worse than without the cluster features. The clusters are not useful to directly train a classifier in this case (but they can still help when labelling new training instances).\n\n# ## Lets try a Gaussian Mixture Model for the Olivetti Faces Dataset\n\n# *Training a Gaussian mixture model on the Olivetti faces dataset and to speed up the algorithm, we are reducing the dataset's dimensionality (e.g., use PCA, preserving 99% of the variance).*\n\n# In[23]:\n\n\nfrom sklearn.mixture import GaussianMixture\n\ngm = GaussianMixture(n_components=40, random_state=42)\ny_pred = gm.fit_predict(X_train_pca)\n\n\n# *Generating some new faces and visualizing them*\n\n# In[24]:\n\n\nn_gen_faces = 20\ngen_faces_reduced, y_gen_faces = gm.sample(n_samples=n_gen_faces)\ngen_faces = pca.inverse_transform(gen_faces_reduced)\n\n\n# In[25]:\n\n\nplot_faces(gen_faces, y_gen_faces)\n\n\n# *Testing the model by modifying some images (e.g., rotate, flip, darken) and see if the model can detect the anomalies (i.e., compare the output of the `score_samples()` method for normal images and for anomalies).*\n\n# In[26]:\n\n\nn_rotated = 4\nrotated = np.transpose(X_train[:n_rotated].reshape(-1, 64, 64), axes=[0, 2, 1])\nrotated = rotated.reshape(-1, 64*64)\ny_rotated = y_train[:n_rotated]\n\nn_flipped = 3\nflipped = X_train[:n_flipped].reshape(-1, 64, 64)[:, ::-1]\nflipped = flipped.reshape(-1, 64*64)\ny_flipped = y_train[:n_flipped]\n\nn_darkened = 3\ndarkened = X_train[:n_darkened].copy()\ndarkened[:, 1:-1] *= 0.3\ny_darkened = y_train[:n_darkened]\n\nX_bad_faces = np.r_[rotated, flipped, darkened]\ny_bad = np.concatenate([y_rotated, y_flipped, y_darkened])\n\nplot_faces(X_bad_faces, y_bad)\n\n\n# In[27]:\n\n\nX_bad_faces_pca = pca.transform(X_bad_faces)\n\n\n# In[28]:\n\n\ngm.score_samples(X_bad_faces_pca)\n\n\n# The bad faces are all considered highly unlikely by the Gaussian Mixture model. 
Comparing this to the scores of some training instances:\n\n# In[29]:\n\n\ngm.score_samples(X_train_pca[:10])\n\n\n# ## Using Dimensionality Reduction Techniques for Anomaly Detection\n\n# *Using dimensionality reduction techniques for anomaly detection by computing the reconstruction error for each image*\n\n# Using the reduced dataset:\n\n# In[30]:\n\n\nX_train_pca\n\n\n# In[31]:\n\n\ndef reconstruction_errors(pca, X):\n X_pca = pca.transform(X)\n X_reconstructed = pca.inverse_transform(X_pca)\n mse = np.square(X_reconstructed - X).mean(axis=-1)\n return mse\n\n\n# In[32]:\n\n\nreconstruction_errors(pca, X_train).mean()\n\n\n# In[34]:\n\n\nreconstruction_errors(pca, X_bad_faces).mean()\n\n\n# In[37]:\n\n\nplot_faces(X_bad_faces, y_bad)\n\n\n# In[38]:\n\n\nX_bad_faces_reconstructed = pca.inverse_transform(X_bad_faces_pca)\nplot_faces(X_bad_faces_reconstructed, y_bad)\n\n\n# In[ ]:\n\n\n\n\n","repo_name":"PallaviHans17/Unsupervised-Learning","sub_path":"Applying Unsupervised Learning Techniques to Image dataset.py","file_name":"Applying Unsupervised Learning Techniques to Image dataset.py","file_ext":"py","file_size_in_byte":11414,"program_lang":"python","lang":"en","doc_type":"code","stars":0,"dataset":"github-code","pt":"70"} +{"seq_id":"25931819684","text":"from location import Location\nfrom stack import Stack\n\nclass Maze():\n BREAD = 'B'\n EXIT = 'X'\n WALL = '#'\n PATH = ' '\n\n def __init__(self, fileName):\n file = open(fileName, \"r\")\n self.maze = self.make_maze(fileName)\n self.current = Location(1,0)\n self.paths = Stack()\n self.moves = [self.current]\n self.oops_index = 0\n\n def make_maze(self, fileName):\n file = open(fileName, \"r\")\n str_list = file.read().split('\\n')\n maze = []\n\n for item in str_list:\n maze.append(list(item))\n\n return maze\n\n def solve_it(self):\n self.dropBC()\n self.current.moveRight()\n wandering = self.getMazeLocMark(self.current) != Maze.EXIT\n\n while wandering:\n self.dropBC()\n nextStep = self.pickDirection()\n self.moves.append(nextStep)\n self.takeAStep(nextStep)\n wandering = self.getMazeLocMark(self.current) != Maze.EXIT\n\n return self.maze, self.moves\n\n def pickDirection(self):\n choices = self.getPaths()\n\n if choices > 1:\n self.oops_index = len(self.moves) - 1\n elif choices == 0:\n self.wipe()\n\n return self.paths.pop()\n\n def wipe(self):\n \"\"\"\n Remove bread crumbs from dead end moves\n :return:\n \"\"\"\n for idx in range(len(self.moves) - 1, self.oops_index, -1):\n self.setMazeLocMark(self.moves[idx], Maze.PATH)\n\n def takeAStep(self, dir):\n self.current = dir;\n\n def getPaths(self):\n count = 0\n mark = self.get_path_up()\n if mark == Maze.PATH or mark == Maze.EXIT:\n self.paths.push(Location(self.current.row - 1, self.current.col))\n count += 1\n\n mark = self.get_path_right()\n if mark == Maze.PATH or mark == Maze.EXIT:\n self.paths.push(Location(self.current.row, self.current.col + 1))\n count += 1\n\n mark = self.get_path_down()\n if mark == Maze.PATH or mark == Maze.EXIT:\n self.paths.push(Location(self.current.row + 1, self.current.col))\n count += 1\n\n mark = self.get_path_left()\n if mark == Maze.PATH or mark == Maze.EXIT:\n self.paths.push(Location(self.current.row, self.current.col - 1))\n count += 1\n\n return count\n\n def get_path_up(self):\n return self.getMazeLocMark(Location(self.current.row - 1, self.current.col))\n\n def get_path_right(self):\n return self.getMazeLocMark(Location(self.current.row, self.current.col + 1))\n\n def get_path_down(self):\n return 
self.getMazeLocMark(Location(self.current.row + 1, self.current.col))\n\n def get_path_left(self):\n return self.getMazeLocMark(Location(self.current.row, self.current.col - 1))\n\n def dropBC(self):\n self.setMazeLocMark(self.current, Maze.BREAD)\n\n def setMazeLocMark(self, spot, mark):\n self.maze[spot.row][spot.col] = mark\n\n def getMazeLocMark(self, spot):\n #print(spot.getRow(), spot.getCol())\n return self.maze[spot.getRow()][spot.getCol()]\n\n def __str__(self):\n return str(self.maze)","repo_name":"peterdcasey/Maze2","sub_path":"Maze/maze.py","file_name":"maze.py","file_ext":"py","file_size_in_byte":3177,"program_lang":"python","lang":"en","doc_type":"code","stars":0,"dataset":"github-code","pt":"70"} +{"seq_id":"9656924254","text":"'''\nObjectDisplay: widget that handle fiducial object, and draw them\n'''\n\n__all__ = ('MTObjectDisplay', )\n\nfrom OpenGL.GL import glTranslatef, glRotatef, glVertex2f, GL_LINES\nfrom pymt.graphx import gx_matrix, gx_begin, set_color, drawCSSRectangle\nfrom pymt.ui.widgets.widget import MTWidget\nfrom math import pi\n\nclass MTObjectDisplay(MTWidget):\n '''MTObjectDisplay is a widget who draw objects on table'''\n def __init__(self, **kwargs):\n super(MTObjectDisplay, self).__init__(**kwargs)\n self.objects = {}\n\n def on_touch_down(self, touch):\n if not 'markerid' in touch.profile:\n return\n self.objects[touch.id] = (touch.x, touch.y, -touch.a * 180. / pi)\n\n def on_touch_move(self, touch):\n if touch.id in self.objects:\n self.objects[touch.id] = (touch.x, touch.y, -touch.a * 180. / pi)\n\n def on_touch_up(self, touch):\n if touch.id in self.objects:\n del self.objects[touch.id]\n\n def draw(self):\n if not self.visible:\n return\n\n for objectID in self.objects:\n x, y, angle = self.objects[objectID]\n with gx_matrix:\n glTranslatef(x, y, 0.0)\n glRotatef(angle, 0.0, 0.0, 1.0)\n\n set_color(.5)\n drawCSSRectangle(\n pos=(-0.5 * self.width, -0.5 * self.height),\n size=(self.width, self.height),\n style=self.style\n )\n\n set_color(*self.style['vector-color'])\n with gx_begin(GL_LINES):\n glVertex2f(0., 0.)\n glVertex2f(0., -0.5 * self.height)\n","repo_name":"tito/pymt","sub_path":"pymt/ui/widgets/objectdisplay.py","file_name":"objectdisplay.py","file_ext":"py","file_size_in_byte":1664,"program_lang":"python","lang":"en","doc_type":"code","stars":97,"dataset":"github-code","pt":"70"} +{"seq_id":"17102943785","text":"import pyutilib.th as unittest\nimport pyomo.environ as pe\nimport pyomo.gdp as gdp\nfrom pyomo.gdp.util import check_model_algebraic\nfrom pyomo.common.log import LoggingIntercept\nimport logging\nfrom six import StringIO\n\n\nclass TestGDPReclassificationError(unittest.TestCase):\n def test_disjunct_not_in_disjunction(self):\n m = pe.ConcreteModel()\n m.x = pe.Var()\n m.d1 = gdp.Disjunct()\n m.d1.c = pe.Constraint(expr=m.x == 1)\n m.d2 = gdp.Disjunct()\n m.d2.c = pe.Constraint(expr=m.x == 0)\n pe.TransformationFactory('gdp.bigm').apply_to(m)\n log = StringIO()\n with LoggingIntercept(log, 'pyomo.gdp', logging.WARNING):\n check_model_algebraic(m)\n self.assertRegexpMatches( log.getvalue(), \n '.*not found in any Disjunctions.*')\n\n def test_disjunct_not_in_active_disjunction(self):\n m = pe.ConcreteModel()\n m.x = pe.Var()\n m.d1 = gdp.Disjunct()\n m.d1.c = pe.Constraint(expr=m.x == 1)\n m.d2 = gdp.Disjunct()\n m.d2.c = pe.Constraint(expr=m.x == 0)\n m.disjunction = gdp.Disjunction(expr=[m.d1, m.d2])\n m.disjunction.deactivate()\n pe.TransformationFactory('gdp.bigm').apply_to(m)\n log = StringIO()\n with LoggingIntercept(log, 
'pyomo.gdp', logging.WARNING):\n check_model_algebraic(m)\n self.assertRegexpMatches(log.getvalue(), \n '.*While it participates in a Disjunction, '\n 'that Disjunction is currently deactivated.*')\n","repo_name":"sauceboidk/pyomo","sub_path":"pyomo/gdp/tests/test_gdp_reclassification_error.py","file_name":"test_gdp_reclassification_error.py","file_ext":"py","file_size_in_byte":1584,"program_lang":"python","lang":"en","doc_type":"code","stars":1,"dataset":"github-code","pt":"70"} +{"seq_id":"1403017115","text":"import pygame\n\n\nclass GameSettings:\n # All settings for the Tweetio game are stored here\n def __init__(self, screen_dimensions=(800, 1200)):\n # initialize the game's settings.\n # Screen Settings\n title = 'Tweetio'\n\n colour_values = (0, 0, 0)\n pygame.display.set_caption(title)\n # set screen width\n self.screen_width = screen_dimensions[0]\n # set screen length\n self.screen_height = screen_dimensions[1]\n # set background colour\n self.bg_colour = colour_values\n","repo_name":"Diogenesoftoronto/Tweetio_Repo","sub_path":"tweetio_settings.py","file_name":"tweetio_settings.py","file_ext":"py","file_size_in_byte":543,"program_lang":"python","lang":"en","doc_type":"code","stars":1,"dataset":"github-code","pt":"70"} +{"seq_id":"74101970145","text":"import requests #Send and receive HTTP requests\r\nimport json #Convert to and from JSON\r\nimport os #Operating system\r\n\r\nREST = os.getenv(\"REST\") or \"127.0.0.1:5000\" #Load the rest host\r\n\r\n#MakeRequest: makes a predetermined request\r\n#Input: The HTTP method, the endpoint the request is going to, the data being sent\r\n#Output: The response\r\ndef makeRequest(method, endpoint, data):\r\n\tprint(f\"Response to http://{REST}/{endpoint} request is\") #Print where the request is going\r\n\tjsonData = json.dumps(data) #Dump the data into a json format\r\n\tresponse = method(f\"http://{REST}/{endpoint}\", data=jsonData, headers={'Content-type': 'application/json'}) #Send the request.\r\n\tprint(f\"response code {response.status_code}, raw response is {response.text}\") #Print what comes back\r\n\treturn response.text #Return the response\r\n\r\n\r\nmakeRequest(requests.post, \"api/postData\", {\"sentData\" : \"Hello\"}) #Make the request for testing purposes","repo_name":"tylerpaik/BigDataArchitecture","sub_path":"API/sampleRequests.py","file_name":"sampleRequests.py","file_ext":"py","file_size_in_byte":926,"program_lang":"python","lang":"en","doc_type":"code","stars":0,"dataset":"github-code","pt":"70"} +{"seq_id":"33027063216","text":"#! 
/usr/bin/env python3\nimport rospy\nimport numpy as np\nimport cv2 as cv\nimport matplotlib.pyplot as plt\nfrom std_msgs.msg import String\nfrom sensor_msgs.msg import Image\n#from detection import side\nfrom cv_bridge import CvBridge\n\npub = rospy.Publisher('arrow_dir', String, queue_size=10)\n\n# Function to increase brightness of the cv Image Object\ndef increase_brightness(img,value):\n hsv = cv.cvtColor(img, cv.COLOR_BGR2HSV)\n h, s, v = cv.split(hsv)\n\n lim = 255 - value\n v[v > lim] = 255\n v[v <= lim] += value\n\n final_hsv = cv.merge((h, s, v))\n img = cv.cvtColor(final_hsv, cv.COLOR_HSV2BGR)\n return img\n\n# Function to detect the arrow and classify its direction\ndef detect(Image):\n bridge = CvBridge()\n global detected\n image = bridge.imgmsg_to_cv2(Image, desired_encoding='passthrough')\n #image = increase_brightness(cv_image, value = 150)\n image = cv.cvtColor(image, cv.COLOR_BGR2GRAY)\n _, threshold = cv.threshold(image, 50, 255, cv.THRESH_BINARY)\n contours,_=cv.findContours(threshold, cv.RETR_TREE,cv.CHAIN_APPROX_SIMPLE)\n #print(contours)\n \n for cnt in contours :\n area = cv.contourArea(cnt)\n if area > 400: \n #print(area)\n approx = cv.approxPolyDP(cnt, 0.009 * cv.arcLength(cnt, True), True)\n if(len(approx) == 7): \n cv.drawContours(image, [approx], 0, (0, 255, 0), 5)\n min_o = []\n for i in approx:\n min_o.append(i[0][0])\n min_o.sort()\n if ((min_o[1] - min_o[0] > 5) and (min_o[-1] - min_o[-2] < 5)):\n #print (\"go Left\")\n pub.publish('left')\n\n elif ((min_o[1] - min_o[0] < 5) and (min_o[-1] - min_o[-2] > 5)):\n #print (\"go Right\")\n pub.publish('right')\n\nimage_message = rospy.Subscriber(\"/camera/image_raw\",Image,detect)\n\n\nif __name__ == '__main__':\n rospy.init_node('arrow_detector', anonymous=True)\n rospy.spin()\n\n","repo_name":"sushantpeace10/Robofest-2021","sub_path":"scripts/arrow_detect.py","file_name":"arrow_detect.py","file_ext":"py","file_size_in_byte":2016,"program_lang":"python","lang":"en","doc_type":"code","stars":0,"dataset":"github-code","pt":"70"} +{"seq_id":"14076026823","text":"import sys\n\nsys.path.insert(0, '/Users/momantang/PycharmProjects/cobrass/')\nsys.path\nimport mpl_finance as mplf\nimport tushare as ts\nimport pandas as pd\n\nfrom local import local_setting as ls\nfrom matplotlib import pyplot as plt\nfrom matplotlib.pylab import date2num\nimport datetime\nfrom abupy import ABuSymbolPd, ABuMarketDrawing, AbuSymbolCN\n\n__color_up__ = 'red'\n__color_down__ = 'green'\n\nif __name__ == '__main__':\n ts.set_token(ls.LocalSetting.tushare_token)\n ts_pro = ts.pro_api()\n df_zgpa = ts_pro.query('daily', ts_code='601318.SH')\n # df_zgpa.to_csv(ls.LocalSetting.data_path + \"601318.csv\")\n # df_zgpa = pd.read_csv(ls.LocalSetting.data_path + \"601318.csv\")\n # print(df_zgpa.head())\n df_index = ABuSymbolPd.make_kl_df('000001')\n df = ABuSymbolPd.make_kl_df('601318')\n # df_index = df_zgpa\n # df_index.set_index('trade_date', inplace=True)\n # print(df_zgpa.tail())\n qutotes1 = []\n qutotes2 = []\n fig, (ax1, ax2) = plt.subplots(nrows=2, ncols=1, figsize=(14, 7))\n for index, (d, o, c, h, l) in enumerate(zip(df_index.index, df_index.open, df_index.close, df_index.high, df_index.low)):\n d = date2num(d)\n val = (d, o, c, h, l)\n qutotes1.append(val)\n pass\n print(df_index.dtypes)\n print(df_zgpa.dtypes)\n mplf.candlestick_ochl(ax1, qutotes1, width=0.6, colorup=__color_up__, colordown=__color_down__)\n ax1.xaxis_date()\n\n for index, (d, o, c, h, l) in enumerate(zip(df_zgpa.trade_date, df_zgpa.open, df_zgpa.close, df_zgpa.high, 
df_zgpa.low)):\n date_time = datetime.datetime.strptime(d, '%Y%m%d')\n t = date2num(date_time)\n val = (t, o, c, h, l)\n qutotes2.append(val)\n pass\n mplf.candlestick_ochl(ax2, qutotes2, width=0.6, colorup=__color_up__, colordown=__color_down__)\n ax2.xaxis_date()\n plt.show()\n","repo_name":"momantang/cobrass","sub_path":"demo/k_demo.py","file_name":"k_demo.py","file_ext":"py","file_size_in_byte":1836,"program_lang":"python","lang":"en","doc_type":"code","stars":1,"dataset":"github-code","pt":"70"} +{"seq_id":"12900262716","text":"from __future__ import print_function\nimport datetime\nfrom googleapiclient.discovery import build\nfrom httplib2 import Http\nfrom oauth2client import file, client, tools\n\nimport pytz\nimport cmsc_calendar\nimport secret\n\nSCOPES = 'https://www.googleapis.com/auth/calendar'\nCAL_ID = \"\"\n\ndef main():\n # Get creds to use api\n store = file.Storage('token.json')\n creds = store.get()\n if not creds or creds.invalid:\n flow = client.flow_from_clientsecrets('credentials.json', SCOPES)\n creds = tools.run_flow(flow, store)\n service = build('calendar', 'v3', http=creds.authorize(Http()))\n\n # Get event log\n with open('event_log.txt', 'a+') as f:\n f.close()\n with open('event_log.txt', 'r+') as f:\n event_log = set(f.read().split(\"\\n\"))\n\n # Set google calendar\n if CAL_ID == \"\":\n # make google calendar\n calendar = {\n 'summary': 'CMSC Undergrad Events',\n 'timeZone': 'America/New_York'\n }\n goog_cal = service.calendars().insert(body=calendar).execute()\n else:\n goog_cal = service.calendarList().get(calendarId=CAL_ID).execute()\n\n # get cmsc calendar\n c = cmsc_calendar.Calendar()\n c.connect()\n\n # add events\n est = pytz.timezone(\"US/Eastern\")\n for e in c.events:\n # skip if logged\n if e.href in event_log:\n continue\n # add event to google calendar\n event = {\n 'summary':e.title,\n 'description':e.content,\n 'start':{\n 'dateTime':e.get_start()+('-04:00' if bool(est.localize(e.start_time).dst()) else '-05:00'),\n 'timeZone':'America/New_York'\n },\n 'end':{\n 'dateTime':e.get_end()+('-04:00' if bool(est.localize(e.end_time).dst()) else '-05:00'),\n 'timeZone':'America/New_York'\n }\n }\n event = service.events().insert(calendarId=goog_cal['id'], body=event).execute()\n\n # append to event log\n with open('event_log.txt', 'a') as f:\n f.write(e.href+\"\\n\")\n f.close()\n \nif __name__ == '__main__':\n CAL_ID = secret.CAL_ID\n main()\n","repo_name":"jasmaabox/cmsc-undergrad-calendar","sub_path":"script.py","file_name":"script.py","file_ext":"py","file_size_in_byte":2172,"program_lang":"python","lang":"en","doc_type":"code","stars":0,"dataset":"github-code","pt":"70"} +{"seq_id":"74928531746","text":"import lanternfish\nimport optimizedLanternFish\n\ndataFile = open('./input', 'r')\ndata = dataFile.read().split(',')\ndataFile.close()\n\ndata = [3,4,3,1,2]\n\nfishSchool = []\nfor fish in data:\n fishSchool.append(lanternfish.lanternfish(int(fish)))\n\n#outputFile = open('./output', 'w')\n#outputFile.write('')\n#outputFile.close()\ni = 0\nfor day in range(80):\n for fish in fishSchool:\n newFish = fish.dayChange()\n if newFish:\n fishSchool.append(newFish)\n \n population = []\n for fish in fishSchool:\n population.append(str(fish.cycle))\n\n report = f\"Day {day}: \"\n report = report + \",\".join(population) + '\\n'\n \n #outputFile = open('./output', 'a')\n #outputFile.write(report)\n #outputFile.close()\n\npopulationSize = len(fishSchool)\nprint(f\"Population: {populationSize}\")\n\noptimizedFish = 
optimizedLanternFish.OptimizedLanternFish(data)\nday = 0\nfor day in range(256):\n day += 1\n print(f\"Day: {day}\")\n optimizedFish.dayChange()\n\nprint(f\"Optimized population: {len(optimizedFish.schoolOfFish)}\")","repo_name":"Zohmer/adventofcode","sub_path":"6/puzzle1.py","file_name":"puzzle1.py","file_ext":"py","file_size_in_byte":1050,"program_lang":"python","lang":"en","doc_type":"code","stars":0,"dataset":"github-code","pt":"70"} +{"seq_id":"17253158853","text":"import requests\nfrom django.conf import settings\n\nKEY_REQUEST_MSG_SUCCESS = \"message_success\"\nKEY_REQUEST_MSG_ERROR = \"message_error\"\n\n\ndef addMessageSuccess(array, message):\n array[KEY_REQUEST_MSG_SUCCESS] = message\n\n\ndef addMessageError(array, message):\n array[KEY_REQUEST_MSG_ERROR] = message\n\n\ndef moveSessionMessageToContext(key, request, array):\n if request.session.get(key):\n if key == KEY_REQUEST_MSG_SUCCESS:\n addMessageSuccess(array, request.session[key])\n else:\n addMessageError(array, request.session[key])\n del request.session[key]\n request.session.modified = True\n\n\ndef isCaptchaValid(token):\n if settings.DEBUG:\n return True\n \n params = {\n \"secret\": settings.HCAPTCHA_SECRET_KEY,\n \"response\": token\n }\n response = requests.post('https://hcaptcha.com/siteverify', params)\n json = response.json()\n if 'success' not in json or not json['success']:\n return False\n return True\n","repo_name":"ThaNico/HistoricTimes","sub_path":"historic_times/utils/requestUtil.py","file_name":"requestUtil.py","file_ext":"py","file_size_in_byte":1002,"program_lang":"python","lang":"en","doc_type":"code","stars":0,"dataset":"github-code","pt":"70"} +{"seq_id":"70132989987","text":"\n\ndef main():\n\n num = int(input(\"Digite o numero de nomes :\"))\n\n names=[]\n aux=[]\n\n for i in range(num):\n names.append(input(\"Digite o \" + str(i) + \"º nome :\"))\n\n cont=0\n\n for o in range(num-1):\n for i in range (0,num-o-1):\n if names[i]>names[i+1]:\n aux.append(names[i+1])\n names[i+1]=names[i]\n names[i]=aux[cont]\n cont = cont + 1\n\n for i in range(num):\n print(str(i + 1) + \"º nome =\" + names[i])\n\nif __name__ == '__main__':\n main()","repo_name":"ArturMachado12/Python-Study","sub_path":"Exercice 2.py","file_name":"Exercice 2.py","file_ext":"py","file_size_in_byte":550,"program_lang":"python","lang":"en","doc_type":"code","stars":0,"dataset":"github-code","pt":"70"} +{"seq_id":"8498432720","text":"H = 10010000007 ** 2\nM = 100000000000000007 ** 2\n\n\ndef check_lists(a, b):\n if len(a) != len(b):\n return False\n for i in range(len(a)):\n if a[i] != b[i]:\n return False\n return True\n\n\ndef lcsn(s1, s2_list, n, h=H, m=M, isList=True):\n # print('len s2_list', len(s2_list))\n\n hs = [h % m]\n for i in range(1, n):\n hs.append((hs[-1] * h) % m)\n if isList is False:\n s1 = [ord(e) for e in list(s1)]\n s2 = [ord(e) for e in list(s2_list)]\n th = 0\n s = dict()\n for i in range(n):\n th = (th + s1[i] * hs[n - i - 1]) % m\n for i in range(n, len(s1)):\n if th not in s:\n s[th] = i - n\n else:\n if check_lists(s1[s[th]:s[th] + n], s1[i - n:i]) is False:\n print('collision')\n print(s1[s[th]:s[th] + n])\n print(s1[i - n:i])\n raise\n th = ((th - (s1[i - n] * hs[-1]) % m + m) * h + hs[0] * s1[i]) % m\n # print(s1[len(s1) - n: len(s1)], th)\n if th not in s:\n s[th] = len(s1) - n\n else:\n if check_lists(s1[s[th]:s[th] + n], s1[len(s1) - n:len(s1)]) is False:\n print('collision0')\n print(s1[s[th]:s[th] + n])\n print(s1[len(s1) - n:len(s1)])\n raise\n\n res = []\n for j, s2 in enumerate(s2_list):\n if 
len(s2) < n:\n res.append(None)\n continue\n th = 0\n for i in range(n):\n th = (th + s2[i] * hs[n - i - 1]) % m\n for i in range(n, len(s2)):\n if th in s:\n if check_lists(s2[i - n: i], s1[s[th]:s[th] + n]) is False:\n print('collision1')\n print(s2[i - n: i])\n print(s1[s[th]:s[th] + n])\n raise\n res.append(s2[i - n: i])\n break\n th = ((th - (s2[i - n] * hs[-1]) % m + m) * h + hs[0] * s2[i]) % m\n if len(res) != (j + 1):\n if th in s:\n if check_lists(s2[len(s2) - n: len(s2)], s1[s[th]:s[th] + n]) is False:\n print('collision2')\n print(s[th])\n print(s2)\n print(s2[len(s2) - n: len(s2)])\n print(s1[s[th]:s[th] + n])\n print(check_lists(s2[len(s2) - n: len(s2)], s1[s[th]:s[th] + n]))\n raise\n res.append(s2[len(s2) - n: len(s2)])\n if len(res) != (j + 1):\n res.append(None)\n # print(len(s2_list), len(res))\n return res\n\n\ndef lcs(s1, s2, h=H, m=M, isList=True, isPrint=True):\n l = 0\n r = min(len(s1), len(s2))\n while (l < r):\n if isPrint is True:\n print(l, r)\n x = (l + r + 1) // 2\n if lcsn(s1, [s2], x, h=h, m=m, isList=isList)[0] is not None:\n l = x\n else:\n r = x - 1\n if l == 0:\n return ''\n else:\n if isPrint is True:\n print(l)\n return lcsn(s1, [s2], l, h=h, m=m, isList=True)[0]\n\n\ndef lcs_dict(s1, s2_dict, h=H, m=M, isList=True):\n res = dict()\n for e in s2_dict:\n res[e] = [0, '']\n s2_dict_cur = s2_dict.copy()\n x = 0\n while len(s2_dict_cur) > 0:\n x += 1\n print(x, len(s2_dict_cur))\n z = lcsn(s1, [e[1] for e in sorted(s2_dict_cur.items(), key=lambda x: x[0])], x, h=h, m=m, isList=isList)\n for i, e in enumerate(sorted(s2_dict_cur.items(), key=lambda x: x[0])):\n if z[i] is not None:\n res[e[0]][0] = x\n res[e[0]][1] = z[i]\n else:\n s2_dict_cur.pop(e[0])\n return res","repo_name":"Topspin26/SberbankDataScienceContest_2017","sub_path":"lcs.py","file_name":"lcs.py","file_ext":"py","file_size_in_byte":3641,"program_lang":"python","lang":"en","doc_type":"code","stars":8,"dataset":"github-code","pt":"70"} +{"seq_id":"28673569903","text":"\"\"\"\nСоздайте вручную кортеж содержащий элементы разных типов.\nПолучите из него словарь списков, где:\nключ - тип элемента,\nзначение - список элементов данного типа.\n\"\"\"\n# Решение № 1\ntuple_obj = (1, 2.1, True, None, 'string', 3, 4, 5, False, 'elem')\ndct = {}\nfor item in tuple_obj:\n obj_type = type(item)\n lst = []\n for elem in tuple_obj:\n if type(elem) == obj_type:\n lst.append(elem)\n dct[obj_type] = lst\nprint(dct)\n\n\n# Решение № 2\n# data = (1, 2.1, True, None, 'string', 3, 4, 5, False, 'elem')\n#\n# result_dict = dict()\n#\n# for el in data:\n# el_type = str(type(el))\n# if el_type not in result_dict.keys():\n# result_dict[el_type] = [el]\n# else:\n# result_dict[el_type].append(el)\n# print(result_dict)","repo_name":"KostyaSt2022/auto_python_gb","sub_path":"seminar_3/sem3_task_3.py","file_name":"sem3_task_3.py","file_ext":"py","file_size_in_byte":905,"program_lang":"python","lang":"ru","doc_type":"code","stars":0,"dataset":"github-code","pt":"70"} +{"seq_id":"32612959393","text":"import pickle\nimport os\nimport six\nimport json\nimport numpy as np\n\ndef load_pickle_BeRNN(file):\n try:\n with open(file, 'rb') as f:\n data = pickle.load(f)\n except UnicodeDecodeError as e:\n with open(file, 'rb') as f:\n data = pickle.load(f, encoding='latin1')\n except Exception as e:\n print('Unable to load data ', file, ':', e)\n raise\n return data\n\nrules_dict = \\\n {'BeRNN' : ['DM', 'DM Anti', 'EF', 'EF Anti', 'RP', 'RP Anti', 'RP Ctx1',\n 'RP Ctx2', 'WM', 'WM Anti', 'WM Ctx1', 'WM Ctx2']}\n\ndef 
get_num_ring_BeRNN(ruleset):\n '''get number of stimulus rings'''\n return 2\n\ndef get_num_rule_BeRNN(ruleset):\n '''get number of rules'''\n return len(rules_dict[ruleset])\n\ndef get_default_hp_BeRNN(ruleset):\n '''Get a default hp.\n\n Useful for debugging.\n\n Returns:\n hp : a dictionary containing training hpuration\n '''\n num_ring = get_num_ring_BeRNN(ruleset)\n n_rule = get_num_rule_BeRNN(ruleset)\n\n n_eachring = 32\n n_input, n_output = 1+num_ring*n_eachring+n_rule, n_eachring+1\n hp = {\n # input type: normal, multi\n 'in_type': 'normal',\n # Type of RNNs: LeakyRNN, LeakyGRU, EILeakyGRU, GRU, LSTM\n 'rnn_type': 'LeakyRNN',\n # whether rule and stimulus inputs are represented separately\n 'use_separate_input': False,\n # Type of loss functions\n 'loss_type': 'lsq',\n # Optimizer\n 'optimizer': 'adam',\n # Type of activation functions, relu, softplus, tanh, elu\n 'activation': 'relu',\n # Time constant (ms)\n 'tau': 100,\n # discretization time step (ms)\n 'dt': 20,\n # discretization time step/time constant\n 'alpha': 0.2,\n # recurrent noise\n 'sigma_rec': 0.05,\n # input noise\n 'sigma_x': 0.01,\n # leaky_rec weight initialization, diag, randortho, randgauss\n 'w_rec_init': 'randortho',\n # a default weak regularization prevents instability\n 'l1_h': 0,\n # l2 regularization on activity\n 'l2_h': 0,\n # l1 regularization on weight\n 'l1_weight': 0,\n # l2 regularization on weight\n 'l2_weight': 0,\n # l2 regularization on deviation from initialization\n 'l2_weight_init': 0,\n # proportion of weights to train, None or float between (0, 1)\n 'p_weight_train': None,\n # number of units each ring\n 'n_eachring': n_eachring,\n # number of rings\n 'num_ring': num_ring,\n # number of rules\n 'n_rule': n_rule,\n # first input index for rule units\n 'rule_start': 1+num_ring*n_eachring,\n # number of input units\n 'n_input': n_input,\n # number of output units\n 'n_output': n_output,\n # number of recurrent units\n 'n_rnn': 256,\n # random number used for several random initializations\n 'rng' : np.random.RandomState(seed=0),\n # number of input units\n 'ruleset': ruleset,\n # name to save\n 'save_name': 'test_model',\n # learning rate\n 'learning_rate': 0.001,\n # # intelligent synapses parameters, tuple (c, ksi)\n # 'c_intsyn': 0,\n # 'ksi_intsyn': 0,\n }\n\n return hp\n\ndef save_hp_BeRNN(hp, model_dir):\n \"\"\"Save the hyper-parameter file of model save_name\"\"\"\n hp_copy = hp.copy()\n hp_copy.pop('rng') # rng can not be serialized\n with open(os.path.join(model_dir, 'hp.json'), 'w') as f:\n json.dump(hp_copy, f)\n\n# def mkdir_p_BeRNN(path):\n# \"\"\"\n# Portable mkdir -p\n#\n# \"\"\"\n# try:\n# os.makedirs(path)\n# # except OSError as e:\n# # if e.errno == errno.EEXIST and os.path.isdir(path):\n# # pass\n# # else:\n# # raise\n\ndef gen_feed_dict_BeRNN(model, Input, Output, hp):\n \"\"\"Generate feed_dict for session run.\"\"\"\n if hp['in_type'] == 'normal':\n feed_dict = {model.x: Input,\n model.y: Output}\n else:\n raise ValueError()\n\n return feed_dict\n\ndef popvec_BeRNN(y):\n \"\"\"Population vector read out.\n\n Assuming the last dimension is the dimension to be collapsed\n\n Args:\n y: population output on a ring network. 
Numpy array (Batch, Units)\n\n Returns:\n Readout locations: Numpy array (Batch,)\n \"\"\"\n pref = np.arange(0, 2*np.pi, 2*np.pi/y.shape[-1]) # preferences\n temp_sum = y.sum(axis=-1)\n temp_cos = np.sum(y*np.cos(pref), axis=-1)/temp_sum\n temp_sin = np.sum(y*np.sin(pref), axis=-1)/temp_sum\n loc = np.arctan2(temp_sin, temp_cos)\n return np.mod(loc, 2*np.pi)\n\ndef get_perf_BeRNN(y_hat, y_loc):\n \"\"\"Get performance.\n\n Args:\n y_hat: Actual output. Numpy array (Time, Batch, Unit)\n y_loc: Target output location (-1 for fixation).\n Numpy array (Time, Batch)\n\n Returns:\n perf: Numpy array (Batch,)\n \"\"\"\n if len(y_hat.shape) != 3:\n raise ValueError('y_hat must have shape (Time, Batch, Unit)')\n # Only look at last time points\n y_loc = y_loc[-1]\n y_hat = y_hat[-1]\n\n # Fixation and location of y_hat\n y_hat_fix = y_hat[..., 0]\n y_hat_loc = popvec_BeRNN(y_hat[..., 1:])\n\n # Fixating? Correctly saccading?\n fixating = y_hat_fix > 0.5\n\n original_dist = y_loc - y_hat_loc\n dist = np.minimum(abs(original_dist), 2*np.pi-abs(original_dist))\n corr_loc = dist < 0.2*np.pi\n\n # Should fixate?\n should_fix = y_loc < 0\n\n # performance\n perf = should_fix * fixating + (1-should_fix) * corr_loc * (1-fixating)\n return perf\n\ndef save_log_BeRNN(log):\n \"\"\"Save the log file of model.\"\"\"\n model_dir = log['model_dir']\n fname = os.path.join(model_dir, 'log.json')\n with open(fname, 'w') as f:\n json.dump(log, f)\n\ndef load_log_BeRNN(model_dir):\n \"\"\"Load the log file of model save_name\"\"\"\n fname = os.path.join(model_dir, 'log.json')\n if not os.path.isfile(fname):\n return None\n\n with open(fname, 'r') as f:\n log = json.load(f)\n return log\n\ndef load_hp_BeRNN(model_dir):\n \"\"\"Load the hyper-parameter file of model save_name\"\"\"\n fname = os.path.join(model_dir, 'hp.json')\n if not os.path.isfile(fname):\n fname = os.path.join(model_dir, 'hparams.json') # backward compat\n if not os.path.isfile(fname):\n return None\n\n with open(fname, 'r') as f:\n hp = json.load(f)\n\n # Use a different seed aftering loading,\n # since loading is typically for analysis\n hp['rng'] = np.random.RandomState(hp['seed']+1000)\n return hp\n","repo_name":"olivarfrenk/multitask_BeRNN","sub_path":"Tools.py","file_name":"Tools.py","file_ext":"py","file_size_in_byte":6828,"program_lang":"python","lang":"en","doc_type":"code","stars":0,"dataset":"github-code","pt":"70"} +{"seq_id":"24318392656","text":"# -*- coding: utf-8 -*-\nimport json\nimport pprint\nimport logging\nimport requests\n\nfrom werkzeug import urls, utils\nfrom odoo import http, _\nfrom odoo.http import request\nfrom odoo.exceptions import ValidationError, UserError\n\n_logger = logging.getLogger(__name__)\n\nclass BarionController(http.Controller):\n\n @http.route(['/payment/barion/callback'], type='http', auth='public', csrf=False)\n def payment_barion_callback(self, paymentId, **post):\n acquirer_reference = paymentId if paymentId else post.get('paymentId')\n transaction = request.env['payment.transaction'].sudo().search([('acquirer_reference', '=', acquirer_reference)], limit=1)\n \n resp = transaction.get_status()\n \n status = resp.get(\"Status\")\n _logger.info(\"Barion payment status: %s\", status)\n \n transaction.process(status)\n\n return \"OK\"\n\n\n @http.route(['/payment/barion/prepare_transaction'], type='http', auth='public', csrf=False, website=True)\n def payment_barion_prepare_transaction(self, **post):\n \n #baseurl = http.request.env['ir.config_parameter'].sudo().get_param('web.base.url')\n baseurl = 
\"https://\" + request.website.domain\n transaction = request.env['payment.transaction'].sudo().search([('reference', '=', post.get('reference'))], limit=1)\n barion_items = []\n for order in transaction.sale_order_ids:\n #order.action_done()\n order_lines = order.order_line if isinstance(order.order_line, list) else [order.order_line]\n for line in order_lines:\n #_logger.info('Order line: %s', pprint.pformat(line.fields_get()))\n barion_items.append({\n \"Name\": line.name_short,\n \"Description\": line.name,\n \"Quantity\": line.product_uom_qty,\n \"Unit\": line.product_uom.name,\n \"UnitPrice\": line.price_unit,\n \"ItemTotal\": line.price_total,\n \"SKU\": line.product_id.default_code\n })\n \n barion_data = {\n \"POSKey\": transaction.acquirer_id.barion_private_key,\n \"PaymentType\": transaction.acquirer_id.barion_payment_type,\n \"PaymentRequestId\": transaction.reference,\n \"GuestCheckOut\": \"true\",\n \"FundingSources\": [\"All\"],\n \"Currency\": transaction.currency_id.name,\n \"Transactions\": [\n {\n \"POSTransactionId\": transaction.reference,\n \"Payee\": transaction.acquirer_id.barion_payee,\n \"Total\": transaction.amount,\n \"Items\": barion_items\n }\n ],\n \"RedirectUrl\": baseurl + transaction.return_url,\n \"CallbackUrl\": baseurl + \"/payment/barion/callback\"\n }\n\n if transaction.acquirer_id.barion_payment_type == \"DelayedCapture\":\n barion_data[\"DelayedCapturePeriod\"] = '7.00:00:00'\n\n _logger.info('Barion data %s', pprint.pformat(barion_data))\n\n url = 'https://api.test.barion.com/v2/Payment/Start' if transaction.acquirer_id.state == 'test' else 'https://api.barion.com/v2/Payment/Start'\n resp = requests.post(url, json=barion_data)\n _logger.info(\"Barion request: Received response:\\n%s\", resp.content)\n\n resp.raise_for_status()\n resp = json.loads(resp.content)\n\n transaction.write({\n \"acquirer_reference\": resp.get(\"PaymentId\")\n })\n\n return utils.redirect(resp.get(\"GatewayUrl\"))\n","repo_name":"suningwz/odoo-sh-eyssen","sub_path":"payment_barion/controllers/main.py","file_name":"main.py","file_ext":"py","file_size_in_byte":3580,"program_lang":"python","lang":"en","doc_type":"code","stars":0,"dataset":"github-code","pt":"70"} +{"seq_id":"30787788859","text":"import tensorflow as tf\nimport tensorflow_hub as hub\nimport tensorflow_text as text\n\n\ndef build_model():\n bert_model_name = 'small_bert/bert_en_uncased_L-4_H-512_A-8'\n tfhub_handle_preprocess = 'https://tfhub.dev/tensorflow/bert_en_uncased_preprocess/3'\n tfhub_handle_encoder = 'https://tfhub.dev/tensorflow/small_bert/bert_en_uncased_L-4_H-512_A-8/1'\n\n text_input = tf.keras.layers.Input(\n shape=(), dtype=tf.string, name='text')\n preprocessing_layer = hub.KerasLayer(\n tfhub_handle_preprocess, name='preprocessing')\n encoder_inputs = preprocessing_layer(text_input)\n encoder = hub.KerasLayer(\n tfhub_handle_encoder, trainable=True, name='BERT_encoder')\n outputs = encoder(encoder_inputs)\n net = outputs['pooled_output']\n net = tf.keras.layers.Dropout(0.1)(net)\n net = tf.keras.layers.Dense(\n 1, activation=None, name='classifier')(net)\n\n return tf.keras.Model(text_input, net)\n","repo_name":"samueltober/Patent-classification","sub_path":"models/baseline_bert.py","file_name":"baseline_bert.py","file_ext":"py","file_size_in_byte":943,"program_lang":"python","lang":"en","doc_type":"code","stars":0,"dataset":"github-code","pt":"70"} +{"seq_id":"8175027554","text":"import json\nimport os\nimport re\nimport sys\nfrom collections import Counter\nfrom copy import deepcopy\nfrom io 
import StringIO\nfrom pathlib import Path\n\n\ndef format_patient(patient_lines, first_line, last_line):\n patient_lines[-1] = patient_lines[-1].replace(\"},\", \"}\")\n patient_str = \"\".join(patient_lines)\n patient_str_full = f\"{first_line}{patient_str}{last_line}\"\n patient_file = StringIO(patient_str_full)\n patient = json.load(patient_file)\n patient_id = list(patient.keys())[0]\n patient_val = list(patient.values())[0]\n patient_val[\"patient_id\"] = patient_id\n formatted_patient = patient_val\n\n # print()\n return formatted_patient\n\n\ndef dump_patient(patient, path, c):\n with open(path, \"a+\") as fout:\n patient_line = json.dumps(patient)\n fout.write(f\"{patient_line}\\n\")\n\n # Keep track of patients dumped for batching\n c[\"total_patients_dumped\"] += 1\n c[\"batch_patients_dumped\"] += 1\n if c[\"batch_patients_dumped\"] >= c[\"num_patients_per_batch\"]:\n c[\"batch_patients_dumped\"] = 0\n c[\"batch_id\"] += 1\n print(f\"Batch: {c['batch_id']}\")\n\n\ndef chunk_big_json(\n input_path,\n output_path,\n num_patients_per_batch,\n):\n\n # Pattern to match patient ID\n patient_id_pattern = re.compile(r'^ {4}\"\\d{8}\": {\\n$')\n\n # header and footer defaults\n first_line = \"{\\n\"\n last_line = \"}\"\n first_loop = True\n\n # Set up vars\n c = Counter()\n c[\"line_num\"] = -1\n c[\"batch_id\"] = 0\n c[\"batch_patients_dumped\"] = 0\n c[\"num_patients_per_batch\"] = num_patients_per_batch\n patient_lines = []\n\n ## Remove files in output dir\n # for root, dirs, files in os.walk(output_dir):\n # for f in files:\n # print(f\"Removing {f}\")\n # os.remove(os.path.join(root, f))\n\n # Create directory after deleting\n Path(output_dir).mkdir(parents=True, exist_ok=True)\n # sys.exit()\n\n with open(input_path) as fin:\n for line in fin:\n c[\"line_num\"] += 1\n if c[\"line_num\"] % 100000000 == 0:\n print(f\"Processing line no: {c['line_num']}\", flush=True)\n # Skip the first line of the entire patients dump\n if first_loop:\n first_loop = False\n continue\n\n patient_id_match = patient_id_pattern.match(line)\n # Found patient line\n if patient_id_match:\n if patient_lines:\n patient = format_patient(patient_lines, first_line, last_line)\n dump_patient(patient, output_path, c)\n\n patient_lines.clear()\n patient_lines.append(line)\n # Take care of the last patient\n if patient_lines:\n # Remove the last line for the entire patients dump\n patient_lines.pop()\n patient = format_patient(patient_lines, first_line, last_line)\n dump_patient(patient, output_path, c)\n\n print(f\"{c}\")\n\n\nif __name__ == \"__main__\":\n repo_dir = \"/Users/hamc649/Documents/deepcare/covid-19/covid-nlp\"\n input_extension = \"json\"\n output_extension = \"jsonl\"\n n_patients_per_partition = 1000\n script_name = \"convert_json_to_jsonl\"\n script_dir = f\"{repo_dir}/{script_name}\"\n output_dir = f\"{script_dir}/output\"\n\n #input_dir = f\"{script_dir}/input/covid_like_patients_24hr\"\n input_dir = f\"{script_dir}/input\"\n \n #base_path = 'entity_risk_by_patients_processed_covidlike_admission_notes'\n #base_path = 'entity_risk_by_patients_processed_covid_admission_notes'\n base_path = 'entity_risk_by_patients_processed_note'\n\n #input_paths = [f\"{base_path}_batch{x}.json\" for x in range(16)]\n input_paths = [f'{base_path}.json']\n\n output_path = f'{output_dir}/{base_path}.jsonl'\n\n print(f\"input_paths: {input_paths}\")\n print(f\"output_path: {output_path}\")\n #sys.exit(0)\n\n for input_path in input_paths:\n print(f\"Processing input_path: {input_path}\")\n full_input_path = 
f\"{input_dir}/{input_path}\"\n chunk_big_json(\n full_input_path,\n output_path,\n n_patients_per_partition,\n )\n","repo_name":"stanford-pnnl/covid-nlp","sub_path":"src/convert_json_to_jsonl.py","file_name":"convert_json_to_jsonl.py","file_ext":"py","file_size_in_byte":4127,"program_lang":"python","lang":"en","doc_type":"code","stars":1,"dataset":"github-code","pt":"70"} +{"seq_id":"72563563107","text":"#3\n#誤差逆伝播法を用いたニューラルネットワークの学習\n\nimport numpy as np\nfrom mnist import load_mnist\nfrom TwoLayerNet import TwoLayerNet\n\n#データの読み込み(one-hot表現はTrue)\n(x_train, t_train), (x_test, t_test) = load_mnist(normalize=True, one_hot_label=True)\n\n#2層のニューラルネットワーク\n#入力層:784,隠し層:50,出力層:10,がニューロンの数\nnetwork = TwoLayerNet(input_size=784, hidden_size=50, output_size=10)\n\n#ハイパーパラメータ\niters_num = 10000 #\ntrain_size = x_train.shape[0]\nbatch_size = 100\nlearning_rate = 0.1\n\ntrain_loss_list = [] #損失\ntrain_acc_list = [] #精度\ntest_acc_list = [] #\n\niter_per_epoch = max(train_size / batch_size, 1)\n\nfor i in range(iters_num):\n #ミニバッチの取得(batch_size=100個ずつデータを無造作に取り出して行う)\n batch_mask = np.random.choice(train_size, batch_size)\n x_batch = x_train[batch_mask]\n t_batch = t_train[batch_mask]\n\n # 勾配 ここで誤差逆伝播法を用いる。\n #grad = network.numerical_gradient(x_batch, t_batch)\n grad = network.gradient(x_batch, t_batch)\n\n # 更新\n for key in ('W1', 'b1', 'W2', 'b2'):\n network.params[key] -= learning_rate * grad[key]\n\n #損失関数の値\n loss = network.loss(x_batch, t_batch)\n train_loss_list.append(loss)\n\n #1エポックごとに認識精度を表示(1に近づく)\n if i % iter_per_epoch == 0:\n train_acc = network.accuracy(x_train, t_train)\n test_acc = network.accuracy(x_test, t_test)\n train_acc_list.append(train_acc)\n test_acc_list.append(test_acc)\n print(train_acc, test_acc)\n","repo_name":"confinature/imageprocessing","sub_path":"05.誤差逆伝播法/mnist/train_neuralnet.py","file_name":"train_neuralnet.py","file_ext":"py","file_size_in_byte":1667,"program_lang":"python","lang":"en","doc_type":"code","stars":0,"dataset":"github-code","pt":"70"} +{"seq_id":"4603598014","text":"\"\"\"\nThis example should work with Discord.py and most of it's forks, including Pycord. Check dpy-cogs.py to understand how to use YARSAW in Cogs.\n\"\"\"\nfrom discord.ext import commands\nimport yarsaw\n\nbot = commands.Bot(command_prefix=\"!\")\n\n# create yarsaw client\nclient = yarsaw.Client(\"Random Stuff API Key\", \"RapidAPI Application Key\")\n\n\n@bot.command()\nasync def joke(ctx):\n joke = await client.get_safe_joke()\n await ctx.send(yarsaw.format_joke(joke))\n\n\nbot.load_extension(\"dpy-cogs\") # load the cog from dpy-cogs.py\n\nbot.run(\"TOKEN\")\n","repo_name":"BruceCodesGithub/yarsaw","sub_path":"examples/dpy.py","file_name":"dpy.py","file_ext":"py","file_size_in_byte":543,"program_lang":"python","lang":"en","doc_type":"code","stars":7,"dataset":"github-code","pt":"70"} +{"seq_id":"33381629506","text":"import re\n\n\ndef partition(array: list, low: int, high: int, modify: bool = False) -> int:\n \"\"\"\n Choose the rightmost element as the pivot, place the pivot at its correct\n position in the sorted array, and place all smaller elements to the left\n and larger elements to the right.\n\n Args:\n array (list): The list to be sorted.\n low (int): Starting index of the partition.\n high (int): Ending index of the partition.\n modify (bool, optional): If True, the array contains tuples. 
Defaults to False.\n\n Returns:\n int: Index of the pivot element.\n \"\"\"\n if modify:\n pivot = array[high][0]\n else:\n pivot = array[high]\n i = low - 1\n\n for j in range(low, high):\n if modify:\n elem = array[j][0]\n else:\n elem = array[j]\n if elem <= pivot:\n i = i + 1\n (array[i], array[j]) = (array[j], array[i])\n (array[i + 1], array[high]) = (array[high], array[i + 1])\n return i + 1\n\n\ndef quick_sort(array: list, low: int, high: int, modify: bool = False) -> None:\n \"\"\"\n Sort the array using the QuickSort algorithm.\n\n Args:\n array (list): The list to be sorted.\n low (int): Starting index of the array.\n high (int): Ending index of the array.\n modify (bool, optional): If True, the array contains tuples. Defaults to False.\n \"\"\"\n if low < high:\n pi = partition(array, low, high, modify)\n quick_sort(array, low, pi - 1, modify)\n quick_sort(array, pi + 1, high, modify)\n\n\ndef binary_search(array: list[str], target: str) -> int:\n \"\"\"\n Perform binary search on a sorted array to find the target.\n\n Args:\n array (list[str]): The sorted list to be searched.\n target (str): The target string to be searched.\n\n Returns:\n int: 0 if the target is found, -1 otherwise.\n \"\"\"\n start, stop = 0, len(array)-1\n\n while start <= stop:\n middle = (start + stop) // 2\n if re.search(f\"^{array[middle]}\", target):\n return 0\n elif array[middle] < target:\n start = middle + 1\n else:\n stop = middle - 1\n\n return -1\n","repo_name":"Mukul-code-rep/metaphor-api-project","sub_path":"helper.py","file_name":"helper.py","file_ext":"py","file_size_in_byte":2274,"program_lang":"python","lang":"en","doc_type":"code","stars":0,"dataset":"github-code","pt":"70"} +{"seq_id":"39624543920","text":"# -*- coding: utf-8 -*-\nfrom odoo import models, fields, api, exceptions, _\n\n\nclass EbayCurrencyMapper(models.Model):\n _name = 'ebay.currency.mapper'\n _description = _('Model that is used to map custom ebay currency string to internal currency record')\n\n external_code = fields.Char(string='External currency code', required=True)\n currency_id = fields.Many2one('res.currency', string='Currency', required=True)\n\n @api.multi\n @api.constrains('external_code')\n def _check_external_code(self):\n \"\"\"Ensure that external code is unique\"\"\"\n for rec in self:\n if self.search_count([('external_code', '=', rec.external_code)]) > 1:\n raise exceptions.ValidationError(\n _('External code {} already has a mapping!').format(rec.external_code)\n )\n\n @api.multi\n def name_get(self):\n \"\"\"Custom name get for mapper\"\"\"\n return [\n (x.id, _('[%s] -> [%s]') % (x.external_code, x.currency_id.name)) for x in self\n ]\n","repo_name":"websharp950223/financing","sub_path":"robo_verslas/ebay/models/ebay_currency_mapper.py","file_name":"ebay_currency_mapper.py","file_ext":"py","file_size_in_byte":1030,"program_lang":"python","lang":"en","doc_type":"code","stars":0,"dataset":"github-code","pt":"70"} +{"seq_id":"22783172084","text":"'''2-Um funcionário de uma empresa recebe aumento salarial anualmente: Sabe-se\r\nque:\r\n• Esse funcionário foi contratado em 1995, com salário inicial de R$\r\n1.000,00;\r\n• Em 1996 recebeu aumento de 1,5% sobre seu salário inicial;\r\n• A partir de 1997 (inclusive), os aumentos salariais sempre correspondem\r\nao dobro do percentual do ano anterior. Faça um programa que\r\ndetermine o salário atual desse funcionário. 
Após concluir isto, altere o\r\nprograma permitindo que o usuário digite o salário inicial do funcionário.'''\r\n\r\nsalario=float(input('Insira o salario desejado: '))\r\nporcentage=float(input('Insira a porcentagem sendo: '))\r\npercentual=porcentage/100.0\r\nporcentage2=(porcentage*2**(25))\r\naumento=salario*percentual\r\nnovosalario=aumento+salario\r\npercentual2=porcentage2/100.0\r\naumento2=novosalario*percentual2\r\nnovosalario2=aumento2+novosalario\r\nprint(porcentage2)\r\nprint(\"O Seu aumento foi de:\",aumento)\r\nprint(\"O Seu novo Salario é: \", novosalario)\r\nif str(input('Deseja ver seu Salario proximo ano?')) == 'Y' or 'y':\r\n print(novosalario2)\r\n","repo_name":"vbtatagiba/Eng.Software","sub_path":"Python/Algoritmo/Aula 6/EX Prova.py","file_name":"EX Prova.py","file_ext":"py","file_size_in_byte":1070,"program_lang":"python","lang":"pt","doc_type":"code","stars":1,"dataset":"github-code","pt":"70"} +{"seq_id":"16945344913","text":"import os\nimport numpy as np\nfrom .read_buffer_offline import read_buffer_offline_data, read_buffer_offline_events, read_buffer_offline_header\nfrom mindaffectBCI.decoder.utils import block_randomize, butter_sosfilt, upsample_codebook, lab2ind, window_axis\n\ntrigger_event='stimulus.note.play' # the actual times the user hit the button\n\ndef load_brainsonfire(datadir, sessdir=None, sessfn=None, fs_out=60, stopband=((45,65),(0,1),(25,-1)), subtriallen=10, nvirt=20, chIdx=slice(64), verb=2):\n \n # load the data file\n Xfn = datadir\n if sessdir:\n Xfn = os.path.join(Xfn, sessdir)\n if sessfn:\n Xfn = os.path.join(Xfn, sessfn)\n sessdir = os.path.dirname(Xfn)\n\n if verb > 1: print(\"Loading header\")\n hdr=read_buffer_offline_header(Xfn)\n if verb > 1: print(\"Loading data\")\n X = read_buffer_offline_data(Xfn,hdr) # (nsamp,nch)\n if verb > 1: print(\"Loading events\")\n evts=read_buffer_offline_events(Xfn)\n\n fs = hdr.fs\n ch_names = hdr.labels\n\n if chIdx is not None:\n X = X [...,chIdx]\n ch_names = ch_names[chIdx] if ch_names is not None else None\n\n # pre-resample to save memory\n rsrate = int(fs//120)\n if rsrate > 1:\n if verb > 0: print(\"Pre-re-sample by {}: {}->{}Hz\".format(rsrate,fs,fs/rsrate))\n X = X [::rsrate,:]\n for e in evts:\n e.sample = e.sample/rsrate\n fs = fs/rsrate\n\n if verb > 0: print(\"X={} @{}Hz\".format(X.shape,fs),flush=True)\n\n # extract the trigger info\n trigevts = [e for e in evts if e.type.lower() == trigger_event]\n trig_samp= np.array([e.sample for e in trigevts],dtype=int)\n trig_val = [e.value for e in trigevts]\n trig_ind, lab2class = lab2ind(trig_val) # convert to indicator (ntrig,ncls)\n # up-sample to stim rate\n Y = np.zeros((X.shape[0],trig_ind.shape[-1]),dtype=bool)\n Y[trig_samp,:] = trig_ind\n if verb > 0:\n print(\"Y={}\".format(Y.shape))\n \n # BODGE: trim to useful data range\n if .1 < (trig_samp[0]-fs)/X.shape[0] or (trig_samp[-1]+fs)/X.shape[0] < .9:\n if verb>0 : print('Trimming range: {}-{}s'.format(trig_samp[0]/fs,trig_samp[-1]/fs))\n # limit to the useful data range\n rng = slice(int(trig_samp[0]-fs), int(trig_samp[-1]+fs))\n X = X[rng, :]\n Y = Y[rng, ...]\n if verb > 0: print(\"X={}\".format(X.shape))\n if verb > 0: print(\"Y={}\".format(Y.shape))\n\n # preprocess -> spectral filter, in continuous time!\n if stopband is not None:\n if verb > 0:\n print(\"preFilter: {}Hz\".format(stopband))\n X, _, _ = butter_sosfilt(X,stopband,fs)\n \n # preprocess -> downsample\n resamprate = int(fs/fs_out)\n if resamprate > 1:\n if verb > 0:\n print(\"resample by {}: 
{}->{}Hz\".format(resamprate, fs, fs/resamprate))\n X = X[..., ::resamprate, :] # decimate X (trl, samp, d)\n # re-sample Y, being sure to keep any events in the re-sample window\n Y = window_axis(Y,winsz=resamprate,step=resamprate,axis=-2) # (trl, samp, win, e)\n Y = np.max(Y,axis=-2) # (trl,samp,e) N.B. use max so don't loose single sample events\n fs = fs/resamprate\n\n # make virtual targets\n Y = Y[:,np.newaxis,:] # (nsamp,1,e)\n Y_virt = block_randomize(Y, nvirt, axis=-3) # (nsamp,nvirt,e)\n Y = np.concatenate((Y, Y_virt), axis=-2) # (nsamp,1+nvirt,e)\n if verb > 0: print(\"Y={}\".format(Y.shape))\n\n # cut into sub-trials\n nsubtrials = X.shape[0]/fs/subtriallen\n if nsubtrials > 1:\n winsz = int(X.shape[0]//nsubtrials)\n if verb > 0: print('subtrial winsz={}'.format(winsz))\n # slice into sub-trials\n X = window_axis(X,axis=0,winsz=winsz,step=winsz) # (trl,win,d)\n Y = window_axis(Y,axis=0,winsz=winsz,step=winsz) # (trl,win,nY)\n if verb > 0: print(\"X={}\".format(X.shape))\n if verb > 0: print(\"Y={}\".format(Y.shape))\n \n # make coords array for the meta-info about the dimensions of X\n coords = [None]*X.ndim\n coords[0] = {'name':'trial'}\n coords[1] = {'name':'time','unit':'ms', \\\n 'coords':np.arange(X.shape[1])/fs, \\\n 'fs':fs}\n coords[2] = {'name':'channel','coords':ch_names}\n # return data + metadata\n return (X, Y, coords)\n\ndef testcase():\n import sys\n\n if os.path.isdir('D:\\external_data'):\n datadir = 'D:/own_experiments/'\n else:\n datadir = '/home/jadref/data/bci/own_experiments'\n sessfn = os.path.join(datadir,'motor_imagery/brainsonfire/brains_on_fire_online/subject01/raw_buffer/0001')\n # command-line, for testing\n if len(sys.argv) > 1:\n sessfn = sys.argv[1]\n \n from load_brainsonfire import load_brainsonfire\n print(\"Loading: {}\".format(sessfn))\n oX, oY, coords = load_brainsonfire(sessfn, fs_out=60)\n times = coords[1]['coords']\n fs = coords[1]['fs']\n ch_names = coords[2]['coords']\n X=oX.copy()\n Y=oY.copy()\n\n print(\"X({}){}\".format([c['name'] for c in coords],X.shape))\n print(\"Y={}\".format(Y.shape))\n print(\"fs={}\".format(fs))\n\n tau=fs*.3\n evtlabs = None\n times=np.arange(int(tau))/fs\n rank=10\n\n # visualize the dataset\n from stim2event import stim2event\n from updateSummaryStatistics import updateSummaryStatistics, plot_erp, plot_summary_statistics, idOutliers\n import matplotlib.pyplot as plt\n \n Cxx, Cxy, Cyy = updateSummaryStatistics(X, Y[...,0:1,:], tau=tau)\n\n plt.figure(1);\n print(\"summary stats\")\n plot_summary_statistics(Cxx, Cxy, Cyy, evtlabs, times, ch_names)\n\n plt.figure(2);\n print(\"ERP\")\n plot_erp(Cxy, ch_names=ch_names, evtlabs=evtlabs, times=times, plottype='plot', axis=-1)\n\n from model_fitting import MultiCCA\n from decodingCurveSupervised import decodingCurveSupervised\n cca = MultiCCA(tau=tau, evtlabs=evtlabs, rank=rank)\n scores = cca.cv_fit(X, Y)\n Fy = scores['estimator']\n print(\"Fy={}\".format(Fy.shape))\n (_)=decodingCurveSupervised(Fy)\n\n # plot the solution\n from scoreStimulus import factored2full\n print(\"Plot Model\")\n plt.figure(3)\n plot_erp(factored2full(cca.W_, cca.R_), ch_names=ch_names, evtlabs=evtlabs, times=times)\n # plot Fy\n plt.figure(4)\n for ti in range(min(Fy.shape[0],25)):\n plt.subplot(5,5,ti+1)\n plt.imshow(np.cumsum(Fy[ti,:,:],axis=-2),aspect='auto')\n plt.show()\n \n \nif __name__==\"__main__\":\n 
testcase()\n","repo_name":"mindaffect/pymindaffectBCI","sub_path":"mindaffectBCI/decoder/offline/load_brainsonfire.py","file_name":"load_brainsonfire.py","file_ext":"py","file_size_in_byte":6375,"program_lang":"python","lang":"en","doc_type":"code","stars":55,"dataset":"github-code","pt":"70"} +{"seq_id":"31060105625","text":"from lib.utils import Utils\nimport sys\nimport traceback\nfrom inspect import signature\nimport re\nimport asyncio\nimport logging\nimport requests\nfrom iowait import IOWait\nimport socket\nimport struct\nimport subprocess\nimport threading\nimport time\nfrom contextlib import suppress\nfrom . import aioudp\n\n\n# Turn off ssl warnings from urllib\nrequests.packages.urllib3.disable_warnings(requests.packages.urllib3.exceptions.InsecureRequestWarning)\nlogging.getLogger('urllib3').setLevel(logging.WARNING)\n\n\nclass Network(object):\n \"\"\"\n Provide useful static methods that you can use in your projects.\n\n NOTE: Some format check routines were duplicate with lib.utils. As these primarily check string formats and are used for metadata parsing, they were removed here to prevent duplicates.\n \"\"\"\n\n @staticmethod\n def ip_port_to_socket(ip, port):\n \"\"\"\n Return an ip address plus port to a socket string.\n\n Format is 'ip:port' for IPv4 or '[ip]:port' for IPv6\n\n :return: Socket address / IP endpoint as string\n :rtype: string\n \"\"\"\n if Utils.is_ipv6(ip):\n ip = f'[{ip}]'\n return f'{ip}:{port}'\n\n @staticmethod\n def family_to_string(family):\n \"\"\"\n Convert a socket address family to an ip version string 'IPv4' or 'IPv6'.\n\n :param family: Socket family\n :type family: socket.AF_INET or socket.AF_INET6\n\n :return: 'IPv4' or 'IPv6'\n :rtype: string\n \"\"\"\n return 'IPv6' if family == socket.AF_INET6 else 'IPv4'\n\n @staticmethod\n def ping(ip):\n \"\"\"\n Try to ICMP ping a host using external OS utilities. IPv4 only.\n\n :param ip: IPv4 address as a string\n :type ip: string\n\n :return: True if a reachable, false otherwise.\n :rtype: bool\n \"\"\"\n logger = logging.getLogger(__name__)\n if subprocess.call(f'ping -c 1 {ip}', shell=True, stdout=open('/dev/null', 'w'), stderr=subprocess.STDOUT) == 0:\n logger.debug(f'Ping: {ip} is online')\n return True\n else:\n logger.debug(f'Ping: {ip} is offline')\n return False\n\n @staticmethod\n def ping_port(ip, port=80):\n \"\"\"\n Try to reach a given TCP port. 
IPv4 only.\n\n :param ip: IPv4 address\n :param port: Port number\n\n :type ip: string\n :type port: int\n\n :return: True if reachable, false otherwise.\n :rtype: bool\n \"\"\"\n logger = logging.getLogger(__name__)\n sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n sock.settimeout(2)\n if sock.connect_ex((ip, int(port))) == 0:\n logger.debug(f'Ping: port {port} on {ip} is reachable')\n sock.close()\n return True\n else:\n logger.debug(f'Ping: port {port} on {ip} is offline or not reachable')\n sock.close()\n return False\n\n @staticmethod\n def send_wol(mac, ip='255.255.255.255'):\n \"\"\"\n Send a wake on lan packet to the given mac address using ipv4 broadcast (or directed to specific ip).\n\n :param mac: Mac address to wake up (pure numbers or with any separator)\n :type mac: string\n \"\"\"\n logger = logging.getLogger(__name__)\n if len(mac) == 12:\n pass\n elif len(mac) == 12 + 5:\n mac = mac.replace(mac[2], '')\n else:\n logger.error('Incorrect MAC address format')\n return\n\n data = ''.join(['FFFFFFFFFFFF', mac * 16])\n send_data = b''\n for i in range(0, len(data), 2):\n send_data = b''.join([send_data, struct.pack('B', int(data[i: i + 2], 16))])\n\n for _ in range(15):\n sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)\n sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)\n sock.sendto(send_data, (ip, 9))\n logger.debug(f'Sent WOL packet to {mac}')\n\n @staticmethod\n def validate_inet_addr(addr, port):\n \"\"\"\n Validate that addr:port resolve properly and return resolved IP address and port.\n\n :param addr: hostname or ip address under test\n :type addr: str\n :param port: port number under test\n :type port: num\n :return: (ip_address, port, family) or (None, 0, None) if error occurs\n :rtype: tuple\n \"\"\"\n logger = logging.getLogger(__name__)\n # Test if host is empty\n if addr == '':\n return ('', port, socket.AF_INET)\n else:\n # try to resolve addr to get more info\n logger.debug(f'trying to resolve addr {addr} with port {port}')\n try:\n family, sockettype, proto, canonname, socketaddr = socket.getaddrinfo(addr, None)[0]\n # Check if resolved address is IPv4 or IPv6\n if family == socket.AF_INET:\n ip, _ = socketaddr\n elif family == socket.AF_INET6:\n ip, _, flow_info, scope_id = socketaddr\n else:\n # might be AF_UNIX or something esoteric?\n logger.error(f'Unsupported address family {family}')\n ip = None\n if ip is not None:\n logger.info(f'Resolved {addr} to {Network.family_to_string(family)} address {ip}')\n except socket.gaierror as e:\n # Unable to resolve hostname\n logger.error(f'Cannot resolve {addr} to a valid ip address (v4 or v6): {e}')\n ip = None\n port = 0\n family = None\n\n return (ip, port, family)\n\n @staticmethod\n def clean_uri(uri, mode='show'):\n \"\"\"\n Check URIs for embedded http/https login data (http://user:pass@domain.tld...) 
and clean it.\n\n Possible modes are:\n\n - 'show': don't change URI (default) -> ``http://user:pass@domain.tld...``\n - 'mask': replace login data with ``***`` -> ``http://***:***@domain.tld...``\n - 'strip': remove login data part -> ``http://domain.tld...``\n\n :param uri: full URI to check and process\n :param mode: handling mode, one of 'show', 'strip', 'mask'\n :return: resulting URI string\n\n :type uri: str\n :type mode: str\n :rtype: str\n \"\"\"\n # find login data\n pattern = re.compile('http([s]?)://([^:]+:[^@]+@)')\n # possible replacement modes\n replacement = {\n 'strip': 'http\\\\g<1>://',\n 'mask': 'http\\\\g<1>://***:***@'\n }\n\n # if no change requested or no login data found, return unchanged\n if mode not in replacement or not pattern.match(uri):\n return uri\n\n # return appropriately changed URI\n return pattern.sub(replacement[mode], uri)\n\n\nclass Connections(object):\n \"\"\"\n Within SmartHome.py there is one instance of this class\n\n The monitoring feature enables autoconnecting and auto-\n reconnecting by checking .connected and calling\n .connect()\n \"\"\"\n\n _monitor = []\n\n def __init__(self):\n self._name = self.__class__.__name__\n\n def monitor(self, obj):\n if obj not in self._monitor:\n self._monitor.append(obj)\n\n def unmonitor(self, obj):\n if obj in self._monitor:\n self._monitor.remove(obj)\n\n def check(self):\n for obj in self._monitor:\n if obj.alive and not obj.connected:\n obj.connect()\n\n\nclass Http(object):\n \"\"\"\n Provide methods to simplify HTTP connections, especially to talk to HTTP servers.\n\n :param baseurl: base URL used everywhere in this instance (example: http://www.myserver.tld)\n :param timeout: Set a maximum amount of seconds the class should try to establish a connection\n :param hide_login: Hide or mask login data in logged http(s) requests (see ``Network.clean_uri()``)\n\n :type baseurl: str\n :type timeout: int\n :type hide_login: str\n \"\"\"\n\n def __init__(self, baseurl='', timeout=10, hide_login='show', name=None):\n self.logger = logging.getLogger(__name__)\n\n self.baseurl = baseurl\n self._response = None\n self.timeout = timeout\n self._session = requests.Session()\n self._hide_login = hide_login\n\n self._id = f'({name if name else \"HTTP\"}_{self.baseurl})'\n\n def HTTPDigestAuth(self, user=None, password=None):\n \"\"\"\n Create a HTTPDigestAuth instance and returns it to the caller.\n\n :param user: Username\n :param password: Password\n\n :type user: str\n :type password: str\n\n :return: HTTPDigestAuth object\n :rtype: HTTPDigestAuth\n \"\"\"\n return requests.auth.HTTPDigestAuth(user, password)\n\n def post_json(self, url=None, params=None, verify=True, auth=None, json=None, files={}):\n \"\"\"\n Launch a POST request and return JSON answer as a dict or None on error.\n\n :param url: Optional URL to fetch from. 
If None (default) use baseurl given on init.\n :param params: Optional dict of parameters to add to URL query string.\n :param verify: Set to false to ignore SSL certificate verification errors (for self-signed for example)\n :param auth: Optional authentication object\n\n :type url: str\n :type params: dict\n :type verify: bool\n :type auth: HTTPBasicAuth | HTTPDigestAuth | ...\n\n :return: JSON answer decoded into a dict or None on whatever error occured\n :rtype: dict | None\n \"\"\"\n if self.__post(url=url, params=params, verify=verify, auth=auth, json=json, files=files):\n json = None\n try:\n json = self._response.json()\n except Exception:\n self.logger.warning(f'{self._id} invalid JSON received from {Network.clean_uri(url, self._hide_login) if url else self.baseurl}')\n return json\n return None\n\n def get_json(self, url=None, params=None, verify=True, auth=None):\n \"\"\"\n Launch a GET request and return JSON answer as a dict or None on error.\n\n :param url: Optional URL to fetch from. If None (default) use baseurl given on init.\n :param params: Optional dict of parameters to add to URL query string.\n :param verify: Set to false to ignore SSL certificate verification errors (for self-signed for example)\n :param auth: Optional authentication object\n\n :type url: str\n :type params: dict\n :type verify: bool\n :type auth: HTTPBasicAuth | HTTPDigestAuth | ...\n\n :return: JSON answer decoded into a dict or None on whatever error occured\n :rtype: dict | None\n \"\"\"\n if self.__get(url=url, params=params, verify=verify, auth=auth):\n json = None\n try:\n json = self._response.json()\n except Exception:\n self.logger.warning(f'{self._id} invalid JSON received from {Network.clean_uri(url if url else self.baseurl, self._hide_login) }')\n return json\n return None\n\n def get_text(self, url=None, params=None, encoding=None, timeout=None):\n \"\"\"\n Launch a GET request and return answer as string or None on error.\n\n :param url: Optional URL to fetch from. Default is to use baseurl given to constructor.\n :param params: Optional dict of parameters to add to URL query string.\n :param encoding: Optional encoding of the received text. Default is to let the lib try to figure out the right encoding.\n\n :type url: str\n :type params: dict\n :type encoding: str\n\n :return: Answer decoded into a string or None on whatever error occured\n :rtype: str | None\n \"\"\"\n _text = None\n if self.__get(url=url, params=params, timeout=timeout):\n try:\n if encoding:\n self._response.encoding = encoding\n _text = self._response.text\n except Exception as e:\n self.logger.error(f'{self._id} successful GET, but decoding response failed. This should never happen...error was: {e}')\n return _text\n\n def download(self, url=None, local=None, params=None, verify=True, auth=None):\n \"\"\"\n Download a binary file to a local path.\n\n :param url: Remote file to download. Attention: Must be full url. 
'baseurl' is NOT prefixed here.\n :param local: Local file to save\n :param params: Optional dict of parameters to add to URL query string.\n :param verify: Set to false to ignore SSL certificate verification errors (for self-signed for example)\n :param auth: Optional authentication object\n\n :type url: str\n :type local: str\n :type params: dict\n :type verify: bool\n :type auth: HTTPBasicAuth | HTTPDigestAuth | ...\n\n :return: Returns true on success, else false\n :rtype: bool\n \"\"\"\n if self.__get(url=url, params=params, verify=verify, auth=auth, stream=True):\n self.logger.debug(f'{self._id} download of {Network.clean_uri(url, self._hide_login)} successfully completed, saving to {local}')\n with open(str(local), 'wb') as f:\n for chunk in self._response:\n f.write(chunk)\n return True\n else:\n self.logger.warning(f'{self._id} download error: {Network.clean_uri(url, self._hide_login)}')\n return False\n\n def get_binary(self, url=None, params=None):\n \"\"\"\n Launch a GET request and return answer as raw binary data or None on error.\n\n This is useful for downloading binary objects / files.\n\n :param url: Optional URL to fetch from. Default is to use baseurl given to constructor.\n :param params: Optional dict of parameters to add to URL query string.\n\n :type url: str\n :type params: dict\n\n :return: Answer as raw binary objector None on whatever error occured\n :rtype: bytes | None\n \"\"\"\n self.__get(url=url, params=params)\n return self._response.content\n\n def response_status(self):\n \"\"\"\n Return the status code (200, 404, ...) of the last executed request.\n\n If GET request was not possible and thus no HTTP statuscode is available,\n the returned status code is 0.\n\n :return: Status code and text of last request\n :rtype: tuple(int, str)\n \"\"\"\n try:\n (code, reason) = (self._response.status_code, self._response.reason)\n except Exception:\n code = 0\n reason = 'Unable to complete GET request'\n return (code, reason)\n\n def response_headers(self):\n \"\"\"\n Return a dictionary with the server return headers of the last executed request.\n\n :return: Headers returned by server\n :rtype: dict\n \"\"\"\n return self._response.headers\n\n def response_cookies(self):\n \"\"\"\n Return a dictionary with the cookies the server may have sent on the last executed request.\n\n :return: Cookies returned by server\n :rtype: dict\n \"\"\"\n return self._response.cookies\n\n def response_object(self):\n \"\"\"\n Return the raw response object for advanced ussage.\n\n :return: Reponse object as returned by underlying requests library\n :rtype: `requests.Response `_\n \"\"\"\n return self._response\n\n def __post(self, url=None, params=None, timeout=None, verify=True, auth=None, json=None, data=None, files={}):\n \"\"\"\n Send POST request. 
Non-documented arguments are passed on to requests.request().\n\n :param url: URL to which to POST\n :type url: str\n :param data: data to submit to POST\n :type data: dict or bytes or file\n\n :return: True if POST was successful\n :rtype: bool\n \"\"\"\n url = self.baseurl + url if url else self.baseurl\n timeout = timeout if timeout else self.timeout\n data = json if json else data\n self.logger.info(f'{self._id} sending POST request {json} to {Network.clean_uri(url, self._hide_login)}')\n try:\n self._response = self._session.post(url, params=params, timeout=timeout, verify=verify, auth=auth, data=data, files=files)\n self.logger.debug(f'{self.response_status()} Posted to URL {Network.clean_uri(self._response.url, self._hide_login)}')\n except Exception as e:\n self.logger.warning(f'{self._id} error sending POST request to {Network.clean_uri(url, self._hide_login)}: {e}')\n return False\n return True\n\n def __get(self, url=None, params=None, timeout=None, verify=True, auth=None, stream=False):\n \"\"\"\n Send POST request. Non-documented arguments are passed on to requests.request().\n\n :param url: URL to which to GET\n :type url: str\n\n :return: True if GET was successful\n :rtype: bool\n \"\"\"\n url = self.baseurl + url if url else self.baseurl\n timeout = timeout if timeout else self.timeout\n self.logger.info(f'{self._id} sending GET request to {Network.clean_uri(url, self._hide_login)}')\n try:\n self._response = self._session.get(url, params=params, timeout=timeout, verify=verify, auth=auth, stream=stream)\n self.logger.debug(f'{self._id} {self.response_status()} fetched URL {Network.clean_uri(self._response.url, self._hide_login)}')\n except Exception as e:\n self.logger.warning(f'{self._id} error sending GET request to {Network.clean_uri(url, self._hide_login)}: {e}')\n self._response = None\n return False\n return True\n\n\nclass Tcp_client(object):\n \"\"\"\n Structured class to handle locally initiated TCP connections with two-way communication.\n\n The callbacks need to be defined as follows:\n\n def connected_callback(Tcp_client_instance)\n def receiving_callback(Tcp_client_instance)\n def disconnected_callback(Tcp_client_instance)\n def data_received_callback(Tcp_client_instance, message)\n\n (Class members need the additional first `self` parameter)\n\n\n :param host: Remote host name or ip address (v4 or v6)\n :param port: Remote host port to connect to\n :param name: Name of this connection (mainly for logging purposes). Try to keep the name short.\n :param autoreconnect: Should the socket try to reconnect on lost connection (or finished connect cycle)\n :param connect_retries: Number of connect retries per cycle\n :param connect_cycle: Time between retries inside a connect cycle\n :param retry_cycle: Time between cycles if :param:autoreconnect is True\n :param binary: Switch between binary and text mode. Text will be encoded / decoded using encoding parameter.\n :param terminator: Terminator to use to split received data into chunks (split lines for example). If integer then split into n bytes. Default is None means process chunks as received.\n :param autoconnect: automatically connect on send. 
\n def __init__(self, host, port, name=None, autoreconnect=True, connect_retries=5, connect_cycle=5, retry_cycle=30, binary=False, terminator=None, timeout=1, autoconnect=None):\n self.logger = logging.getLogger(__name__)\n\n # public properties\n self.name = name\n self.terminator = terminator\n\n # protected properties\n self._host = host\n self._port = port\n self._autoreconnect = autoreconnect\n self._autoconnect = autoconnect\n if self._autoconnect is None:\n self._autoconnect = self._autoreconnect\n self._is_connected = False\n self._is_receiving = False\n self._connect_retries = connect_retries\n self._connect_cycle = connect_cycle\n self._retry_cycle = retry_cycle\n self._timeout = timeout\n\n self._hostip = None\n self._family = socket.AF_INET\n self._socket = None\n self._connect_counter = 0\n self._binary = binary\n\n self._connected_callback = None\n self._receiving_callback = None\n self._disconnected_callback = None\n self._data_received_callback = None\n\n # private properties\n self.__connect_thread = None\n self.__connect_threadlock = threading.Lock()\n self.__receive_thread = None\n self.__receive_threadlock = threading.Lock()\n self.__running = False\n\n # self.logger.setLevel(logging.DEBUG) # This should NOT be set here; configure the lib.network logger in etc/logging.yaml instead!\n\n self._id = f'({self.name if self.name else \"TCP_Client\"}_{self._host}:{self._port})'\n (self._hostip, self._port, self._family) = Network.validate_inet_addr(host, port)\n if self._hostip is not None:\n self._id = f'({self.name if self.name else \"TCP_Client\"}_{self._hostip}:{self._port})'\n self.logger.info(f'{self._id} Initializing a connection to {self._host} on TCP port {self._port} {\"with\" if self._autoreconnect else \"without\"} autoreconnect')\n else:\n self.logger.error(f'{self._id} Connection to {self._host} not possible, invalid address')\n\n def set_callbacks(self, connected=None, receiving=None, data_received=None, disconnected=None):\n \"\"\"\n Set callbacks to caller for different socket events.\n\n :param connected: Called whenever a connection is established successfully\n :param receiving: Called when the receive loop starts\n :param data_received: Called when data is received\n :param disconnected: Called when a connection has been dropped for whatever reason\n\n :type connected: function\n :type receiving: function\n :type data_received: function\n :type disconnected: function\n \"\"\"\n if connected:\n params = len(signature(connected).parameters)\n self.logger.debug(f\"connected_callback for {self._id} is {connected.__qualname__} and it expects {params} arguments\")\n self._connected_callback = connected\n if receiving:\n params = len(signature(receiving).parameters)\n self.logger.debug(f\"receiving_callback for {self._id} is {receiving.__qualname__} and it expects {params} arguments\")\n self._receiving_callback = receiving\n if disconnected:\n params = len(signature(disconnected).parameters)\n self.logger.debug(f\"disconnected_callback for {self._id} is {disconnected.__qualname__} and it expects {params} arguments\")\n self._disconnected_callback = disconnected\n if data_received:\n params = len(signature(data_received).parameters)\n self.logger.debug(f\"data_received_callback for {self._id} is {data_received.__qualname__} and it expects {params} arguments\")\n self._data_received_callback = data_received\n
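For illustration, this is how the callbacks documented above are typically wired up. The host, port and handler bodies are placeholder assumptions; the class and method names come from this module.

# Illustrative wiring sketch for Tcp_client.
def on_connected(client):
    client.send('hello\n')          # send() encodes str to utf-8 bytes

def on_data(client, message):
    print(f'received: {message}')   # one call per terminator-delimited frame

def on_disconnected(client):
    print('connection lost')

client = Tcp_client(host='127.0.0.1', port=5000, name='demo', terminator=b'\n')
client.set_callbacks(connected=on_connected, data_received=on_data, disconnected=on_disconnected)
client.connect()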
\n def connect(self):\n \"\"\"\n Connect the socket.\n\n :return: False if an error prevented us from launching a connection thread. True if a connection thread has been started.\n :rtype: bool\n \"\"\"\n if self._is_connected: # return false if already connected\n self.logger.debug(f'{self._id} already connected, ignoring new request')\n return False\n\n if self._hostip is None: # return False if no valid ip to connect to\n self.logger.error(f'{self._id} no valid IP address to connect')\n self._is_connected = False\n return False\n\n self.logger.debug(f'Starting connect to {self._host}:{self._port}')\n if not self.__connect_thread or not self.__connect_thread.is_alive():\n self.__connect_thread = threading.Thread(target=self._connect_thread_worker, name=f'TCP_Connect {self._id}')\n self.__connect_thread.daemon = True\n self.logger.debug(f'connect() to {self._host}:{self._port}: self.__running={self.__running}, self.__connect_thread.is_alive()={self.__connect_thread.is_alive()}')\n if not self.__running or not self.__connect_thread.is_alive():\n self.logger.debug(f'connect() to {self._host}:{self._port}: calling __connect_thread.start()')\n self.__connect_thread.start()\n self.logger.debug(f'leaving connect() to {self._host}:{self._port}')\n return True\n\n def connected(self):\n \"\"\"\n Return the current connection state.\n\n :return: True if an active connection exists, else False.\n :rtype: bool\n \"\"\"\n return self._is_connected\n\n def send(self, message):\n \"\"\"\n Send a message to the server. Can be a string, bytes or a bytes array.\n\n :return: True if message has been successfully sent, else False.\n :rtype: bool\n \"\"\"\n if not isinstance(message, (bytes, bytearray)):\n try:\n message = message.encode('utf-8')\n except Exception:\n self.logger.warning(f'{self._id} error encoding message for client')\n return False\n\n # automatically (re)connect on send attempt\n if not self._is_connected:\n if self._autoconnect:\n self.logger.debug(f'{self._id} autoconnecting on send attempt, message is {message}')\n self.connect()\n else:\n self.logger.warning(f'{self._id} trying to send {message}, but not connected and autoconnect not active. Aborting.')\n return False\n\n try:\n if self._is_connected:\n bytes_sent = self._socket.send(message)\n if bytes_sent != len(message):\n self.logger.warning(f'{self._id} error sending message {message}: message truncated, sent {bytes_sent} of {len(message)} bytes')\n else:\n return False\n\n except (BrokenPipeError, TimeoutError) as e:\n if e.errno == 60:\n # timeout\n self.logger.warning(f'{self._id} detected timeout, disconnecting, send failed.')\n else:\n self.logger.warning(f'{self._id} detected disconnect, send failed.')\n self._is_connected = False\n try:\n self._socket.shutdown(socket.SHUT_RDWR)\n except Exception:\n pass\n if self._disconnected_callback:\n self._disconnected_callback(self)\n if self._autoreconnect:\n self.logger.debug(f'{self._id} autoreconnect enabled')\n self.connect()\n return False\n\n except Exception as e: # log errors we are not prepared to handle and raise exception for further debugging\n self.logger.warning(f'{self._id} unhandled error on sending, cannot send data {message}. Error: {e}')\n raise\n\n return True\n
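One side note on the send() method above: socket.send() may transmit fewer bytes than requested, which the code only logs as truncation. The standard remedy is a retry loop (or socket.sendall()); a minimal sketch follows. This reflects standard-library behaviour and is not part of this class.

# Sketch: retry partial sends until the whole buffer is out.
def send_all(sock, data: bytes) -> None:
    sent_total = 0
    while sent_total < len(data):
        sent = sock.send(data[sent_total:])
        if sent == 0:
            # a zero return means the peer closed the connection
            raise ConnectionError('socket connection broken')
        sent_total += sent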
\n def _connect_thread_worker(self):\n \"\"\"\n Thread worker to handle connection.\n \"\"\"\n if not self.__connect_threadlock.acquire(blocking=False):\n self.logger.info(f'{self._id} connection attempt already in progress, ignoring new request')\n return\n if self._is_connected:\n self.logger.info(f'{self._id} already connected, ignoring new request')\n self.__connect_threadlock.release()\n return\n self.logger.debug(f'{self._id} starting connection cycle')\n self._connect_counter = 0\n self.__running = True\n while self.__running and not self._is_connected:\n # Try a full connect cycle\n while not self._is_connected and self._connect_counter < self._connect_retries and self.__running:\n self._connect()\n if self._is_connected:\n try:\n self.__connect_threadlock.release()\n if self._connected_callback:\n self._connected_callback(self)\n name = f'TCP_Client {self._id}'\n self.__receive_thread = threading.Thread(target=self.__receive_thread_worker, name=name)\n self.__receive_thread.daemon = True\n self.__receive_thread.start()\n except Exception:\n self.logger.error(f\"could not start __receive_thread_worker for {self._id}\")\n raise\n return\n else:\n self.logger.warning(f\"self._connect() for {self._id} did not work\")\n if self.__running:\n self._sleep(self._connect_cycle)\n\n if self._autoreconnect and self.__running:\n self._sleep(self._retry_cycle)\n self._connect_counter = 0\n else:\n break\n try:\n self.__connect_threadlock.release()\n except Exception:\n self.logger.debug(f'{self._id} exception while trying self.__connect_threadlock.release()')\n\n def _connect(self):\n \"\"\"\n Initiate connection.\n \"\"\"\n self.logger.debug(f'{self._id} connecting using TCP/{\"IPv6\" if self._family == socket.AF_INET6 else \"IPv4\"} {\"with\" if self._autoreconnect else \"without\"} autoreconnect')\n # Try to connect to remote host using ip (v4 or v6)\n try:\n self._socket = socket.socket(self._family, socket.SOCK_STREAM)\n self._socket.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)\n self._socket.settimeout(5)\n self._socket.connect((f'{self._hostip}', int(self._port)))\n self._socket.settimeout(self._timeout)\n self._is_connected = True\n self.logger.info(f'{self._id} connected')\n # Connection error\n except Exception as err:\n self._is_connected = False\n self._connect_counter += 1\n self.logger.warning(f'{self._id} TCP connection failed {self._connect_counter}/{self._connect_retries} times, last error was: {err}')\n\n def __receive_thread_worker(self):\n \"\"\"\n Thread worker to handle receiving.\n \"\"\"\n self.logger.debug(f'{self._id} started receive thread')\n waitobj = IOWait()\n waitobj.watch(self._socket, read=True)\n __buffer = b''\n\n self._is_receiving = True\n if self._receiving_callback:\n self._receiving_callback(self)\n # try to find possible \"hidden\" errors\n try:\n while self._is_connected and self.__running:\n events = waitobj.wait(1000) # BMX\n for fileno, read, write in events: # BMX\n if read:\n timeout = False\n try:\n msg = self._socket.recv(4096)\n except TimeoutError:\n msg = None\n timeout = True\n # Check if incoming message is not empty\n if msg:\n # TODO: doing this breaks line separation if multiple lines\n # are read at a time, the next loop can't split it\n # because line endings are missing\n # find out reason for this operation...\n\n # # If we transfer in text mode decode message to string\n # # if not self._binary:\n # # msg = str.rstrip(str(msg, 'utf-8')).encode('utf-8')\n\n # If we work in line mode (with a terminator) slice buffer into 
single chunks based on terminator\n if self.terminator:\n __buffer += msg\n while True:\n # terminator = int means fixed size chunks\n if isinstance(self.terminator, int):\n i = self.terminator\n if i > len(__buffer):\n break\n # terminator is str or bytes means search for it\n else:\n i = __buffer.find(self.terminator)\n if i == -1:\n break\n i += len(self.terminator)\n line = __buffer[:i]\n __buffer = __buffer[i:]\n if self._data_received_callback is not None:\n try:\n self._data_received_callback(self, line if self._binary else str(line, 'utf-8').strip())\n except Exception as iex:\n self._log_exception(iex, f'lib.network {self._id} receive in terminator mode calling data_received_callback {self._data_received_callback} failed: {iex}')\n # If not in terminator mode just forward what we received\n else:\n if self._data_received_callback is not None:\n try:\n self._data_received_callback(self, msg)\n except Exception as iex:\n self._log_exception(iex, f'lib.network {self._id} calling data_received_callback {self._data_received_callback} failed: {iex}')\n # If empty peer has closed the connection\n else:\n if self.__running:\n self._is_receiving = False\n self._is_connected = False\n try:\n self._socket.shutdown()\n except Exception:\n pass\n if timeout:\n # TimeoutError exception caught\n self.logger.warning(f'{self._id} connection timed out, disconnecting.')\n else:\n # default state, peer closed connection\n self.logger.warning(f'{self._id} connection closed by peer')\n waitobj.unwatch(self._socket)\n if self._disconnected_callback is not None:\n try:\n self._disconnected_callback(self)\n except Exception as iex:\n self._log_exception(iex, f'lib.network {self._id} calling disconnected_callback {self._disconnected_callback} failed: {iex}')\n if self._autoreconnect:\n self.logger.debug(f'{self._id} autoreconnect enabled')\n self.connect()\n if self._is_connected:\n self.logger.debug('{self._id} set read watch on socket again')\n waitobj.watch(self._socket, read=True)\n else:\n # socket shut down by self.close, no error\n self.logger.debug('{self._id} connection shut down by call to close method')\n return\n except Exception as ex:\n if not self.__running:\n self.logger.debug(f'{self._id} receive thread shutting down')\n self._is_receiving = False\n return\n else:\n self._log_exception(ex, f'lib.network {self._id} receive thread died with unexpected error: {ex}. 
Go tell...')\n self._is_receiving = False\n\n def _log_exception(self, ex, msg):\n self.logger.error(msg + ' -- If stack trace is necessary, enable/check debug log')\n\n if self.logger.isEnabledFor(logging.DEBUG):\n\n # Get current system exception\n ex_type, ex_value, ex_traceback = sys.exc_info()\n\n # Extract unformatter stack traces as tuples\n trace_back = traceback.extract_tb(ex_traceback)\n\n # Format stacktrace\n stack_trace = list()\n\n for trace in trace_back:\n stack_trace.append(\"File : %s , Line : %d, Func.Name : %s, Message : %s\" % (trace[0], trace[1], trace[2], trace[3]))\n\n self.logger.debug(\"Exception type : %s \" % ex_type.__name__)\n self.logger.debug(\"Exception message : %s\" % ex_value)\n self.logger.debug(\"Stack trace : %s\" % stack_trace)\n\n def _sleep(self, time_lapse):\n \"\"\"\n Sleep (at least) seconds, but abort if self.__running changes to False.\n\n :param time_lapse: wait time in seconds\n :type time: int\n \"\"\"\n time_start = time.time()\n time_end = (time_start + time_lapse)\n while self.__running and time_end > time.time():\n # modified from 'pass' - this way intervals of 1 second are given up to other threads\n # but the abort loop stays intact with a maximum of 1 second delay\n time.sleep(1)\n\n def close(self):\n \"\"\"\n Close the current client socket.\n \"\"\"\n self.__running = False\n self.logger.info(f'{self._id} closing connection')\n if self._is_connected:\n try:\n self._socket.shutdown(socket.SHUT_RD)\n except Exception as e:\n self.logger.info(f\"socket no longer connected on disconnect, exception is {e}\")\n if self.__connect_thread is not None and self.__connect_thread.is_alive():\n self.__connect_thread.join()\n if self.__receive_thread is not None and self.__receive_thread.is_alive():\n self.__receive_thread.join()\n\n def __str__(self):\n if self.name:\n return self.name\n else:\n return super().__str__()\n\n\nclass ConnectionClient(object):\n \"\"\"\n Client object that represents a connected client returned by a Tcp_server instance on incoming connection.\n\n This class should normally **not be instantiated manually**, but is provided by the Tcp_server via the callbacks\n\n :param server: The tcp_server passes a reference to itself to access parent methods\n :param socket: socket.Socket class used by the Client object\n :param fd: File descriptor of socket used by the Client object\n\n :type server: tcp_server\n :type socket: function\n :type fd: int\n \"\"\"\n\n def __init__(self, server=None, socket=None, ip=None, port=None, name=None):\n self.logger = logging.getLogger(__name__)\n self.name = name\n self.ip = ip\n self.port = port\n self.family = None\n self.writer = None\n self.process_iac = True\n\n self._data_received_callback = None\n self._will_close_callback = None\n self.__server = server\n self.__socket = socket\n\n self._id = f'({self.name if self.name else \"Connection\"}_{self.ip}:{self.port})'\n\n @property\n def socket(self):\n \"\"\"\n Socket getter.\n \"\"\"\n return self.__socket\n\n def set_callbacks(self, data_received=None, will_close=None):\n \"\"\"\n Set callbacks for different socket events (client based).\n\n :param data_received: Called when data is received\n :type data_received: function\n \"\"\"\n self._data_received_callback = data_received\n self._will_close_callback = will_close\n\n async def __drain_writer(self):\n \"\"\"\n Ensure drain() is called.\n \"\"\"\n with suppress(ConnectionResetError):\n await self.writer.drain()\n\n def send(self, message):\n \"\"\"\n Send a string to connected 
client.\n\n :param message: Message to send\n :type message: string | bytes | bytearray\n\n :return: True if message has been queued successfully.\n :rtype: bool\n \"\"\"\n if not isinstance(message, (bytes, bytearray)):\n try:\n message = message.encode('utf-8')\n except Exception:\n self.logger.warning(f'{self._id} error encoding data')\n return False\n try:\n self.writer.write(message)\n asyncio.ensure_future(self.__drain_writer())\n except Exception as e:\n self.logger.warning(f'{self._id} error sending data: {e}')\n return False\n return True\n\n def send_echo_off(self):\n \"\"\"\n Send an IAC telnet command to ask client to turn its echo off.\n \"\"\"\n command = bytearray([0xFF, 0xFB, 0x01])\n string = self._iac_to_string(command)\n self.logger.debug(f'{self._id} sending IAC telnet command: {string}')\n self.send(command)\n\n def send_echo_on(self):\n \"\"\"\n Send an IAC telnet command to ask client to turn its echo on again.\n \"\"\"\n command = bytearray([0xFF, 0xFC, 0x01])\n string = self._iac_to_string(command)\n self.logger.debug(f'{self._id} sending IAC telnet command: {string}')\n self.send(command)\n\n def _process_IAC(self, msg):\n \"\"\"\n Process incoming IAC messages.\n\n NOTE: Does nothing for now except logging them in clear text\n \"\"\"\n if len(msg) >= 3:\n string = self._iac_to_string(msg[:3])\n self.logger.debug(f'{self._id} received IAC telnet command: {string}')\n msg = msg[3:]\n return msg\n\n def close(self):\n \"\"\"\n Close client socket.\n \"\"\"\n if self._will_close_callback:\n self._will_close_callback(self)\n self.set_callbacks(data_received=None, will_close=None)\n self.writer.close()\n return True\n\n def _iac_to_string(self, msg):\n iac = {1: 'ECHO', 251: 'WILL', 252: 'WON\\'T', 253: 'DO', 254: 'DON\\'T', 255: 'IAC'}\n string = ''\n for char in msg:\n if char in iac:\n string += iac[char] + ' '\n else:\n string += chr(char)\n return string.rstrip()\n\n def __str__(self):\n if self.name:\n return self.name\n else:\n return super().__str__()\n\n\nclass Tcp_server(object):\n \"\"\"\n Threaded TCP listener which dispatches connections (and possibly received data) via callbacks.\n\n NOTE: The callbacks need to expect the following arguments:\n\n - ``incoming_connection(server, client)`` where ``server`` is the ``Tcp_server`` instance and ``client`` is a ``ConnectionClient`` for the current connection\n - ``data_received(server, client, data)`` where ``server`` is the ``Tcp_server`` instance, ``client`` is a ``ConnectionClient`` for the current connection, and ``data`` is a string containing received data\n - ``disconnected(server, client)`` where ``server`` is the ``Tcp_server`` instance and ``client`` is a ``ConnectionClient`` for the closed connection\n\n :param host: Local host name or ip address (v4 or v6). Default is '::' which listens on all IPv4 and all IPv6 addresses available.\n :param port: Local port to connect to\n :param name: Name of this connection (mainly for logging purposes)\n\n :type host: str\n :type port: int\n :type name: str\n \"\"\"\n
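For illustration, a wiring sketch matching the callback signatures documented above. The port and handler bodies are placeholder assumptions; class, constants and method names come from this module.

# Illustrative wiring sketch for Tcp_server.
def on_incoming(server, client):
    client.send('welcome\n')

def on_data(server, client, data):
    print(f'{client.name} sent: {data}')

def on_disconnected(server, client):
    print(f'{client.name} disconnected')

server = Tcp_server(port=5000, name='demo', mode=Tcp_server.MODE_TEXT_LINE)
server.set_callbacks(incoming_connection=on_incoming, data_received=on_data, disconnected=on_disconnected)
server.start()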
\n\n MODE_TEXT = 1\n MODE_TEXT_LINE = 2\n MODE_BINARY = 3\n MODE_FIXED_LENGTH = 4\n\n def __init__(self, port, host='', name=None, mode=MODE_BINARY, terminator=None):\n self.logger = logging.getLogger(__name__)\n\n # public properties\n self.name = name\n self.mode = mode\n self.terminator = terminator\n\n # protected properties\n self._host = host\n self._port = port\n self._is_listening = False\n self._timeout = 1\n\n self._ipaddr = None\n self._family = socket.AF_INET\n self._socket = None\n\n self._incoming_connection_callback = None\n self._data_received_callback = None\n self._disconnected_callback = None\n\n # private properties\n self.__loop = None\n self.__coroutine = None\n self.__server = None\n self.__listening_thread = None\n self.__running = True\n\n # Test if host is an ip address or a host name\n self._id = f'({self.name if self.name else \"TCP_Server\"}_{self._host}:{self._port})'\n\n (self._ipaddr, self._port, self._family) = Network.validate_inet_addr(host, port)\n\n if self._ipaddr is not None:\n self._id = f'({self.name if self.name else \"TCP_Server\"}_{self._ipaddr}:{self._port})'\n self.__our_socket = Network.ip_port_to_socket(self._ipaddr, self._port)\n if not self.name:\n self.name = self.__our_socket\n\n def set_callbacks(self, incoming_connection=None, disconnected=None, data_received=None):\n \"\"\"\n Set callbacks to caller for different socket events.\n\n :param incoming_connection: Called whenever a connection is established successfully\n :param data_received: Called when data is received\n :param disconnected: Called when a connection has been dropped for whatever reason\n\n :type incoming_connection: function\n :type data_received: function\n :type disconnected: function\n \"\"\"\n self._incoming_connection_callback = incoming_connection\n self._data_received_callback = data_received\n self._disconnected_callback = disconnected\n\n def start(self):\n \"\"\"\n Start the server socket.\n\n :return: False if an error prevented us from launching a connection thread. 
True if a connection thread has been started.\n :rtype: bool\n \"\"\"\n if self._is_listening:\n return False\n try:\n self.logger.info(f'{self._id} starting up TCP server socket')\n self.__loop = asyncio.new_event_loop()\n asyncio.set_event_loop(self.__loop)\n self.__coroutine = asyncio.start_server(self.__handle_connection, self._ipaddr, self._port)\n self.__server = self.__loop.run_until_complete(self.__coroutine)\n\n self.__listening_thread = threading.Thread(target=self.__listening_thread_worker, name=f'TCPServer {self._id}')\n self.__listening_thread.daemon = True\n self.__listening_thread.start()\n except Exception as e:\n self.logger.error(f'{self._id} error starting server: {e}')\n return False\n return True\n\n def __listening_thread_worker(self):\n \"\"\"\n Run the asyncio loop in a separate thread to not block the Tcp_server.start() method.\n \"\"\"\n asyncio.set_event_loop(self.__loop)\n self._is_listening = True\n try:\n self.__loop.run_forever()\n except Exception:\n self.logger.debug(f'{self._id} error in loop.run_forever()')\n finally:\n for task in asyncio.all_tasks(self.__loop):\n task.cancel()\n self.__server.close()\n self.__loop.run_until_complete(self.__server.wait_closed())\n try:\n self.__loop.close()\n except Exception:\n pass\n self._is_listening = False\n\n async def __handle_connection(self, reader, writer):\n \"\"\"\n Handle incoming connection.\n\n Each client gets its own handler.\n \"\"\"\n peer = writer.get_extra_info('peername')\n socket_object = writer.get_extra_info('socket')\n peer_socket = Network.ip_port_to_socket(peer[0], peer[1])\n\n client = ConnectionClient(server=self, socket=socket_object, ip=peer[0], port=peer[1])\n client.family = socket.AF_INET6 if Utils.is_ipv6(client.ip) else socket.AF_INET\n client.name = Network.ip_port_to_socket(client.ip, client.port)\n client.writer = writer\n\n self.logger.info(f'{self._id} incoming connection from {peer_socket}')\n if self._incoming_connection_callback:\n self._incoming_connection_callback(self, client)\n\n while True:\n try:\n if self.mode == self.MODE_TEXT_LINE:\n # self.logger.debug(\"***\")\n data = await reader.readline()\n else:\n data = await reader.read(4096)\n except Exception:\n data = None\n\n if data and data[0] == 0xFF and client.process_iac:\n data = client._process_IAC(data)\n if data:\n try:\n string = str.rstrip(str(data, 'utf-8'))\n self.logger.debug(f'{self._id} received \"{string}\" from {client.name}')\n if self._data_received_callback:\n self._data_received_callback(self, client, string)\n if client._data_received_callback:\n client._data_received_callback(self, client, string)\n except Exception as e:\n self.logger.debug(f'{self._id} received undecodable bytes from {client.name}: {data}, resulting in error: {e}')\n else:\n try:\n self.__close_client(client)\n pass\n finally:\n del client\n return\n\n def __close_client(self, client):\n \"\"\"\n Close client connection.\n\n :param client: client object\n :type client: lib.network.ConnectionClient\n \"\"\"\n self.logger.info(f'{self._id} connection to client {client.name} closed')\n if self._disconnected_callback:\n self._disconnected_callback(self, client)\n client.writer.close()\n\n def listening(self):\n \"\"\"\n Return the current listening state.\n\n :return: True if the server socket is actually listening, else False.\n :rtype: bool\n \"\"\"\n return self._is_listening\n\n def send(self, client, msg):\n \"\"\"\n Send a string to connected client.\n\n :param client: Client Object to send message to\n :param msg: Message to 
send\n\n :type client: lib.network.ConnectionClient\n :type msg: string | bytes | bytearray\n\n :return: True if message has been queued successfully.\n :rtype: bool\n \"\"\"\n client.send(msg)\n return True\n\n def disconnect(self, client):\n \"\"\"\n Disconnect a specific client.\n\n :param client: Client Object to disconnect\n :type client: lib.network.ConnectionClient\n \"\"\"\n client.close()\n return True\n\n def close(self):\n \"\"\"\n Close running listening socket.\n \"\"\"\n self.logger.info(f'{self._id} shutting down listening socket')\n asyncio.set_event_loop(self.__loop)\n try:\n active_connections = len([task for task in asyncio.all_tasks(self.__loop) if not task.done()])\n except Exception:\n active_connections = 0\n if active_connections > 0:\n self.logger.info(f'{self._id} still has {active_connections} active connection(s), cleaning up')\n self.__running = False\n self.__loop.call_soon_threadsafe(self.__loop.stop)\n while self.__loop.is_running():\n pass\n with suppress(AttributeError): # thread can disappear between first and second condition test\n if self.__listening_thread and self.__listening_thread.is_alive():\n self.__listening_thread.join()\n self.__loop.close()\n\n def __str__(self):\n if self.name:\n return self.name\n else:\n return super().__str__()\n\n\nclass Udp_server(object):\n \"\"\"\n Threaded UDP listener which dispatches received data via callbacks.\n\n NOTE: The callbacks need to expect the following arguments:\n\n - ``data_received(addr, data)`` where ``addr`` is a tuple with ``('', remote_port)`` and ``data`` is the received data as string\n\n :param host: Local hostname or ip address (v4 or v6). Default is '' which listens on all IPv4 addresses available.\n :param port: Local port to connect to\n :param name: Name of this connection (mainly for logging purposes)\n\n :type host: str\n :type port: int\n :type name: str\n \"\"\"\n\n def __init__(self, port, host='', name=None):\n self.logger = logging.getLogger(__name__)\n\n # Public properties\n self.name = name\n\n # protected properties\n self._host = host\n self._port = port\n self._is_listening = False\n\n self._ipaddr = None\n self._family = socket.AF_INET\n self._socket = None\n\n self._data_received_callback = None\n\n # provide a shutdown timeout for the server loop. emergency fallback only\n self._close_timeout = 2\n\n # private properties\n self.__coroutine = None\n self.__loop = asyncio.new_event_loop()\n asyncio.set_event_loop(self.__loop)\n\n self.__server = aioudp.aioUDPServer()\n self.__listening_thread = None\n self.__running = True\n\n self._id = f'({self.name if self.name else \"UDP_Server\"}_{self._host}:{self._port})'\n\n # create sensible ipaddr (resolve host, handle protocol family)\n (self._ipaddr, self._port, self._family) = Network.validate_inet_addr(host, port)\n\n if self._ipaddr is not None:\n self._id = f'({self.name if self.name else \"UDP_Server\"}_{self._ipaddr}:{self._port})'\n self.__our_socket = Network.ip_port_to_socket(self._ipaddr, self._port)\n if not self.name:\n self.name = self.__our_socket\n else:\n self.__running = False\n\n def start(self):\n \"\"\"\n Start the server socket.\n\n :return: False if an error prevented us from launching a connection thread. 
True if a connection thread has been started.\n :rtype: bool\n \"\"\"\n if not self.__running:\n self.logger.error(f'{self._id} UDP server not initialized, can not start.')\n return False\n if self._is_listening:\n self.logger.warning(f'{self._id} already listening, not starting again')\n return False\n try:\n self.logger.info(f'{self._id} starting up UDP server socket')\n self.__coroutine = self.__start_server()\n self.__loop.run_until_complete(self.__coroutine)\n\n self.__listening_thread = threading.Thread(target=self.__listening_thread_worker, name=f'UDP_Server {self._id}')\n self.__listening_thread.daemon = True\n self.__listening_thread.start()\n except Exception as e:\n self.logger.error(f'{self._id} error {e} setting up udp server')\n return False\n return True\n\n def set_callbacks(self, data_received=None):\n \"\"\"\n Set callbacks to caller for different socket events.\n\n :param data_received: Called when data is received\n\n :type data_received: function\n \"\"\"\n self._data_received_callback = data_received\n\n def listening(self):\n \"\"\"\n Return the current listening state.\n\n :return: True if the server socket is actually listening, else False.\n :rtype: bool\n \"\"\"\n return self._is_listening\n\n def close(self):\n \"\"\"\n Close running listening socket.\n \"\"\"\n self.logger.info(f'{self._id} shutting down listening socket')\n asyncio.set_event_loop(self.__loop)\n self.__running = False\n self.__server.stop()\n\n # cancel pending tasks\n tasks = [t for t in asyncio.all_tasks(self.__loop) if t is not asyncio.current_task(self.__loop)]\n [task.cancel() for task in tasks]\n\n # close loop gracefully\n self.__loop.call_soon_threadsafe(self.__loop.stop)\n\n # this code shouldn't be needed, but include it with timeout just to be sure...\n starttime = time.time()\n while self.__loop.is_running() and time.time() < starttime + self._close_timeout:\n pass\n if self.__loop.is_running():\n self.__loop.stop()\n time.sleep(0.5)\n\n with suppress(AttributeError): # thread can disappear between first and second condition test\n if self.__listening_thread and self.__listening_thread.is_alive():\n self.__listening_thread.join()\n self.__loop.close()\n\n async def __start_server(self):\n \"\"\"\n Start the actual server class.\n \"\"\"\n self.__server.run(self._ipaddr, self._port, self.__loop)\n self.__server.subscribe(self.__handle_connection)\n\n def __listening_thread_worker(self):\n \"\"\"\n Run the asyncio loop in a separate thread to not block the Udp_server.start() method.\n \"\"\"\n self._is_listening = True\n self.logger.debug('{self._id} listening thread set is_listening to True')\n asyncio.set_event_loop(self.__loop)\n try:\n self.__loop.run_forever()\n except Exception as e:\n self.logger.debug(f'{self._id} error in loop.run_forever(): {e}')\n finally:\n self.__server.stop()\n self.__loop.close()\n self._is_listening = False\n return True\n\n async def __handle_connection(self, data, addr):\n \"\"\"\n Handle incoming connection.\n\n As UDP is stateless, each datagram creates a new handler.\n\n :param data: data received from socket\n :type data: bytes\n :param addr: address info ('addr', port)\n :type addr: tuple\n \"\"\"\n if addr:\n host, port = addr\n else:\n self.logger.debug(f'{self._id} address info {addr} not in format \"(host, port)\"')\n host = '0.0.0.0'\n port = 0\n\n self.logger.info(f'{self._id} incoming datagram from {host}:{port}')\n\n if data:\n try:\n string = str.rstrip(str(data, 'utf-8'))\n self.logger.debug(f'{self._id} received \"{string}\" from 
{host}:{port}')\n if self._data_received_callback:\n self._data_received_callback(addr, string)\n except UnicodeError:\n self.logger.debug(f'{self._id} received undecodable bytes from {host}:{port}')\n else:\n self.logger.debug(f'{self._id} received empty datagram from {host}:{port}')\n\n def __str__(self):\n if self.name:\n return self.name\n else:\n return super().__str__()\n","repo_name":"smarthomeNG/smarthome","sub_path":"lib/network.py","file_name":"network.py","file_ext":"py","file_size_in_byte":58999,"program_lang":"python","lang":"en","doc_type":"code","stars":115,"dataset":"github-code","pt":"70"} +{"seq_id":"39795894156","text":"import re\nimport pandas\nimport matplotlib.pyplot as plt\nimport calendar\n\n\ndef findMostUsedWord():\n d = {}\n for i in words_list:\n if i in d.keys():\n d[i] = d[i] + 1\n elif i.isalpha():\n d[i] = 1\n max = 0\n key = ''\n for k, v in d.items():\n if v > max:\n max = v\n key = k\n return key.capitalize(), max\n\ndef findTopFiveUsers():\n d = []\n for i in df_user.iterrows():\n d.append([i[1][0], i[0]])\n d.sort(reverse=True)\n top5_names = []\n top5_no_of_msgs = []\n c = 0\n for i in d:\n if i[1] != 'N/A':\n top5_no_of_msgs.append(i[0])\n top5_names.append(i[1])\n c += 1\n if c == 5: break\n return top5_no_of_msgs,top5_names\n\n# Here ex2 file contains our whatsapp Chat Data\nf = open('ex2.txt', encoding='utf8')\nl = []\nind = -1\nfor i in f:\n s = i.rstrip('\\n')\n r = re.match(r'[\\d]{1,2}/[\\d]{1,2}/[\\d]{2}, [\\d]{2}:[\\d]{2} -', s)\n if r:\n l.append(s)\n ind = ind + 1\n else:\n l[ind] = l[ind] + s\n\nf.close()\n\ndata = []\nwords_list = []\nday_names = ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday', 'Sunday']\nmonth_names = ['January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October',\n 'November', 'December'];\nfor i in l:\n a = i.strip().split(',')\n a1 = a[0]\n a2, a3, a4 = a[0].strip().split('/')\n a5 = day_names[calendar.weekday(int(a4), int(a2), int(a3))]\n a6 = a[1].strip().split('-')\n a7= a6[0].split(':')[0]\n c = a6[1].split(':')\n a8 = ''\n a9 = ''\n if len(c) >= 2 and 'changed' not in c[0]:\n a8 = c[0].strip()\n a9 = c[1]\n else:\n a8 = 'N/A'\n a9 = c[0]\n r = re.split(r'\\W+', a9.lower())\n words_list.extend(r)\n a10 = len(a9)\n data.append([a1, a3, month_names[int(a2) - 1], '20' + a4, a5, a6[0], a7, a8, a9, a10])\n\ndf = pandas.DataFrame(data,\n columns=['Date', 'Day', 'Month', 'Year', 'Day_Name', 'Time', 'Time_H', 'User', 'Message',\n 'MessageSize'])\nprint(df)\n\ndf_user = df.groupby(['User']).count()\ndf_user.plot.bar(y='Message', title='Number of Messages by Each User')\nplt.show()\n\ndf_month = df.groupby(['Month']).count()\ndf_month.plot.bar(y='Message', title='Number of Messages in each Month')\nplt.show()\n\ndf_timeh = df.groupby(['Time_H']).count()\ndf_timeh.plot.bar(y='Message', title='Number of Messages in each Hour')\nplt.show()\n\ndf_year = df.groupby(['Year']).count()\ndf_year.plot.bar(y='Message', title='Number of Messages in each Year')\nplt.show()\n\ndf_dayname = df.groupby(['Day_Name']).count()\ndf_dayname.plot.barh(y='Message', title='Number of Messages according to day_wise ')\nplt.show()\n\nprint('Longest Message is:', df['MessageSize'].max(), 'characters')\nmuw, muwt = findMostUsedWord()\nprint('The Most Used Word is:' + muw + '-> used ' + str(muwt) + ' times')\n\n# for top 5 users\ntop5=findTopFiveUsers()\nplt.pie(top5[0], labels=top5[1])\nplt.title('Top 5 Users')\nplt.show()\n\n'''\n 2/8/00, 08:02 - Dharmaraju: hello\n a1=Whole Date as a 
String(ex:2/8/00)\n a2=Month(ex:2)\n a3=Day(ex:8)\n a4=Year(ex:00)\n a5=Day_Name(ex:Wednesday)\n a6[0]=Whole Time(ex->08:02)\n a7=Only Hour (ex:08)\n a8=UserName(ex:'Dharmaraju')\n a9=UserMessage(ex:'hello')\n a10=MessageSize(ex:5)\n words_list[]=It contains all words used in all messages\n l[]=It contains lines which follows the pattern\n data[]=contains list of values which is to be stored in df\n df=original dataframe\n df_user=dataframe grouped by user\n and so on\n '''","repo_name":"dp-82/WhatsApp-Chat-Analyzer-Using-Python","sub_path":"WhatsAppMain.py","file_name":"WhatsAppMain.py","file_ext":"py","file_size_in_byte":3621,"program_lang":"python","lang":"en","doc_type":"code","stars":2,"dataset":"github-code","pt":"70"} +{"seq_id":"12648373812","text":"import random\nimport itertools\nfrom cube import cube\n\ndef fuzzOrder():\n c = cube()\n\n orders = set([])\n acts = {}\n for length in range(7):\n for actions in itertools.product(\"UDLRFB\", repeat=length):\n actions = \"\".join(actions)\n order = c.order(actions)\n orders.add(order)\n res = fun(order)\n acts[res] = actions\n print(acts)\n return orders\n\ndef fun(num):\n res = \"\"\n if not isinstance(num,int) or num <0:\n print(\"it is not a correct number\")\n elif num==1:\n return \"1\"\n else:\n while num != 1:\n for i in range (2,num+1):\n if num%i==0:\n num=int(num/i)\n if num==1:\n res += \"%d\" % i\n else:\n res += \"%d*\" % i\n break\n return res\n\nif __name__ == \"__main__\":\n c = cube()\n orders = fuzzOrder()\n #print(c.order(\"BBRFFD\"))","repo_name":"ohmyaddpub/cube","sub_path":"cube1/fuzz.py","file_name":"fuzz.py","file_ext":"py","file_size_in_byte":980,"program_lang":"python","lang":"en","doc_type":"code","stars":0,"dataset":"github-code","pt":"70"} +{"seq_id":"3886729081","text":"from django.db.models.signals import post_save\nfrom django.dispatch import receiver\n\nfrom CRM.models import Transaction,Withdrawal, Deposit\n\n#-------------------------- After transaction operation --------------------------\n@receiver(post_save, sender=Transaction)\ndef save_transaction(sender, instance, created, **kwargs):\n if created:\n sender_account = instance.sender_account\n receiver_account = instance.receiver_account\n sender_account.balance -= instance.amount\n receiver_account.balance += instance.amount\n sender_account.save()\n receiver_account.save()\n\n#-------------------------- After transaction operation --------------------------\n@receiver(post_save, sender=Withdrawal)\ndef save_withdrawal(sender, instance, created, **kwargs):\n if created:\n account = instance.account\n account.balance -= instance.amount\n account.save()\n\n\n#-------------------------- After transaction operation --------------------------\n@receiver(post_save, sender=Deposit)\ndef save_payment(sender, instance, created, **kwargs):\n if created:\n account = instance.account\n account.balance += instance.amount\n account.save()","repo_name":"haythamdahri/BANK","sub_path":"CRM/signals.py","file_name":"signals.py","file_ext":"py","file_size_in_byte":1198,"program_lang":"python","lang":"en","doc_type":"code","stars":1,"dataset":"github-code","pt":"70"} +{"seq_id":"15288616952","text":"from pudb.ui_tools import make_canvas\n\n\ndef test_simple():\n text = \"aaaaaa\"\n canvas = make_canvas(\n txt=[text],\n attr=[[(\"var value\", len(text))]],\n maxcol=len(text) + 5\n )\n content = list(canvas.content())\n assert content == [\n [(\"var value\", None, b\"aaaaaa\"), (None, None, b\" \" * 5)]\n ]\n\n\ndef test_multiple():\n canvas = make_canvas(\n 
txt=[\"Return: None\"],\n attr=[[(\"return label\", 8), (\"return value\", 4)]],\n maxcol=100\n )\n content = list(canvas.content())\n assert content == [\n [(\"return label\", None, b\"Return: \"),\n (\"return value\", None, b\"None\"),\n (None, None, b\" \" * 88)]\n ]\n\n\ndef test_boundary():\n text = \"aaaaaa\"\n canvas = make_canvas(\n txt=[text],\n attr=[[(\"var value\", len(text))]],\n maxcol=len(text)\n )\n assert list(canvas.content()) == [[(\"var value\", None, b\"aaaaaa\")]]\n\n\ndef test_byte_boundary():\n text = \"aaaaaaé\"\n canvas = make_canvas(\n txt=[text],\n attr=[[(\"var value\", len(text))]],\n maxcol=len(text)\n )\n assert list(canvas.content()) == [[(\"var value\", None, b\"aaaaaa\\xc3\\xa9\")]]\n\n\ndef test_wide_chars():\n text = \"data: '中文'\"\n canvas = make_canvas(\n txt=[text],\n attr=[[(\"var label\", 6), (\"var value\", 4)]],\n maxcol=47,\n )\n assert list(canvas.content()) == [[\n (\"var label\", None, b\"data: \"),\n (\"var value\", None, \"'中文'\".encode()),\n (None, None, b\" \"*(47 - 12)), # 10 chars, 2 of which are double width\n ]]\n\n\nif __name__ == \"__main__\":\n import sys\n if len(sys.argv) > 1:\n exec(sys.argv[1])\n else:\n from pytest import main\n main([__file__])\n","repo_name":"inducer/pudb","sub_path":"pudb/test/test_make_canvas.py","file_name":"test_make_canvas.py","file_ext":"py","file_size_in_byte":1732,"program_lang":"python","lang":"en","doc_type":"code","stars":2783,"dataset":"github-code","pt":"70"} +{"seq_id":"39636186084","text":"import sys,collections\nfrom math import sqrt,sin,cos,acos,atan2,degrees,fabs,pow,modf,fmod\n\n\nclass Lengths(collections.namedtuple('lengths',['a','b'])):\n '''Lengths'''\n\nclass Point(collections.namedtuple('point',['x','y'])):\n '''Point'''\n\n\n\nclass Plotter:\n\n #COMMAND\n #F000\n COLOR = 1 # Change color command\n DRAW = 0 # Draw (or move)\n\n \n #Motor commands for A and B motors\n #left = 0F00, right = 00F0\n FWD = 1\n REV = 2\n\n #pen position when command = DRAW\n #000F\n UP = 1\n DWN = 2\n NIL = 0\n\n #pen color in the final 4 bits when command = COLOR\n #000F\n CYAN = 0\n MAGENTA = 1\n YELLOW = 2 \n BLACK = 3\n\n\n PIXELS_PER_INCH = 25.40\n \n def __init__(self, plotfile):\n \n #self.W = 942.975 # 37.125\" -- distance between the motor points, 25.40 pixels per inch\n self.W = 48*Plotter.PIXELS_PER_INCH\n \n self.stepLength = .00314*Plotter.PIXELS_PER_INCH # MICROSTEPS => 6.28\" for 2000 steps ==> .00314\" per step\n # distance the belt moves per step \n #self.startLengths = Lengths(508.0,508.0) # Starting point (arbitrary, but should be roughly correct)\n self.startLengths = Lengths(676.0,676.0) # Starting point (arbitrary, but should be roughly correct)\n #self.currentLengths = Lengths(508.0,508.0) \n self.currentLengths = self.startLengths\n self.plotfile = plotfile\n #self.pixelsPerStep = .1\n self.pixelsPerStep = self.stepLength # not sure why this is different\n\n _bytes = bytearray(1)\n def write2(self,b):\n # skip if it's just a 1 and not moving \n if (b>2):\n self._bytes[0] = b\n self.plotfile.write(self._bytes)\n\n def changeColor(self, color):\n print(\"calling change color %d\" % color)\n b = (1 << 6) + color\n try:\n self.write(b)\n except (ValueError):\n print(\"got value error writing: %s \" % b)\n raise ValueError\n\n\n def drawLineTo(self,p2,pendown):\n pencommand = 2 if pendown else 1\n\n p1 = self.pointFromLengths(self.currentLengths)\n #print \"drawing from \" ,p1, \" to \", p2\n\n last = self.currentLengths\n\n dx = p2.x-p1.x\n dy = p2.y-p1.y\n\n d = 
sqrt(pow(dx,2)+pow(dy,2))\n step = self.pixelsPerStep/d\n\n i = 0.0\n count = 0\n while i<=1.0:\n count=count+1\n precise = self.lengthsFromPoint(Point(p1.x+dx*i,p1.y+dy*i))\n da = precise.a-last.a\n db = precise.b-last.b\n # do I step?\n sa = 0\n sb = 0\n if fabs(da)>self.stepLength:\n sa = 1 if da > 0 else -1\n if fabs(db)>self.stepLength:\n sb = 1 if db > 0 else -1\n # print \"steps: \",[sa,sb]\n b = ((2 if sa<0 else sa) << 4) + ((2 if sb<0 else sb)<<2) + pencommand\n # print \"writing \", sa,b\n try:\n self.write2(b)\n except ValueError:\n print(\"got value error writing: %s \" % b)\n raise\n self.currentLengths = Lengths(last.a+sa*self.stepLength,last.b+sb*self.stepLength)\n last = self.currentLengths\n i += step\n\n def reset(self):\n self.moveTo(self.startLengths)\n\n def moveTo(self,to):\n # print \"moving to \", to\n steplength=self.stepLength\n fromL=self.currentLengths\n toL=to\n direction = Lengths(1 if fromL.a<=toL.a else -1,1 if fromL.b<=toL.b else -1)\n #print direction\n la = fromL.a\n lb = fromL.b\n while True:\n #print \"l= \",(la,lb)\n stepa=0\n stepb=0\n if (direction.a>0):\n if la<toL.a:\n stepa=1\n la=la+steplength\n else:\n if la>toL.a:\n stepa=2\n la=la-steplength\n if (direction.b>0):\n if lb<toL.b:\n stepb=1\n lb=lb+steplength\n else:\n if lb>toL.b:\n stepb=2\n lb=lb-steplength\n b = (stepa << 4) + (stepb<<2) + 1\n if stepa==0 and stepb==0:\n break\n self.write2(b)\n self.currentLengths=Lengths(la,lb)\n\n\n def lengthsFromPoint(self,p):\n l = Lengths(sqrt(p.y**2+p.x**2),sqrt(p.y**2+(self.W-p.x)**2))\n # print \"calculated point \", p, \" to lengths \",l, \" back to point \", self.pointFromLengths(l)\n # if (p.x!=p2.x || p.y!=p2.y) print \"not equivalent (p\n return l\n\n\n def pointFromLengths(self,l):\n #print \"running for \",self.W,\" and \",l\n x = (l.a**2-l.b**2+self.W**2)/(2*self.W*l.a)\n try:\n al=acos(x)\n #print \"got al = \", degrees(al)\n return Point(l.a*cos(al),l.a*sin(al))\n except (ValueError):\n if (l.a+l.b)>4&1)\n a-=(s>>4&2)\n b+=(s>>2&1)\n b-=(s>>2&2)\n print(\"a=\"+str(a)+\"; b=\"+str(b))\n","repo_name":"sparamona/vplotter","sub_path":"Plotter.py","file_name":"Plotter.py","file_ext":"py","file_size_in_byte":5518,"program_lang":"python","lang":"en","doc_type":"code","stars":1,"dataset":"github-code","pt":"70"}
{"seq_id":"8406730771","text":"# Create a program with a function leiaInt() that works much like Python's input() function,\n# except that it validates the input and accepts only an integer value.\n\n\ndef lerInteiro(txt):\n n = input(txt)\n while not n.isnumeric():\n if n[0:1] == '-':\n n = n[1:]\n if n.isnumeric():\n return '-' + n\n print('ERROR! Enter a valid integer.')\n n = input(txt)\n return n\n\n\nn = lerInteiro('Enter a number: ')\nprint(f'You just entered the number {n}.')\n","repo_name":"AndradeHugo/CursoPython","sub_path":"aula21Funcoes/104ValidandoEntradaDeDadosEmPython.py","file_name":"104ValidandoEntradaDeDadosEmPython.py","file_ext":"py","file_size_in_byte":555,"program_lang":"python","lang":"pt","doc_type":"code","stars":0,"dataset":"github-code","pt":"70"}
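As a hedged side note on the validation loop above: the same check is often written with int() and exception handling, which also covers negative numbers and surrounding whitespace without special-casing the '-' sign. This sketch is an illustrative alternative, not part of the original exercise.

# Alternative sketch: integer input validation via int() instead of str.isnumeric().
def read_integer(prompt):
    while True:
        raw = input(prompt)
        try:
            return int(raw)
        except ValueError:
            print('ERROR! Enter a valid integer.')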
{"seq_id":"40727944210","text":"\n\"\"\" \n Approach: \n Given the size of the coins array in this problem, backtracking is clearly not an option,\n but it can be handled with DP.\n \n I was also stuck on this one for a while, trying to design a 1-D DP where dp[i] is the\n number of ways to make amount i, but that did not seem workable.\n \n Switching to the knapsack point of view -> treat this as a knapsack problem:\n amount is our maximum capacity, and coins are items that can be taken an unlimited number of times.\n \n Define dp[i][j]: the number of ways to make amount j considering only the first i items.\n \n base case: \n dp[0][:] => 0 nothing can be made \n dp[:][0] => 1 exactly one way (take nothing)\n \n State transition:\n\n 1. With a new denomination available, the number of ways after adding it is \n -> the number of ways to make j - coins[i] before having this denomination \n -> plus the number of ways to make j without this denomination\n\n dp[i][j] = dp[i-1][ j - coins[i] ] + dp[i-1][j]\n\n # My initial reasoning was wrong here; when we decide to use the new denomination the transition should be:\n\n it is not \"the number of ways to make j - coins[i] WITHOUT this denomination\", but rather the number of ways to make j - coins[i] WITH this denomination available\n\n dp[i][j] = dp[i][ j - coins[i] ] + dp[i-1][j] \n\n\n \n 2. If the new denomination is larger than the current knapsack size, it cannot be chosen \n dp[i][j] = dp[i-1][j] , if j - coins[i] < 0 \n \n \n\n\"\"\"\n\nfrom typing import List \n\nclass Solution:\n \n def change(self, amount: int, coins: List[int]) -> int:\n \n # create dp-table : number of denominations * amount (weight)\n dp = [[0 for i in range(amount+1)] for j in range(len(coins)+1)]\n \n # whatever the denominations are, if amount=0 there is exactly one way (take nothing)\n for i in range(len(coins)+1): dp[i][0] = 1 \n \n \n # outer loop advances state one: the denomination \n for i in range( 1 , len(coins)+1) : \n \n # inner loop advances state two: the amount to make \n for j in range( 1 , amount+1 ): \n \n # if the current denomination exceeds the weight, dp[i][j] = dp[i-1][j]\n # remember to apply the index offset to coins \n\n if j - coins[i-1] < 0 : \n dp[i][j] = dp[i-1][j] \n continue \n \n #-> ways that use this denomination and make j - coins[i-1] (each reaches j by adding one more coins[i-1])\n #-> plus ways that make j without this denomination\n dp[i][j] = dp[i][ j - coins[i-1] ] + dp[i-1][j] \n \n # by the definition of the dp-table, the answer is the number of ways to make amount, \n # i.e. dp[len(coins)][amount]\n return dp[len(coins)][amount]\n \n \n \nS = Solution() \nprint(S.change(5 , [1,2,5]))\n \n\n \n ","repo_name":"croso1024/Myleet","sub_path":"medium/Coin Change II.py","file_name":"Coin Change II.py","file_ext":"py","file_size_in_byte":2997,"program_lang":"python","lang":"zh","doc_type":"code","stars":0,"dataset":"github-code","pt":"71"}
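A follow-up sketch (not part of the original solution): because dp[i][j] only reads dp[i-1][j] and dp[i][j - coin], the 2-D table above can be collapsed to a single row that is updated left to right for each denomination.

# Space-optimized variant of the unbounded-knapsack transition explained above.
from typing import List

def change_1d(amount: int, coins: List[int]) -> int:
    dp = [0] * (amount + 1)
    dp[0] = 1  # one way to make 0: take nothing
    for coin in coins:
        for j in range(coin, amount + 1):
            # dp[j - coin] already includes ways that use the current coin,
            # which is exactly the dp[i][j - coins[i-1]] term of the 2-D version
            dp[j] += dp[j - coin]
    return dp[amount]

assert change_1d(5, [1, 2, 5]) == 4  # 5; 2+2+1; 2+1+1+1; 1+1+1+1+1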
data = city_raw_data[\"data\"]\n cleaned[\"dt\"] = format_datetime(data[\"current\"][\"dt\"]).isoformat()\n cleaned[\"temp\"] = data[\"current\"][\"temp\"]\n cleaned[\"sunrise\"] = format_datetime(data[\"current\"][\"sunrise\"]).time().isoformat()\n cleaned[\"sunset\"] = format_datetime(data[\"current\"][\"sunset\"]).time().isoformat()\n cleaned[\"timezone\"] = data[\"timezone\"]\n \n return cleaned\n ","repo_name":"montexbjeliseo/DAChallenge","sub_path":"nivel_medio/utils/clean_data.py","file_name":"clean_data.py","file_ext":"py","file_size_in_byte":590,"program_lang":"python","lang":"en","doc_type":"code","stars":0,"dataset":"github-code","pt":"71"} +{"seq_id":"1328229661","text":"#!/usr/bin/env python\n# -*- coding: ascii -*-\n\nimport random\nimport sys\n\ndef progressBar(value, endvalue, bar_length=20):\n\n percent = float(value) / endvalue\n arrow = '-' * int(round(percent * bar_length)-1) + '>'\n spaces = ' ' * (bar_length - len(arrow))\n\n sys.stdout.write(\"\\rPercent: [{0}] {1}%\".format(arrow + spaces, int(round(percent * 100))))\n sys.stdout.flush()\n\nsize = 100000\n\nrandom_del = True\n\nentities = []\n\ncount = 0\nprint(\"Generating entries...\")\nwhile count < size:\n ln = random.randint(10, 100)\n \n i = 0\n s = \"\"\n while i < ln:\n s += chr(random.randint(65, 122))\n i += 1\n \n entities.append(s)\n count += 1\n progressBar(count, size)\n\nrelations = [ \"r1\", \"r2\", \"r3\", \"r4\", \"r5\", \"r6\", \"r7\", \"r8\", \"r9\", \"r10\", \"r11\", \"r12\", \"r13\",\"r14\", \"r15\"]\n\nsize = 100000\n\nmin_ent = 100\n\nprint(\"Wrinting file...\")\nwith open(\"test.txt\", \"w\") as fd:\n i = 0\n while i < size:\n val = random.randint(0, 100)\n\n if val < 75 or not random_del:\n val = random.randint(0, 100)\n\n if val < 80 and min_ent <0:\n e1 = str(entities[random.randint(0, len(entities)-1)])\n e2 = str(entities[random.randint(0, len(entities)-1)])\n rel = str(relations[random.randint(0, len(relations)-1)])\n st = \"addrel \" + e1 + \" \"+ e2 +\" \" + rel + \"\\n\" \n fd.write(st)\n else:\n rsz = random.randint(0, len(entities)-1)\n st = \"addent \" + str(entities[rsz]) + \"\\n\" \n fd.write(st)\n min_ent -= 1\n elif val < 95:\n val = random.randint(0, 100)\n\n if val < 80 and min_ent <0:\n e1 = str(entities[random.randint(0, len(entities)-1)])\n e2 = str(entities[random.randint(0, len(entities)-1)])\n rel = str(relations[random.randint(0, len(relations)-1)])\n st = \"delrel \" + e1 + \" \" + e2 +\" \" + rel + \"\\n\"\n fd.write(st)\n else:\n rsz = random.randint(0, len(entities)-1)\n st = \"delent \" + str(entities[rsz]) + \"\\n\"\n fd.write(st)\n else:\n fd.write(\"report\\n\")\n\n i += 1 \n progressBar(i, size)\n \n fd.write(\"end\\n\")\n\nprint(\"Done\")","repo_name":"rickycorte/cetrioli-fritti","sub_path":"test/addrel_gen.py","file_name":"addrel_gen.py","file_ext":"py","file_size_in_byte":2330,"program_lang":"python","lang":"en","doc_type":"code","stars":0,"dataset":"github-code","pt":"71"} +{"seq_id":"73988731429","text":"import sys\nimport os\nimport dashboard\nimport pymysql\nfrom datetime import datetime, timedelta\nfrom PyQt5.QtWidgets import (\n QTableWidget,\n QTableWidgetItem,\n QHeaderView,\n)\nfrom PyQt5 import QtCore, QtGui, QtWidgets\n\nfrom PyQt5.QtCore import QTimer\nfrom datetime import datetime, timedelta\n\nfrom PyQt5.QtWidgets import (\n QTableWidget,\n QScrollArea,\n)\nimport openpyxl\nfrom datetime import datetime\n\nimport login\n\n\nclass Logs(object):\n def __init__(self) -> None:\n self.logs_sent = False\n self.existing = False\n\n # For Excel 
File\n self.file_name = \"\"\n self.file_path = \"\"\n self.folder_name = \"Logs\"\n self.folder_path = rf\"C:\\Users\\SampleUser\\Desktop\\{self.folder_name}\" # Change this to your own file path\n\n # Export and Delete Analytics Every Monday (Check Every Second)\n self.timer_second = 1\n self.timer = QTimer()\n self.timer.timeout.connect(self.analytics_reset_export_check)\n self.timer.start(self.timer_second * 1000) # Execute every set self.time_second\n self.current_tbl_row_count = 0\n\n # Auto Refresh Table\n def analytics_reset_export_check(self):\n self.check_if_logs_has_new_value()\n print(self.current_tbl_row_count)\n\n def check_if_logs_has_new_value(self):\n connection = pymysql.connect(\n host=\"localhost\", user=\"root\", password=\"\", db=\"suit_db\"\n )\n cursor = connection.cursor()\n\n query = \"SELECT count(id) FROM tbl_detect_log\"\n cursor.execute(query)\n query_value = cursor.fetchone()[0]\n\n if query_value > self.current_tbl_row_count:\n self.search_logs()\n self.current_tbl_row_count = query_value\n cursor.close()\n\n def setupUi(self, MainWindow):\n self.MainWindow = MainWindow\n\n MainWindow.setObjectName(\"MainWindow\")\n self.MainWindow.showMaximized()\n\n self.centralwidget = QtWidgets.QWidget(MainWindow)\n self.centralwidget.setObjectName(\"centralwidget\")\n # Add table widget\n self.tableWidget = QTableWidget(self.centralwidget)\n self.tableWidget.setGeometry(QtCore.QRect(310, 150, 1240, 900))\n self.tableWidget.setObjectName(\"tableWidget\")\n self.tableWidget.horizontalHeader().setSectionResizeMode(QHeaderView.Stretch)\n\n # Department Filter\n self.departments = [\"ALL\", \"CABEIHM\", \"CAS\", \"CICS\", \"CET\", \"CONAHS\", \"CTE\"]\n\n self.deparmentLabel = QtWidgets.QLabel(self.centralwidget)\n self.deparmentLabel.setGeometry(QtCore.QRect(600, 95, 70, 30))\n self.deparmentLabel.setObjectName(\"deparmentLabel\")\n self.deparmentLabel.setText(\"Deparment:\")\n\n self.deparmentComboBox = QtWidgets.QComboBox(self.centralwidget)\n self.deparmentComboBox.setGeometry(QtCore.QRect(670, 95, 100, 30))\n self.deparmentComboBox.setObjectName(\"deparmentComboBox\")\n self.deparmentComboBox.addItems(self.departments)\n self.deparmentComboBox.currentTextChanged.connect(self.search_logs)\n\n # Detected Type Filter\n self.detected_type = [\"ALL\", \"PROPER\", \"IMPROPER\"]\n\n self.detectedtypeLabel = QtWidgets.QLabel(self.centralwidget)\n self.detectedtypeLabel.setGeometry(QtCore.QRect(800, 95, 70, 30))\n self.detectedtypeLabel.setObjectName(\"detectedtypeLabel\")\n self.detectedtypeLabel.setText(\"Type:\")\n\n self.detectedtypeComboBox = QtWidgets.QComboBox(self.centralwidget)\n self.detectedtypeComboBox.setGeometry(QtCore.QRect(840, 95, 100, 30))\n self.detectedtypeComboBox.setObjectName(\"detectedtypeComboBox\")\n self.detectedtypeComboBox.addItems(self.detected_type)\n self.detectedtypeComboBox.currentTextChanged.connect(self.search_logs)\n\n # Date From Filter\n self.datefromLabel = QtWidgets.QLabel(self.centralwidget)\n self.datefromLabel.setGeometry(QtCore.QRect(960, 95, 70, 30))\n self.datefromLabel.setObjectName(\"datefromLabel\")\n self.datefromLabel.setText(\"Date From:\")\n\n self.datefromDateEdit = QtWidgets.QDateEdit(\n self.centralwidget, calendarPopup=True\n )\n self.datefromDateEdit.setGeometry(QtCore.QRect(1030, 95, 80, 30))\n self.datefromDateEdit.setObjectName(\"datefromDateEdit\")\n self.datefromDateEdit.setDateTime(QtCore.QDateTime.currentDateTime())\n self.datefromDateEdit.dateChanged.connect(self.search_logs)\n\n # Date To Filter\n 
self.datetoLabel = QtWidgets.QLabel(self.centralwidget)\n self.datetoLabel.setGeometry(QtCore.QRect(1130, 95, 70, 30))\n self.datetoLabel.setObjectName(\"datetoLabel\")\n self.datetoLabel.setText(\"Date To:\")\n\n self.datetoDateEdit = QtWidgets.QDateEdit(\n self.centralwidget, calendarPopup=True\n )\n self.datetoDateEdit.setGeometry(QtCore.QRect(1190, 95, 80, 30))\n self.datetoDateEdit.setObjectName(\"datetoDateEdit\")\n self.datetoDateEdit.setDateTime(QtCore.QDateTime.currentDateTime())\n self.datetoDateEdit.dateChanged.connect(self.search_logs)\n\n # search bar\n self.search_has_input = False\n\n self.searchLabel = QtWidgets.QLabel(self.centralwidget)\n self.searchLabel.setGeometry(QtCore.QRect(1310, 95, 70, 30))\n self.searchLabel.setObjectName(\"searchLabel\")\n self.searchLabel.setText(\"Search:\")\n\n self.searchLineEdit = QtWidgets.QLineEdit(self.centralwidget)\n self.searchLineEdit.setGeometry(QtCore.QRect(1360, 95, 150, 30))\n self.searchLineEdit.setObjectName(\"searchLineEdit\")\n self.searchLineEdit.textChanged.connect(self.search_logs)\n\n self.exportDataBtn = QtWidgets.QPushButton(self.centralwidget)\n self.exportDataBtn.setGeometry(QtCore.QRect(1550, 90, 121, 40))\n font = QtGui.QFont()\n font.setFamily(\"Arial\")\n font.setPointSize(8)\n self.exportDataBtn.setFont(font)\n self.exportDataBtn.setObjectName(\"exportDataBtn\")\n self.exportDataBtn.setText(\"Export Data\")\n self.exportDataBtn.setStyleSheet(\n \"\"\"\n QPushButton {\n background-color: #dc3545;\n border: none;\n color: white;\n padding: 8px 16px;\n border-radius: 4px;\n font-family: Arial;\n font-size: 8pt;\n }\n\n QPushButton:hover {\n background-color: #c82333;\n }\n \"\"\"\n )\n self.exportDataBtn.clicked.connect(self.export_data_to_excel)\n\n scroll_area = QScrollArea()\n scroll_area.setWidgetResizable(True)\n self.frame = QtWidgets.QFrame(self.centralwidget)\n self.frame.setGeometry(QtCore.QRect(-1, 1, 261, 2000))\n self.frame.setStyleSheet(\"background-color: #c82333;\")\n self.frame.setFrameShape(QtWidgets.QFrame.StyledPanel)\n self.frame.setFrameShadow(QtWidgets.QFrame.Raised)\n self.frame.setObjectName(\"frame\")\n self.label_4 = QtWidgets.QLabel(self.frame)\n self.label_4.setGeometry(QtCore.QRect(80, 20, 101, 91))\n self.label_4.setText(\"\")\n self.label_4.setPixmap(QtGui.QPixmap(\"img/logo.png\"))\n self.label_4.setScaledContents(True)\n self.label_4.setObjectName(\"label_4\")\n self.label_9 = QtWidgets.QLabel(self.frame)\n self.label_9.setGeometry(QtCore.QRect(44, 110, 201, 51))\n font = QtGui.QFont()\n font.setFamily(\"Arial\")\n font.setPointSize(20)\n font.setBold(True)\n font.setWeight(75)\n self.label_9.setFont(font)\n self.label_9.setStyleSheet(\"color: rgb(255, 255, 255);\")\n self.label_9.setObjectName(\"label_9\")\n\n self.dashboardBtn = QtWidgets.QPushButton(self.frame)\n self.dashboardBtn.setGeometry(QtCore.QRect(40, 200, 221, 31))\n font = QtGui.QFont()\n font.setFamily(\"Arial\")\n font.setPointSize(12)\n self.dashboardBtn.setFont(font)\n self.dashboardBtn.setStyleSheet(\n \" background-color: transparent;\\n\" \"color: white;\\n\" \"\"\n )\n self.dashboardBtn.setStyleSheet(\n \" background-color: transparent;\\n\"\n \"color: white;\\n\"\n \"text-align: left;\\n\"\n \"\"\n )\n self.dashboardBtn.setObjectName(\"dashboardBtn\")\n\n self.logsBtn = QtWidgets.QPushButton(self.frame)\n self.logsBtn.setGeometry(QtCore.QRect(40, 250, 221, 31))\n font = QtGui.QFont()\n font.setFamily(\"Arial\")\n font.setPointSize(12)\n self.logsBtn.setFont(font)\n self.logsBtn.setStyleSheet(\n \" 
background-color: transparent;\\n\" \"color: white;\\n\" \"\"\n )\n self.logsBtn.setStyleSheet(\n \" background-color: transparent;\\n\"\n \"color: white;\\n\"\n \"text-align: left;\\n\"\n \"\"\n )\n self.logsBtn.setObjectName(\"logsBtn\")\n\n self.detectionBtn = QtWidgets.QPushButton(self.frame)\n self.detectionBtn.setGeometry(QtCore.QRect(40, 300, 221, 31))\n font = QtGui.QFont()\n font.setFamily(\"Arial\")\n font.setPointSize(12)\n self.detectionBtn.setFont(font)\n self.detectionBtn.setStyleSheet(\n \" background-color: transparent;\\n\" \"color: white;\\n\" \"\"\n )\n self.detectionBtn.setStyleSheet(\n \" background-color: transparent;\\n\"\n \"color: white;\\n\"\n \"text-align: left;\\n\"\n \"\"\n )\n self.detectionBtn.setObjectName(\"detectionBtn\")\n\n self.logoutBtn = QtWidgets.QPushButton(self.frame)\n self.logoutBtn.setGeometry(QtCore.QRect(40, 350, 221, 31))\n font = QtGui.QFont()\n font.setFamily(\"Arial\")\n font.setPointSize(12)\n self.logoutBtn.setFont(font)\n self.logoutBtn.setStyleSheet(\n \" background-color: transparent;\\n\" \"color: white;\\n\" \"\"\n )\n self.logoutBtn.setStyleSheet(\n \" background-color: transparent;\\n\"\n \"color: white;\\n\"\n \"text-align: left;\\n\"\n \"\"\n )\n self.logoutBtn.setObjectName(\"logoutBtn\")\n self.logoutBtn.clicked.connect(self.open_login_page)\n\n self.frame_3 = QtWidgets.QFrame(self.centralwidget)\n self.frame_3.setGeometry(QtCore.QRect(261, -1, 2000, 61))\n self.frame_3.setStyleSheet(\"background-color: rgb(255, 255, 255);\")\n self.frame_3.setFrameShape(QtWidgets.QFrame.StyledPanel)\n self.frame_3.setFrameShadow(QtWidgets.QFrame.Raised)\n self.frame_3.setObjectName(\"frame_3\")\n self.label = QtWidgets.QLabel(self.frame_3)\n self.label.setGeometry(QtCore.QRect(20, 10, 671, 41))\n font = QtGui.QFont()\n font.setFamily(\"Arial\")\n font.setPointSize(12)\n self.label.setFont(font)\n self.label.setObjectName(\"label\")\n\n self.label_10 = QtWidgets.QLabel(self.centralwidget)\n self.label_10.setGeometry(QtCore.QRect(310, 90, 270, 51))\n font = QtGui.QFont()\n font.setFamily(\"Arial\")\n font.setPointSize(15)\n font.setBold(True)\n font.setWeight(75)\n self.label_10.setFont(font)\n self.label_10.setStyleSheet(\"color:black;\")\n self.label_10.setObjectName(\"label_10\")\n\n MainWindow.setCentralWidget(self.centralwidget)\n self.menubar = QtWidgets.QMenuBar(MainWindow)\n self.menubar.setGeometry(QtCore.QRect(0, 0, 1186, 21))\n self.menubar.setObjectName(\"menubar\")\n MainWindow.setMenuBar(self.menubar)\n self.statusbar = QtWidgets.QStatusBar(MainWindow)\n self.statusbar.setObjectName(\"statusbar\")\n MainWindow.setStatusBar(self.statusbar)\n\n self.retranslateUi(MainWindow)\n QtCore.QMetaObject.connectSlotsByName(MainWindow)\n self.detectionBtn.clicked.connect(self.open_detection)\n self.dashboardBtn.clicked.connect(self.open_dashboard)\n\n def retranslateUi(self, MainWindow):\n _translate = QtCore.QCoreApplication.translate\n MainWindow.setWindowTitle(\n _translate(\n \"MainWindow\",\n \"SUIT: School Uniform Identifier Technology using Object Detection\",\n )\n )\n self.label_9.setText(_translate(\"MainWindow\", \" SUIT\"))\n self.dashboardBtn.setText(_translate(\"MainWindow\", \"Dashboard\"))\n self.logsBtn.setText(_translate(\"MainWindow\", \"Improper Uniform Monitoring\"))\n self.detectionBtn.setText(_translate(\"MainWindow\", \"Detection\"))\n self.logoutBtn.setText(_translate(\"MainWindow\", \"Logout\"))\n\n self.label.setText(\n _translate(\n \"MainWindow\",\n \"SUIT: School Uniform Identifier Technology using Object 
Detection\",\n )\n )\n self.label_10.setText(_translate(\"MainWindow\", \"Captured Improper Uniform\"))\n\n def load_logs(self):\n if self.search_has_input:\n return\n\n print(\"Executed load_logs\")\n\n connection = pymysql.connect(\n host=\"localhost\", user=\"root\", password=\"\", db=\"suit_db\"\n )\n cursor = connection.cursor()\n\n delete_query = \"DELETE FROM tbl_logs WHERE date_log < %s\"\n week_ago = datetime.now() - timedelta(days=7)\n cursor.execute(delete_query, (week_ago.date(),))\n connection.commit()\n\n query = \"SELECT unif_detect_result, department, course, date_log, time_log FROM tbl_detect_log ORDER BY ID DESC\"\n cursor.execute(query)\n logs = cursor.fetchall()\n\n row_count = len(logs)\n column_count = 5 # Increase the column count for the image column\n\n self.tableWidget.setRowCount(row_count)\n self.tableWidget.setColumnCount(column_count)\n\n header_labels = [\n \"Detected\",\n \"Department\",\n \"Course\",\n \"Date\",\n \"Time\",\n ]\n self.tableWidget.setHorizontalHeaderLabels(header_labels)\n\n for row, log in enumerate(logs):\n unif_detect_reuslt, department, course, date_log, time_log = log\n\n self.tableWidget.setItem(row, 0, QTableWidgetItem(str(unif_detect_reuslt)))\n self.tableWidget.setItem(row, 1, QTableWidgetItem(str(department)))\n self.tableWidget.setItem(row, 2, QTableWidgetItem(str(course)))\n self.tableWidget.setItem(row, 3, QTableWidgetItem(str(date_log)))\n self.tableWidget.setItem(row, 4, QTableWidgetItem(str(time_log)))\n\n cursor.close()\n\n def search_logs(self):\n print(\"Department: \", self.deparmentComboBox.currentText())\n print(\"Type: \", self.detectedtypeComboBox.currentText())\n print(\"Date From: \", self.datefromDateEdit.date().toString(\"yyyy-MM-dd\"))\n print(\"Date To: \", self.datefromDateEdit.date().toString(\"yyyy-MM-dd\"))\n\n department_filter = self.deparmentComboBox.currentText()\n detectedtype_filter = self.detectedtypeComboBox.currentText()\n date_filter_from = self.datefromDateEdit.date().toString(\"yyyy-MM-dd\")\n date_filter_to = self.datetoDateEdit.date().toString(\"yyyy-MM-dd\")\n\n search_text = self.searchLineEdit.text()\n\n connection = pymysql.connect(\n host=\"localhost\", user=\"root\", password=\"\", db=\"suit_db\"\n )\n cursor = connection.cursor()\n\n if search_text:\n self.search_has_input = True\n else:\n self.search_has_input = False\n # QTimer.singleShot(1000, self.load_logs)\n\n query = \"\"\"\n SELECT\n unif_detect_result, department, course, date_log,\n time_log\n FROM tbl_detect_log\n WHERE 1=1\n \"\"\"\n data = []\n if department_filter != \"ALL\":\n query += \" AND department = %s\"\n data.append(department_filter)\n\n if detectedtype_filter != \"ALL\":\n query += \" AND unif_detect_result = %s\"\n data.append(detectedtype_filter)\n\n if date_filter_from and date_filter_to:\n query += \" AND date_log BETWEEN %s AND %s\"\n data.append(date_filter_from)\n data.append(date_filter_to)\n\n if search_text:\n query += \" AND (course LIKE %s)\"\n data.append(f\"%{search_text}%\")\n\n query += \" ORDER BY ID DESC\"\n\n print(\"Query\", query)\n print(\"Data\", data)\n\n cursor.execute(\n query,\n data,\n )\n logs = cursor.fetchall()\n\n row_count = len(logs)\n column_count = 5 # Increase the column count for the image column\n\n self.tableWidget.setRowCount(row_count)\n self.tableWidget.setColumnCount(column_count)\n self.tableWidget.horizontalHeader().setSectionResizeMode(QHeaderView.Stretch)\n\n header_labels = [\n \"Detected\",\n \"Department\",\n \"Course\",\n \"Date\",\n \"Time\",\n ]\n 
self.tableWidget.setHorizontalHeaderLabels(header_labels)\n\n for row, log in enumerate(logs):\n unif_detect_result, department, course, date_log, time_log = log\n\n self.tableWidget.setItem(row, 0, QTableWidgetItem(str(unif_detect_result)))\n self.tableWidget.setItem(row, 1, QTableWidgetItem(str(department)))\n self.tableWidget.setItem(row, 2, QTableWidgetItem(str(course)))\n self.tableWidget.setItem(row, 3, QTableWidgetItem(str(date_log)))\n self.tableWidget.setItem(row, 4, QTableWidgetItem(str(time_log)))\n\n cursor.close()\n connection.close()\n\n def start_loading_students(self):\n # Start loading the students initially\n self.search_logs()\n\n def open_detection(self):\n from UniformDetection import UniformDetectionWindow\n\n detection = UniformDetectionWindow()\n\n detection.uniform_detection_func()\n\n def open_dashboard(self):\n print(\"Opening Dashboard...\")\n self.MainWindow.hide()\n self.dashboard_window = QtWidgets.QMainWindow()\n self.ui = dashboard.Ui_Dashboard()\n self.ui.setupUi(self.dashboard_window)\n self.dashboard_window.show()\n\n def open_login_page(self):\n print(\"Opening Login Page...\")\n self.MainWindow.hide()\n self.login_window = QtWidgets.QMainWindow()\n self.ui = login.Ui_Form()\n self.ui.setupUi(self.login_window)\n self.login_window.show()\n\n def export_data_to_excel(self):\n # Connect to the MySQL database\n connection = pymysql.connect(\n host=\"localhost\", user=\"root\", password=\"\", database=\"suit_db\"\n )\n department_filter = self.deparmentComboBox.currentText()\n detectedtype_filter = self.detectedtypeComboBox.currentText()\n date_filter_from = self.datefromDateEdit.date().toString(\"yyyy-MM-dd\")\n date_filter_to = self.datetoDateEdit.date().toString(\"yyyy-MM-dd\")\n\n search_text = self.searchLineEdit.text()\n\n try:\n # Create a cursor object to execute SQL queries\n cursor = connection.cursor()\n\n # Retrieve data from the tbl_student table\n query = \"\"\"\n SELECT\n unif_detect_result, department, course, date_log,\n time_log\n FROM tbl_detect_log\n WHERE 1=1\n \"\"\"\n data = []\n if department_filter != \"ALL\":\n query += \" AND department = %s\"\n data.append(department_filter)\n\n if detectedtype_filter != \"ALL\":\n query += \" AND unif_detect_result = %s\"\n data.append(detectedtype_filter)\n\n if date_filter_from and date_filter_to:\n query += \" AND date_log BETWEEN %s AND %s\"\n data.append(date_filter_from)\n data.append(date_filter_to)\n\n if search_text:\n query += \" AND (course LIKE %s)\"\n data.append(f\"%{search_text}%\")\n\n print(\"Query\", query)\n print(\"Data\", data)\n\n query += \" ORDER BY ID DESC\"\n cursor.execute(query, data)\n student_data = cursor.fetchall()\n\n # Create a new Excel workbook and select the active sheet\n workbook = openpyxl.Workbook()\n sheet = workbook.active\n\n # Write the column headers\n sheet[\"A1\"] = \"Detected\"\n sheet[\"B1\"] = \"Department\"\n sheet[\"C1\"] = \"Course\"\n sheet[\"D1\"] = \"Date Log\"\n sheet[\"E1\"] = \"Time Log\"\n\n # Set column width for date columns\n date_columns = [\"D\", \"E\"] # Columns D and E represent the date columns\n for column in date_columns:\n sheet.column_dimensions[\n column\n ].width = 15 # Adjust the width as per your preference\n\n for row_index, student in enumerate(student_data, start=2):\n sheet.cell(row=row_index, column=1).value = student[0]\n sheet.cell(row=row_index, column=2).value = student[1]\n sheet.cell(row=row_index, column=3).value = student[2]\n sheet.cell(row=row_index, column=4).value = student[3]\n 
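# Editor's sketch: the five writes above are equivalent to the shorter loop\n # for col, value in enumerate(student, start=1):\n # sheet.cell(row=row_index, column=col).value = value\n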
\n # Save the Excel file\n current_datetime = datetime.now()\n formatted_datetime = current_datetime.strftime(\"%Y-%m-%d_%H-%M-%S\")\n self.file_name = f\"logs_data_{formatted_datetime}.xlsx\"\n\n # Save the Excel file inside the \"folder_path\" folder\n self.file_path = rf\"{self.folder_path}\\{self.file_name}\"\n\n # Create the \"logs\" folder if it doesn't exist\n if not os.path.exists(self.folder_path):\n os.makedirs(self.folder_path)\n # Save the Excel file\n workbook.save(self.file_path)\n print(\"Logs Data exported to Excel successfully!\")\n\n except Exception as e:\n print(\"Error exporting data to Excel:\", str(e))\n\n finally:\n # Close the cursor and connection\n cursor.close()\n connection.close()\n\n\nif __name__ == \"__main__\":\n app = QtWidgets.QApplication(sys.argv)\n MainWindow = QtWidgets.QMainWindow()\n ui = Logs()\n ui.setupUi(MainWindow)\n ui.search_logs()\n MainWindow.show()\n sys.exit(app.exec_())\n","repo_name":"pfdelfin98/SUIT","sub_path":"logs.py","file_name":"logs.py","file_ext":"py","file_size_in_byte":22288,"program_lang":"python","lang":"en","doc_type":"code","stars":0,"dataset":"github-code","pt":"71"} {"seq_id":"20669485680","text":"\nimport models.satici\n\nfrom models.musteri import Musteri\nfrom models.urun import Urun\n\nbasilanTus = \"\"\nmusteri = models.musteri.Musteri\nsatici = models.satici.Satici\nurun = models.urun.Urun\n\ndef saticiSistemKayit():\n saticiAdi = (input(\n \"Lütfen kullanıcı adınızı giriniz: \")) # next, the username will be checked in the database, then the password.\n\n sifre = (input(\n \"Lütfen sifre giriniz: \")) # next, the username will be checked in the database, then the password.\n\n urunTuru = (input(\n \"Lütfen urun turu giriniz: \")) # next, the username will be checked in the database, then the password.\n\n satici1 = satici(saticiAdi, sifre, urunTuru)\n satici.saticiKayit(satici, satici1)\n satici.saticiEkrani(satici)\n\n\ndef musteriSistemKayit():\n musteriAdi = (input(\n \"Lütfen kullanıcı adınızı giriniz: \")) # next, the username will be checked in the database, then the password.\n\n sifre = (input(\n \"Lütfen sifre giriniz: \")) # next, the username will be checked in the database, then the password.\n\n\n musteri1 = musteri(musteriAdi, sifre)\n musteri.musteriKayit(musteri, musteri1)\n if(musteri.kayitKontrol == 1):\n musteri.musteriEkrani(musteri)\n\ndef kullaniciGiris():\n basilanTus = (input(\"Müşteri misiniz, Satıcı mısınız? (Müşteri iseniz 'y' , Satıcı iseniz 'n')\"))\n\n if(basilanTus == \"y\"):\n\n musteri.musteriGiris(musteri)\n if(musteri.kontrol == 1):\n musteri.musteriEkrani(musteri)\n\n else:\n\n satici.saticiGiris(satici)\n if(satici.kontrol == 1):\n satici.saticiEkrani(satici)\n\ndef urunEkleme():\n urunAdi = (input(\"Lütfen ürün adını giriniz: \"))\n\n urunFiyat = (input(\"Lütfen ürün fiyatını giriniz: \"))\n\n urunTuru = (input(\"Lütfen ürün türünüzü giriniz: \"))\n\n urunID = (input(\"Lütfen ürün ID'nizi giriniz: \"))\n\n urun1 = urun(urunAdi, urunFiyat, urunTuru, urunID)\n\n urun.urunKayit(urun, urun1)\n\n\nif __name__ == '__main__':\n\n basilanTus = (input(\"E-ticaret uygulamasına hoşgeldiniz!!!!!!! 
\nSisteme kayıtlı mısınız?(Evet ise 'y', hayır ise 'n' tuşuna basınız)\"))\n if(basilanTus == \"y\"):\n kullaniciGiris()\n else:\n basilanTus = (input(\"Müşterimi yoksa satıcı mısınız?(Satıcı ise 'y' müşteri ise 'n'ye basınız\"))\n if(basilanTus == \"y\"):\n saticiSistemKayit()\n elif(basilanTus == \"n\"):\n musteriSistemKayit()","repo_name":"Emin-Yildiz/python_eTicaret","sub_path":"main.py","file_name":"main.py","file_ext":"py","file_size_in_byte":2515,"program_lang":"python","lang":"tr","doc_type":"code","stars":1,"dataset":"github-code","pt":"71"} {"seq_id":"28162082557","text":"# -*- coding: utf-8 -*-\r\nfrom PyQt5.QtWidgets import (QDialog, QTableWidget, QHeaderView, QAbstractItemView, QSizePolicy,\r\n QVBoxLayout, QWidget, QHBoxLayout, QStatusBar, QPushButton, QTableWidgetItem)\r\nfrom model import Card, CardType, DeckType\r\nfrom widgets import ViewGenerator\r\nfrom PyQt5.QtCore import Qt, QItemSelectionModel, QPropertyAnimation\r\nimport copy\r\n\r\nLINE_HEIGHT = 70\r\n\r\n\r\n# Card-dealing dialog\r\nclass DealCardsDialog(QDialog):\r\n def __init__(self, index, gameModel, deckType, parent):\r\n super().__init__(parent)\r\n\r\n # model\r\n self._gameModel = gameModel\r\n self._deckType = deckType\r\n # The layout differs by deck type\r\n if deckType == DeckType.Hand:\r\n self._playerModel = gameModel.Players[index]\r\n self._handListModel = copy.deepcopy(self._playerModel.handCardList)\r\n self._deployedListModel = copy.deepcopy(self._playerModel.DeployedCardList)\r\n\r\n self.resize(self._gameModel.Config['cardMaxNum'] * Card.WIDTH + 86, 450)\r\n self.setWindowTitle('分牌-->[玩家%s]' % self._playerModel.seatId)\r\n elif deckType == DeckType.PerDeploy:\r\n self._deployedListModel = copy.deepcopy(self._gameModel.DeployedCardList)\r\n\r\n self.resize(660, 630)\r\n self.setWindowTitle('分牌-->预分配牌')\r\n\r\n self.setWindowFlags(Qt.Window | Qt.WindowCloseButtonHint | Qt.MSWindowsFixedSizeDialogHint)\r\n self.iniUI()\r\n\r\n @property\r\n def handListModel(self):\r\n return self._handListModel\r\n\r\n @property\r\n def deployedListModel(self):\r\n return self._deployedListModel\r\n\r\n def iniUI(self):\r\n mLayout = QVBoxLayout()\r\n self.setLayout(mLayout)\r\n\r\n self.mLayout = mLayout\r\n self.initDeck()\r\n self.initPlayerCardsDeck()\r\n\r\n # Confirm button\r\n btnGroup = QHBoxLayout()\r\n self.mLayout.addLayout(btnGroup)\r\n btnGroup.addStretch()\r\n confirmBtn = QPushButton('确认', self)\r\n confirmBtn.clicked.connect(self.onConfirmClick)\r\n btnGroup.addWidget(confirmBtn)\r\n self.confirmBtn = confirmBtn\r\n\r\n self.statusbar = QStatusBar()\r\n self.mLayout.addWidget(self.statusbar)\r\n self.statusbar.setSizeGripEnabled(False)\r\n\r\n # Initialize the hand and pre-deal deck rows\r\n def initPlayerCardsDeck(self):\r\n # Table layout\r\n tableWidget = QTableWidget()\r\n tableWidget.setColumnCount(1)\r\n tableWidget.horizontalHeader().setVisible(False)\r\n tableWidget.setParent(self)\r\n tableWidget.setObjectName('playerTable')\r\n tableWidget.horizontalHeader().setSectionResizeMode(QHeaderView.Stretch)\r\n tableWidget.verticalHeader().setStretchLastSection(True)\r\n tableWidget.setSelectionMode(QAbstractItemView.SingleSelection)\r\n self.mLayout.addWidget(tableWidget)\r\n self._playerTableWidget = tableWidget\r\n\r\n if self._deckType == DeckType.Hand:\r\n self.addHandDeck()\r\n elif self._deckType == DeckType.PerDeploy:\r\n self.addPerDeployDeck()\r\n\r\n modelIndex = self._playerTableWidget.model().index(0, 0)\r\n self._playerTableWidget.selectionModel().select(modelIndex, QItemSelectionModel.Select)\r\n\r\n def addHandDeck(self):\r\n curRow = 
self._playerTableWidget.rowCount()\r\n self._playerTableWidget.insertRow(curRow)\r\n self._playerTableWidget.setVerticalHeaderItem(curRow, QTableWidgetItem(DeckType.Hand.label))\r\n self._playerTableWidget.setRowHeight(curRow, LINE_HEIGHT)\r\n handView = ViewGenerator.createModelDeckView(self._handListModel)\r\n handView.deckType = DeckType.Hand\r\n handView.dropDownSign.connect(self.dropInDeckView)\r\n handView.row = curRow\r\n handView.initCards(self._handListModel, self.onCardClick, maxWidth=self.width() - 60)\r\n self._playerTableWidget.setCellWidget(curRow, 0, handView)\r\n self.updateHeaderTextCount(handView)\r\n\r\n def addPerDeployDeck(self):\r\n curRow = self._playerTableWidget.rowCount()\r\n self._playerTableWidget.insertRow(curRow)\r\n self._playerTableWidget.setVerticalHeaderItem(curRow, QTableWidgetItem(DeckType.PerDeploy.label))\r\n deployedView = ViewGenerator.createModelDeckView(self._deployedListModel)\r\n deployedView.deckType = DeckType.PerDeploy\r\n deployedView.dropDownSign.connect(self.dropInDeckView)\r\n deployedView.row = curRow\r\n deployedView.initCards(self._deployedListModel, self.onCardClick, maxWidth=self.width() - 60)\r\n self._playerTableWidget.setCellWidget(curRow, 0, deployedView)\r\n self.updateHeaderTextCount(deployedView)\r\n\r\n # Initialize the source deck table\r\n def initDeck(self):\r\n tableWidget = QTableWidget()\r\n tableWidget.setColumnCount(1)\r\n tableWidget.horizontalHeader().setVisible(False)\r\n tableWidget.setParent(self)\r\n tableWidget.setObjectName('deckTable')\r\n tableWidget.horizontalHeader().setSectionResizeMode(QHeaderView.Stretch)\r\n tableWidget.setSelectionMode(QAbstractItemView.NoSelection) # selection disabled\r\n self.mLayout.addWidget(tableWidget)\r\n\r\n # Initialize the card data\r\n labels = []\r\n cards = []\r\n for item in self._gameModel.Config['cards']:\r\n labels.append(item['label'])\r\n cards.append(item['content'])\r\n tableWidget.setRowCount(len(labels))\r\n tableWidget.setFixedHeight((LINE_HEIGHT + 1) * len(labels))\r\n tableWidget.setVerticalHeaderLabels(labels)\r\n for i, line in enumerate(cards):\r\n cardWidget = QWidget()\r\n lineLayout = QHBoxLayout()\r\n cardWidget.setLayout(lineLayout)\r\n lineLayout.setSpacing(10)\r\n tableWidget.setRowHeight(i, LINE_HEIGHT)\r\n tableWidget.setCellWidget(i, 0, cardWidget)\r\n\r\n for index, cardValue in enumerate(line):\r\n cardModel = Card(cardValue, CardType.InitCard)\r\n cardView = ViewGenerator.createCardView(cardModel)\r\n cardView.mousePressSign.connect(self.onAddCardClick)\r\n lineLayout.addWidget(cardView)\r\n lineLayout.addStretch()\r\n\r\n def dropInDeckView(self, deckView, event):\r\n cardView = event.source()\r\n cardModel = cardView.model\r\n if cardModel.type == CardType.HandCard:\r\n # Reorder a card within the hand\r\n if cardView.deckView != deckView:\r\n return\r\n # Insert at the position of the card it was dropped on\r\n cardSize = cardView.size()\r\n for cv in deckView.cardViews:\r\n if cv != cardView:\r\n crossPos = cv.pos() - event.pos()\r\n if cardSize.width() > abs(crossPos.x()) and cardSize.height() > abs(crossPos.y()):\r\n deckView.insertCard(cardView, cv)\r\n break\r\n elif cardModel.type == CardType.InitCard:\r\n # A card dragged out of the source deck\r\n self.addCardToDeckView(cardModel, deckView)\r\n\r\n def addCardToDeckView(self, cardModel, deckView):\r\n if deckView.deckType == DeckType.Hand:\r\n cardViewListModel = deckView.model\r\n if len(cardViewListModel.lists) >= self._gameModel.Config['cardMaxNum']:\r\n self.statusbar.showMessage('已到达最大牌数[%d张]' % self._gameModel.Config['cardMaxNum'], 2000)\r\n return\r\n handCardView = ViewGenerator.createCardView(Card(cardModel.value, 
CardType.HandCard))\r\n elif deckView.deckType == DeckType.PerDeploy:\r\n handCardView = ViewGenerator.createCardView(Card(cardModel.value, CardType.DealCard))\r\n handCardView.mousePressSign.connect(self.onCardClick)\r\n deckView.addCard(handCardView)\r\n self.updateHeaderTextCount(deckView)\r\n\r\n def updateHeaderTextCount(self, deckView):\r\n # Update the card-count hint in the row header\r\n count = len(deckView.model.lists)\r\n text = '%s\\n[%d]' % (deckView.deckType.label, count)\r\n if count == 0:\r\n text = deckView.deckType.label\r\n self._playerTableWidget.verticalHeaderItem(deckView.row).setText(text)\r\n\r\n # Resize the window with an animation\r\n def changeSize(self, size):\r\n self.animation = QPropertyAnimation(self, b'geometry')\r\n currentGeometry = self.geometry()\r\n currentGeometry.setSize(size)\r\n\r\n self.animation.setDuration(200)\r\n self.animation.setStartValue(self.geometry())\r\n self.animation.setEndValue(currentGeometry)\r\n self.animation.start()\r\n\r\n def onCardClick(self, cardView, event):\r\n if event.buttons() == Qt.RightButton:\r\n # remove card\r\n deckView = cardView.deckView\r\n if deckView:\r\n deckView.removeCard(cardView)\r\n self.updateHeaderTextCount(deckView)\r\n\r\n def onAddCardClick(self, cardView, event):\r\n if event.buttons() == Qt.RightButton:\r\n # add card\r\n # Only a single row can be selected\r\n if len(self._playerTableWidget.selectedIndexes()) > 0:\r\n modelIndex = self._playerTableWidget.selectedIndexes()[0]\r\n deckView = self._playerTableWidget.cellWidget(modelIndex.row(), modelIndex.column())\r\n self.addCardToDeckView(cardView.model, deckView)\r\n\r\n def onConfirmClick(self):\r\n # Write the edited lists back to the models only if they changed\r\n if self._deckType == DeckType.Hand:\r\n if self._playerModel.handCardList == self._handListModel and \\\r\n self._playerModel.DeployedCardList == self._deployedListModel:\r\n self.reject()\r\n else:\r\n self._playerModel.handCardList = self._handListModel\r\n self._playerModel.DeployedCardList = self._deployedListModel\r\n self.accept()\r\n elif self._deckType == DeckType.PerDeploy:\r\n if self._gameModel.DeployedCardList == self._deployedListModel:\r\n self.reject()\r\n else:\r\n self._gameModel.DeployedCardList = self._deployedListModel\r\n self.accept()\r\n","repo_name":"ThunderstormsZJ/Python","sub_path":"PyQT/worktools/widgets/DealCardsDialog.py","file_name":"DealCardsDialog.py","file_ext":"py","file_size_in_byte":10096,"program_lang":"python","lang":"en","doc_type":"code","stars":0,"dataset":"github-code","pt":"71"}
Reconstruct Itinerary.py","file_ext":"py","file_size_in_byte":412,"program_lang":"python","lang":"en","doc_type":"code","stars":1,"dataset":"github-code","pt":"71"} +{"seq_id":"12363936009","text":"from typing import Any\r\n\r\n\r\nclass Node:\r\n value: Any\r\n next: 'Node'\r\n\r\n def __init__(self, value):\r\n self.value = value\r\n self.next = None\r\n\r\n\r\nclass LinkedList:\r\n head: Node\r\n tail: Node\r\n\r\n def __init__(self):\r\n self.head = None\r\n self.tail = None\r\n\r\n def push(self, value: Any) -> None:\r\n if self.head is None:\r\n self.head = Node(value)\r\n self.tail = self.head\r\n else:\r\n new_head = Node(value)\r\n new_head.next = self.head\r\n self.head = new_head\r\n\r\n def append(self, value: Any) -> None:\r\n if self.tail is None:\r\n self.head = Node(value)\r\n self.tail = self.head\r\n else:\r\n self.tail.next = Node(value)\r\n self.tail = self.tail.next\r\n\r\n def node(self, at: int) -> Node:\r\n node1 = self.head\r\n for i in range(at):\r\n node1 = node1.next\r\n return node1\r\n\r\n def insert(self, value: Any, after: Node) -> None:\r\n new_node = Node(value)\r\n new_node.next = after.next\r\n after.next = new_node\r\n\r\n def pop(self) -> Any:\r\n previous_head = self.head\r\n new_head = self.head.next\r\n self.head = new_head\r\n return previous_head\r\n\r\n def remove_last(self) -> Any:\r\n new_tail = self.head\r\n while 1:\r\n node1 = new_tail.next\r\n if node1 == self.tail:\r\n break\r\n new_tail = node1\r\n new_tail.next = None\r\n self.tail = new_tail\r\n return node1\r\n\r\n def remove(self, after: Node) -> Any:\r\n removed = after.next\r\n after.next = removed.next\r\n\r\n def __str__(self) -> str:\r\n string: str = ''\r\n data = self.head\r\n while data is not None:\r\n string = string + str(data.value)\r\n data = data.next\r\n if data is not None:\r\n string = string + ' -> '\r\n return string\r\n\r\n def __len__(self):\r\n length = 0\r\n if self.head is not None:\r\n head = self.head\r\n length += 1\r\n while 1:\r\n length += 1\r\n head = head.next\r\n if head.next is None:\r\n break\r\n return length\r\n\r\n\r\nclass Stack:\r\n storage: LinkedList\r\n\r\n def __init__(self):\r\n self.storage = None\r\n\r\n def push(self, elements: Any) -> None:\r\n if self.storage is None:\r\n self.storage = LinkedList()\r\n self.storage.push(elements)\r\n else:\r\n self.storage.push(elements)\r\n\r\n def pop(self) -> Any:\r\n node = self.storage.pop()\r\n return node.value\r\n\r\n def __str__(self) -> str:\r\n string: str = ''\r\n if self.storage is not None:\r\n node = self.storage.head\r\n while 1:\r\n string = string + str(node.value) + '\\n'\r\n if node.next is None:\r\n break\r\n node = node.next\r\n return string\r\n\r\n def __len__(self):\r\n length = 0\r\n if self.storage is not None:\r\n pile = self.storage.head\r\n while 1:\r\n length += 1\r\n if pile.next is None:\r\n break\r\n pile = pile.next\r\n return length\r\n\r\n\r\nstack = Stack()\r\nassert len(stack) == 0\r\nstack.push(3)\r\nstack.push(10)\r\nstack.push(1)\r\nassert len(stack) == 3\r\ntop_value = stack.pop()\r\nassert top_value == 1\r\nassert len(stack) == 2\r\n","repo_name":"dawid-wol/AiSD","sub_path":"Lab2/Zad2.py","file_name":"Zad2.py","file_ext":"py","file_size_in_byte":3516,"program_lang":"python","lang":"en","doc_type":"code","stars":0,"dataset":"github-code","pt":"71"} +{"seq_id":"37524802497","text":"import gym\nimport numpy as np\nimport os\nimport torch\n\nfrom reprise.action_inference import ActionInference\nfrom reprise.context_inference import ContextInference\nfrom 
reprise.gym.rocketball.agent import Agent\n\nfrom .model import LSTM\n\n\nTEST_ROOT = os.path.join(os.path.pardir, os.path.dirname(__file__))\n\ncontext_size = 3\naction_size = 4\noutput_size = 2\ninput_size = context_size + action_size + output_size\nhidden_size = 8\nai_horizon = 10\ncriterion = torch.nn.MSELoss()\n\n\ndef ci_loss(outputs, targets):\n return criterion(torch.cat(outputs, dim=0),\n torch.cat(targets, dim=0))\n\n\ndef ai_loss(outputs, targets):\n return criterion(torch.cumsum(\n torch.cat(outputs, dim=0), dim=0), targets)\n\n\ndef test_reprise():\n np.random.seed(123)\n torch.manual_seed(123)\n\n model = LSTM(input_size, hidden_size, 1, output_size)\n lstm_h = torch.zeros(1, 1, hidden_size)\n lstm_c = torch.zeros(1, 1, hidden_size)\n lstm_state = [lstm_h, lstm_c]\n lstm_state_ci = [lstm_h.clone(), lstm_c.clone()]\n\n policy = torch.rand([ai_horizon, 1, action_size])\n optimizer = torch.optim.Adam(\n [policy], lr=0.1, betas=(0.9, 0.999))\n ai = ActionInference(\n model=model,\n policy=policy,\n optimizer=optimizer,\n inference_cycles=3,\n criterion=ai_loss,\n reset_optimizer=False,\n policy_handler=lambda x: x)\n\n context = torch.zeros([1, 1, context_size])\n def opt_accessor(state): return [state[0], state[1]]\n params = [{'params': [context], 'lr': 0.1},\n {'params': opt_accessor(lstm_state)}]\n optimizer = torch.optim.Adam(params)\n ci = ContextInference(\n model=model,\n initial_model_state=lstm_state_ci,\n context=context,\n optimizer=optimizer,\n inference_length=5,\n inference_cycles=5,\n criterion=ci_loss,\n reset_optimizer=False,\n opt_accessor=opt_accessor,\n context_handler=lambda x: x)\n\n position = torch.Tensor([[[0, 1]]])\n targets = torch.cat(\n ai_horizon *\n [torch.Tensor(\n [[np.random.uniform(-1.5, 1.5),\n np.random.uniform(0, 2)]])])\n targets = targets[:, None, :]\n delta = torch.zeros([1, 1, 2])\n\n env = gym.make('reprise.gym:rocketball-v0')\n env.reset()\n agent = Agent(id='foo', mode=0, init_pos=np.array([0, 1]), color='black')\n agent.update_target(targets[0][0].numpy())\n env.add_agent(agent)\n\n action = torch.zeros([4])\n\n all_actions = []\n all_contexts = []\n\n for t in range(50):\n observation = env.step([action.numpy()])\n position_old = position.clone()\n position = torch.Tensor(observation[0][0][1])\n position = position[None, None, :]\n delta_old = delta.clone()\n delta = position - position_old\n\n x_t = torch.zeros([1, 1, input_size])\n x_t[0, 0, :context_size] = context.detach()\n x_t[0, 0, context_size:context_size + action_size] = action\n x_t[0, 0, -output_size:] = delta_old\n\n with torch.no_grad():\n y_t, lstm_state = model.forward(x_t, lstm_state)\n context, _, states = ci.infer_contexts(\n x_t[:, :, context_size:], delta)\n lstm_state = (\n states[-1][0].clone().detach(),\n states[-1][1].clone().detach())\n policy, _, _ = ai.infer_actions(\n delta, lstm_state, context.clone().detach().repeat(\n policy.shape[0], 1, 1), targets - position)\n action = policy[0][0].detach()\n\n all_contexts.append(context.detach())\n all_actions.append(action.detach())\n\n contexts_file = os.path.join(TEST_ROOT, 'references/test_reprise_contexts.npy')\n actions_file = os.path.join(TEST_ROOT, 'references/test_reprise_actions.npy')\n\n contexts_ref = np.load(open(contexts_file, 'rb'))\n actions_ref = np.load(open(actions_file, 'rb'))\n\n assert np.allclose(contexts_ref, torch.stack(all_contexts, dim=0).numpy(), rtol=1e-04)\n assert np.allclose(actions_ref, torch.stack(all_actions, dim=0).numpy(), 
rtol=1e-04)\n","repo_name":"CognitiveModeling/reprise","sub_path":"tests/test_reprise.py","file_name":"test_reprise.py","file_ext":"py","file_size_in_byte":4062,"program_lang":"python","lang":"en","doc_type":"code","stars":0,"dataset":"github-code","pt":"71"} +{"seq_id":"44213003302","text":"from .actions import MessageActions\nfrom .message_document_info import SVM_MsgdocInfo\n\n\nclass MessageParser:\n def __init__(self, message_text: str, message_info: SVM_MsgdocInfo):\n self._message_text = message_text\n self.actions = {}\n self._links = []\n self._init_links(message_info)\n\n def _init_links(self, message_info: SVM_MsgdocInfo) -> None:\n if not message_info.entities:\n return\n self._links = [\n e[\"url\"] for e in message_info.entities if e[\"type\"] == \"text_link\"\n ]\n\n def parse(self) -> None:\n if not self._message_text and not self._links:\n return\n for code, action in MessageActions.CUSTOM_ACTION_BY_CODE.items():\n found_data = action.parse(self._message_text, self._links)\n if found_data:\n self.actions[code] = {\"data\": found_data}\n","repo_name":"kehlerr/cerrrbot","sub_path":"bot/services/savmes/message_parser.py","file_name":"message_parser.py","file_ext":"py","file_size_in_byte":885,"program_lang":"python","lang":"en","doc_type":"code","stars":0,"dataset":"github-code","pt":"71"} +{"seq_id":"30885115939","text":"import sys\nfrom PyQt5.QtWidgets import QApplication, QWidget\nfrom PyQt5.QtGui import QPainter, QPen\nfrom PyQt5.QtCore import Qt, QTimer\nfrom math import cos, sin, pi, sqrt\nfrom random import randint, shuffle\n\nresx, resy = int(input('Horizontal resolution: ')), int(input('Vertical resolution: '))\npresets = [\t[[70, 30, 4, 4, 6], 'Harmonic'],\n\t\t\t[[80, 27, 3, 7, 9], 'Fast'],\n\t\t\t[[100, 20, 3, 8, 10], 'Very fast'],\n\t\t\t[[60, 50, 3, 3, 7], 'Many'],\n\t\t\t[[50, 100, 3, 4, 5], 'Plenty'],\n\t\t\t[[40, 150, 3, 4, 6], 'Nu ochen mnogo'],\n\t\t\t[[70, 20, 3, 11, 11], 'SPEED']]\nprint()\nif int(input('0 - Presets\\n1 - Custom\\n')):\n\ta = int(input('\\nSquare side length: '))\n\tN = int(input('Number of squares: '))\n\tconnectivity = int(input('Max number of near squares for every square (on start): '))\n\tvmin, vmax = float(input('Min speed (pi/2 per second): ')), float(input('Max speed: '))\nelse:\n\tprint('\\n---Presets---')\n\tfor i in range(len(presets)): print(i, '-', presets[i][1])\n\ta, N, connectivity, vmin, vmax = presets[int(input())][0]\n\nbr = \t[\n\t\t\t[[1, 0], [1, 1], [0, 1]],\n\t\t\t[[-1, 0], [-1, 1], [0, 1]],\n\t\t\t[[-1, 0], [-1, -1], [0, -1]],\n\t\t\t[[0, -1], [1, -1], [1, 0]]\n\t\t]\ngx_change90, gy_change90 \t= [[-1, 0, 1, 0], [0, -1, 0, 1]], [[0, -1, 0, 1], [1, 0, -1, 0]]\ngx_change180, gy_change180 \t= [-1, -1, 1, 1], [1, -1, -1, 1]\nbindings = [[0, 1], [1, 2], [2, 3], [0, 3]]\ndots0 = [[-a/2, a/2], [-a/2, -a/2], [a/2, -a/2], [a/2, a/2]]\na90 = pi/2\n\ndef sign(a):\n\tif a < 0: return -1\n\treturn 1\n\ndef coherent(m):\n\tN = len(m)\n\tvisited, visiting = [m[0]], [m[0]]\n\twhile len(visited) != N:\n\t\tv = visiting\n\t\tvisiting = []\n\t\tfor c in v:\n\t\t\tfor ngc in m:\n\t\t\t\tif ((abs(c[0]-ngc[0]) == 1 and c[1] == ngc[1]) or (abs(c[1]-ngc[1]) == 1 and c[0] == ngc[0])) and not (ngc in visited):\n\t\t\t\t\tvisiting.append(ngc)\n\t\t\t\t\tvisited.append(ngc)\n\t\tif not visiting: return len(visited) == N\n\treturn True\n\ndef near_squares(s, squares):\n\tns = []\n\tfor square in squares:\n\t\tif (abs(square[0]-s[0]) == 1 and square[1] == s[1]) or (abs(square[1]-s[1]) == 1 and square[0] == s[0]): ns.append(square)\n\treturn 
ns\n\nclass Square():\n\t__slots__ = ['dots', 'x', 'y', 'gx', 'gy', 'plan_rotate_data', 'rotating']\n\tdef __init__(self, x, y):\n\t\tself.dots = [[x-a/2, y+a/2], [x-a/2, y-a/2], [x+a/2, y-a/2], [x+a/2, y+a/2]]\n\t\tself.x, self.y, self.gx, self.gy = x, y, x//a, y//a\n\t\tself.plan_rotate_data = [0, 0, 0, 1]\n\t\tself.rotating = False\n\tdef elemental_rotate(self, rdot, angle):\n\t\tc, s = cos(angle), sin(angle)\n\t\tfor dot in self.dots:\n\t\t\tdot[0], dot[1] = (dot[0] - rdot[0] - self.x)*c - (dot[1] - rdot[1] - self.y)*s + rdot[0] + self.x, (dot[0] - rdot[0] - self.x)*s + (dot[1] - rdot[1] - self.y)*c + rdot[1] + self.y\n\tdef draw(self, painter):\n\t\tfor binding in bindings: painter.drawLine(self.dots[binding[0]][0], self.dots[binding[0]][1], self.dots[binding[1]][0], self.dots[binding[1]][1])\n\tdef check90_180(self, rdot0_index, squares, cad = 1):\n\t\tmagic = cad*2 - 1\n\t\tfor square in squares:\n\t\t\tif [magic*(square.gx - self.gx), magic*(square.gy - self.gy)] in br[rdot0_index]: return\n\t\tswto = [[square.gx, square.gy] for square in squares]\n\t\tswto.remove([self.gx, self.gy])\n\t\tgx90 = self.gx + gx_change90[cad][rdot0_index]\n\t\tgy90 = self.gy + gy_change90[cad][rdot0_index]\n\t\tgx180 = self.gx + gx_change180[rdot0_index]\n\t\tgy180 = self.gy + gy_change180[rdot0_index]\n\t\tp90 = coherent(swto + [[gx90, gy90]])\n\t\tif not (0 < gx90 < resx // a and 0 < gy90 < resy // a): return\n\t\tp180 = True\n\t\tfor square in squares:\n\t\t\tif square != self and ([magic*(square.gx - gx90), magic*(square.gy - gy90)] in br[(rdot0_index + magic) % 4]):\n\t\t\t\tp180 = False\n\t\t\t\tbreak\n\t\tif p180: p180 = coherent(swto + [[gx180, gy180]]) and 0 < gx180 < resx // a and 0 < gy180 < resy // a\n\t\tif randint(0, 1) and p90: return a90 * magic\n\t\telif p180: return pi * magic\n\tdef plan_possible_rotate(self, squares):\n\t\tfor square in squares:\n\t\t\tif ((square.gx - self.gx)**2 + (square.gy - self.gy)**2 < 12) and square.rotating: return\n\t\tangle = []\n\t\tfor rdot in self.dots:\n\t\t\tok = False\n\t\t\tfor square in squares:\n\t\t\t\tif rdot in square.dots and square != self:\n\t\t\t\t\tok = True\n\t\t\t\t\tbreak\n\t\t\tif not ok: continue\n\t\t\trdot0_index = dots0.index([rdot[0] - self.x, rdot[1] - self.y])\n\t\t\tang = [self.check90_180(rdot0_index, squares), self.check90_180(rdot0_index, squares, 0)]\n\t\t\tif ang[0]:\n\t\t\t\tif ang[1]: angle.append([ang[randint(0,1)], rdot, rdot0_index])\n\t\t\t\telse: angle.append([ang[0], rdot, rdot0_index])\n\t\t\telse:\n\t\t\t\tif ang[1]: angle.append([ang[1], rdot, rdot0_index])\n\t\tif angle:\n\t\t\tangle = angle[randint(0, len(angle) - 1)]\n\t\t\tcad = (sign(angle[0]) + 1)//2\n\t\t\tif abs(angle[0]) == a90:\n\t\t\t\tself.gx += gx_change90[cad][angle[2]]\n\t\t\t\tself.gy += gy_change90[cad][angle[2]]\n\t\t\telse:\n\t\t\t\tself.gx += gx_change180[angle[2]]\n\t\t\t\tself.gy += gy_change180[angle[2]]\n\t\t\tself.plan_rotate_data = [dots0[self.dots.index(angle[1])], abs(angle[0]), int(abs(angle[0])/pi*180/randint(vmin, vmax)), sign(angle[0])]\n\t\t\tself.rotating = True\n\tdef rotate(self):\n\t\tif self.plan_rotate_data[2] > 0:\n\t\t\tself.elemental_rotate(self.plan_rotate_data[0], self.plan_rotate_data[3]*self.plan_rotate_data[1]/self.plan_rotate_data[2])\n\t\t\tself.plan_rotate_data[1] -= self.plan_rotate_data[1]/self.plan_rotate_data[2]\n\t\t\tself.plan_rotate_data[2] -= 1\n\t\t\tif self.plan_rotate_data[2] <= 0:\n\t\t\t\tfor dot in self.dots: dot[0], dot[1] = round(dot[0]), round(dot[1])\n\t\t\t\tself.x, self.y = self.gx*a, 
self.gy*a\n\t\t\t\tself.dots = [[self.x-a/2, self.y+a/2], [self.x-a/2, self.y-a/2], [self.x+a/2, self.y-a/2], [self.x+a/2, self.y+a/2]]\n\t\t\t\tself.rotating = False\n\t\t\t\treturn 1\n\t\telse: return 1\n\t\treturn 0\n\tdef ROTATE(self, squares):\n\t\tif self.rotating: self.rotate()\n\t\telse: self.plan_possible_rotate(squares)\n\ndef coherent_squares(N):\n\ts = [[resx//(2*a), resy//(2*a)]]\n\tfor i in range(N):\n\t\tok = False\n\t\ttry_n = 0\n\t\twhile not ok:\n\t\t\ttry_n += 1\n\t\t\tif try_n == 15000 // a: return False\n\t\t\tok = False\n\t\t\tngc = [randint(3, resx//a-3), randint(3, resy//a-3)]\n\t\t\twhile ngc in s: ngc = [randint(3, resx//a-3), randint(3, resy//a-3)]\n\t\t\tns = near_squares(ngc, s)\n\t\t\tok1 = True\n\t\t\tfor near_square in ns:\n\t\t\t\tif len(near_squares(near_square, s + [ngc])) > connectivity:\n\t\t\t\t\tok1 = False\n\t\t\t\t\tbreak\n\t\t\tif not ok1: continue\n\t\t\tfor c in s:\n\t\t\t\tif (abs(c[0]-ngc[0]) == 1 and c[1] == ngc[1]) or (abs(c[1]-ngc[1]) == 1 and c[0] == ngc[0]):\n\t\t\t\t\ts.append(ngc)\n\t\t\t\t\tok = True\n\t\t\t\t\tbreak\n\treturn [Square(gc[0]*a, gc[1]*a) for gc in s]\n\nclass drawer(QWidget):\n\tdef __init__(self):\n\t\tsuper().__init__()\n\t\tself.initiation()\n\t\tself.timer = QTimer()\n\t\tself.timer.timeout.connect(self.process_timeout)\n\t\tself.timer.start(10)\n\t\tself.update()\n\tdef process_timeout(self):\n\t\tfor square in self.squares: square.ROTATE(self.squares)\n\t\tself.update()\n\tdef initiation(self):\n\t\tself.setGeometry(0, 0, resx, resy)\n\t\tself.setWindowTitle('Quat')\n\t\tp = self.palette()\n\t\tp.setColor(self.backgroundRole(), Qt.black)\n\t\tself.setPalette(p)\n\t\tself.squares = False\n\t\twhile not self.squares: self.squares = coherent_squares(N)\n\t\tself.show()\n\tdef paintEvent(self, e):\n\t\tpainter = QPainter()\n\t\tpainter.begin(self)\n\t\tpainter.setPen(QPen(Qt.white, 1, Qt.SolidLine))\n\t\tpainter.drawLine(0, 0, resx, 0)\n\t\tpainter.drawLine(0, 0, 0, resy)\n\t\tpainter.drawLine(resx, 0, resx - 1, resy - 1)\n\t\tpainter.drawLine(0, resy, resx - 1, resy - 1)\n\t\tfor square in self.squares: square.draw(painter)\n\t\tpainter.end()\n\t\tself.show()\n\nif __name__ == '__main__':\n\tapp = QApplication(sys.argv)\n\tw = drawer()\n\tsys.exit(app.exec_())\n\n","repo_name":"MentalBlood/quat","sub_path":"quat.py","file_name":"quat.py","file_ext":"py","file_size_in_byte":7394,"program_lang":"python","lang":"en","doc_type":"code","stars":0,"dataset":"github-code","pt":"71"} {"seq_id":"38926862869","text":"from app.db import db\nfrom flask import jsonify\nfrom marshmallow import ValidationError\nfrom app.person.document_type.entity.document_type_entity import DocumentTypeEntity\nfrom app.person.person.entity.person_entity import PersonEntity\nfrom app.person.document_type.schema.document_type_schema import document_type_schema, list_documents_type_schema\nfrom app.person.document_type.model.document_type_dto import DocumentTypeDTO\n\nDocumentTypeEntity.start_mapper()\n\ndef get_all_documents():\n data = db.session.query(DocumentTypeEntity).all()\n if not data:\n return {\"msg\":\"There are no document types\"}, 404\n result = list_documents_type_schema.dump(data)\n return jsonify({'data':result})\n\ndef save_document(data):\n try:\n document = document_type_schema.load(data)\n db.session.add(DocumentTypeDTO(name=document['name']))\n db.session.commit()\n return document\n except ValidationError as error:\n return {'error':error.args}\n except Exception as error:\n return 
{'error':error.args}\n","repo_name":"BrayanRoa/legacy_ayd_classroom_projects","sub_path":"app/person/document_type/service/document_type_service.py","file_name":"document_type_service.py","file_ext":"py","file_size_in_byte":1039,"program_lang":"python","lang":"en","doc_type":"code","stars":0,"dataset":"github-code","pt":"71"} {"seq_id":"13919643523","text":"'''\ngutils.py\n'''\n\nimport time\n\nclass TicToc:\n\n def __init__(self):\n self.tic = time.time()\n self.toc = 0\n def __getattr__(self, now):\n if self.tic > 0:\n self.toc = time.time()\n return \"{:.4f}\".format(self.toc - self.tic)\n else:\n self.tic = time.time()\n\n\nif __name__ == \"__main__\":\n\n # let's implement it\n timer = TicToc()\n print(timer.now)\n print(timer.now)","repo_name":"marcomarchesi/giraffe","sub_path":"gutils.py","file_name":"gutils.py","file_ext":"py","file_size_in_byte":437,"program_lang":"python","lang":"en","doc_type":"code","stars":0,"dataset":"github-code","pt":"71"} {"seq_id":"25224831301","text":"# encoding: utf-8\nimport datetime\n\n__all__ = [\n 'info',\n ]\n\ndef info():\n return {\n 'birthday': datetime.date(1991, 4, 20),\n 'class': 6,\n 'family_name_en': u'nonaka',\n 'family_name_kana': u'のなか',\n 'first_name_en': u'misato',\n 'first_name_kana': u'みさと',\n 'graduate_date': None,\n 'hometown': u'福冈',\n 'name_en': u'Nonaka Misato',\n 'name_ja': u'野中美郷',\n 'name_kana': u'のなか みさと',\n 'nick': u'みちゃ',\n 'team': u'K',\n }\n","repo_name":"moriyoshi/pyakb48","sub_path":"akb48/member/nonaka_misato.py","file_name":"nonaka_misato.py","file_ext":"py","file_size_in_byte":642,"program_lang":"python","lang":"zh","doc_type":"code","stars":8,"dataset":"github-code","pt":"71"} {"seq_id":"73347538150","text":"from jaxtyping import Array, Float as F, UInt16 as U16, PyTree, jaxtyped\n\n@jaxtyped\ndef fwd_embedding(params: PyTree, x: U16[Array, '*dims']) -> F[Array, '*dims embed_size']:\n # params\n embedding: Array = params['embedding'] # array\n\n # check_type('embedding', embedding, F[Array, 'vocab_size embed_size'])\n\n y = embedding[x]\n # check_type('y', y, F[Array, '*dims embed_size'])\n\n return y\n","repo_name":"ayaka14732/TransCan","sub_path":"lib/model/fwd_embedding.py","file_name":"fwd_embedding.py","file_ext":"py","file_size_in_byte":408,"program_lang":"python","lang":"en","doc_type":"code","stars":34,"dataset":"github-code","pt":"71"} {"seq_id":"2766058868","text":"from typing import List\nimport collections\n\n\nclass TreeNode:\n \"\"\"Definition for a binary tree node\n \"\"\" \n def __init__(self, val=0, left=None, right=None):\n self.val = val\n self.left = left\n self.right = right\n\ndef list_to_tree(arr):\n \"\"\"Generate a binary tree from a list\n\n Args:\n arr (list): node values in level order\n\n Returns:\n TreeNode: the root of the built tree\n \"\"\"\n if not arr:\n return\n i = 1\n root = TreeNode(int(arr[0]))\n queue = collections.deque()\n queue.append(root)\n while queue:\n node = queue.popleft()\n if i < len(arr):\n node.left = TreeNode(int(arr[i]))\n queue.append(node.left)\n i += 1\n if i < len(arr):\n node.right = TreeNode(int(arr[i]))\n queue.append(node.right)\n i += 1\n return root\n\nclass Solution:\n def isSubStructure(self, A: TreeNode, B: TreeNode) -> bool:\n def recur(A, B):\n if not B: return True\n if not A or A.val != B.val: return False\n return recur(A.left, B.left) and recur(A.right, B.right)\n\n\n return bool(A and B) and (recur(A, B) or self.isSubStructure(A.left, B) or self.isSubStructure(A.right, B))\n\n\n\nif __name__ == '__main__':\n # ======= Test Case =======\n A = [3,4,5,1,2]\n B = [4,1]\n # ====== Driver Code ======\n sol = Solution()\n A = list_to_tree(A)\n B = list_to_tree(B)\n res = sol.isSubStructure(A, B)\n 
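# for A = [3,4,5,1,2] and B = [4,1], B matches the subtree rooted at 4, so this prints True\n    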
print(res)","repo_name":"bizbard/leetcode-jianzhioffer-python","sub_path":"树的子结构.py","file_name":"树的子结构.py","file_ext":"py","file_size_in_byte":1987,"program_lang":"python","lang":"en","doc_type":"code","stars":0,"dataset":"github-code","pt":"71"} +{"seq_id":"11113080630","text":"import bpy\nimport bmesh\nfrom math import sqrt\nfrom mathutils import Vector, Matrix\nfrom random import random, seed, uniform, randint, randrange\n\nNO_SYMMETRY_INDEX = 99\n# Inspired by: https://github.com/a1studmuffin/SpaceshipGenerator\n\n\n# Get all faces connected to the given face\ndef get_connected_faces(face):\n connected_faces = []\n for edge in face.edges:\n for connected_face in edge.link_faces:\n if connected_face != face:\n connected_faces.append(connected_face)\n\n return connected_faces\n\n\n# Extrudes a face along its normal by length units, returns the new face\ndef extrude_face(bm, face, length):\n result = bmesh.ops.extrude_discrete_faces(bm, faces=[face])\n new_faces = result['faces']\n new_face = new_faces[0]\n bmesh.ops.translate(bm, vec=new_face.normal * length, verts=new_face.verts)\n return new_face\n\n\n# Similar to extrude_face, except corrigates the geometry to create \"ribs\".\n# Returns the new face.\ndef ribbed_extrude_face(bm, face, length, rib_count=3, rib_scale=0.9):\n rib_length = length / float(rib_count)\n new_face = face\n for i in range(rib_count):\n new_face = extrude_face(bm, new_face, rib_length * 0.25)\n new_face = extrude_face(bm, new_face, 0.0)\n scale_face(bm, new_face, rib_scale, rib_scale, rib_scale)\n new_face = extrude_face(bm, new_face, rib_length * 0.5)\n new_face = extrude_face(bm, new_face, 0.0)\n scale_face(bm, new_face, 1 / rib_scale, 1 / rib_scale, 1 / rib_scale)\n new_face = extrude_face(bm, new_face, rib_length * 0.25)\n return new_face\n\n\n# Returns the rough length and width of a quad face.\n# Assumes a perfect rectangle, but close enough.\ndef get_face_width_and_height(face):\n if not face.is_valid or len(face.verts[:]) < 4:\n return -1, -1\n width = (face.verts[2].co - face.verts[1].co).length\n height = (face.verts[0].co - face.verts[1].co).length\n return width, height\n\n\n# Returns the rough aspect ratio of a face. 
Always >= 1.\ndef get_aspect_ratio(face):\n if not face.is_valid:\n return 1.0\n face_aspect_ratio = max(0.01, face.edges[0].calc_length() / face.edges[1].calc_length())\n if face_aspect_ratio < 1.0:\n face_aspect_ratio = 1.0 / face_aspect_ratio\n return face_aspect_ratio\n\n\n# Scales a face in local face space\ndef scale_face(bm, face, x, y, z):\n face_space = get_face_matrix(face)\n face_space.invert()\n bmesh.ops.scale(bm, vec=Vector((x, y, z)), space=face_space, verts=face.verts)\n\n\n# Given a face, splits it up into a smaller uniform grid and extrudes each grid cell.\ndef add_grid_to_face(bm, face):\n if not face.is_valid:\n return\n result = bmesh.ops.subdivide_edges(bm,\n edges=face.edges[:],\n cuts=randint(2, 4),\n fractal=0.02,\n use_grid_fill=True,\n use_single_edge=False)\n grid_length = uniform(0.025, 0.15)\n scale = 0.8\n for face in result['geom']:\n if isinstance(face, bmesh.types.BMFace):\n face = extrude_face(bm, face, grid_length)\n scale_face(bm, face, scale, scale, scale)\n\n\n# Given a face, adds some pointy intimidating antennas.\ndef add_surface_antenna_to_face(bm, face):\n if not face.is_valid or len(face.verts[:]) < 4:\n return\n horizontal_step = randint(4, 10)\n vertical_step = randint(4, 10)\n for h in range(horizontal_step):\n top = face.verts[0].co.lerp(\n face.verts[1].co, (h + 1) / float(horizontal_step + 1))\n bottom = face.verts[3].co.lerp(\n face.verts[2].co, (h + 1) / float(horizontal_step + 1))\n for v in range(vertical_step):\n if random() > 0.9:\n pos = top.lerp(bottom, (v + 1) / float(vertical_step + 1))\n face_size = sqrt(face.calc_area())\n depth = uniform(0.1, 1.5) * face_size\n depth_short = depth * uniform(0.02, 0.15)\n base_diameter = uniform(0.005, 0.05)\n\n # Spire\n num_segments = uniform(3, 6)\n result = bmesh.ops.create_cone(bm,\n cap_ends=False,\n cap_tris=False,\n segments=num_segments,\n diameter1=0,\n diameter2=base_diameter,\n depth=depth,\n matrix=get_face_matrix(face, pos + face.normal * depth * 0.5))\n\n # Base\n result = bmesh.ops.create_cone(bm,\n cap_ends=True,\n cap_tris=False,\n segments=num_segments,\n diameter1=base_diameter * uniform(1, 1.5),\n diameter2=base_diameter * uniform(1.5, 2),\n depth=depth_short,\n matrix=get_face_matrix(face, pos + face.normal * depth_short * 0.45))\n\n# Returns a rough 4x4 transform matrix for a face (doesn't handle\n# distortion/shear) with optional position override.\ndef get_face_matrix(face, pos=None):\n x_axis = (face.verts[1].co - face.verts[0].co).normalized()\n z_axis = -face.normal\n y_axis = z_axis.cross(x_axis)\n if not pos:\n pos = face.calc_center_bounds()\n\n # Construct a 4x4 matrix from axes + position:\n # http://i.stack.imgur.com/3TnQP.png\n mat = Matrix()\n mat[0][0] = x_axis.x\n mat[1][0] = x_axis.y\n mat[2][0] = x_axis.z\n mat[3][0] = 0\n mat[0][1] = y_axis.x\n mat[1][1] = y_axis.y\n mat[2][1] = y_axis.z\n mat[3][1] = 0\n mat[0][2] = z_axis.x\n mat[1][2] = z_axis.y\n mat[2][2] = z_axis.z\n mat[3][2] = 0\n mat[0][3] = pos.x\n mat[1][3] = pos.y\n mat[2][3] = pos.z\n mat[3][3] = 1\n return mat\n\n\n# Generates a short base with differently scaled segments\n# and optionally ribbed surfaces\ndef generate_base(bm, top_face):\n segments = randrange(1, 3)\n segment_range = range(segments)\n for i in segment_range:\n segment_length = uniform(0.2, 0.6)\n\n if random() > 0.45:\n top_face = extrude_face(bm, top_face, segment_length)\n\n segment_scale = uniform(0.6, 1.4)\n scale_face(bm, top_face, segment_scale, segment_scale, 1)\n else:\n rib_scale = uniform(0.75, 0.95)\n rib_count = 
randint(2, 4)\n top_face = ribbed_extrude_face(bm, top_face, segment_length, rib_count, rib_scale)\n\n return top_face\n\n\ndef generate_bearings(bm, top_face):\n dimple = uniform(-0.15, 0.0)\n top_face = extrude_face(bm, top_face, dimple)\n bearing_scale = uniform(0.3, 0.8)\n scale_face(bm, top_face, bearing_scale, bearing_scale, 1)\n\n top_face.material_index = NO_SYMMETRY_INDEX\n for f in get_connected_faces(top_face):\n f.material_index = NO_SYMMETRY_INDEX\n\n bearing_length = uniform(abs(dimple) + 0.1, abs(dimple) + 0.3)\n top_face = extrude_face(bm, top_face, bearing_length)\n\n # Make sure faces extruded from this face do not also get the NO_SYMMETRY_INDEX\n top_face.material_index = 0\n for f in get_connected_faces(top_face):\n f.material_index = NO_SYMMETRY_INDEX\n\n return (top_face, bearing_scale)\n\n\ndef generate_top(bm, top_face, bearing_scale):\n inv = 1.0 / bearing_scale\n top_face = extrude_face(bm, top_face, 0)\n scale_face(bm, top_face, inv, inv, 1)\n\n for f in get_connected_faces(top_face):\n f.material_index = NO_SYMMETRY_INDEX\n\n top_face = extrude_face(bm, top_face, uniform(0.5, 1.25))\n faces_list = get_connected_faces(top_face)\n return (top_face, faces_list)\n\n\ndef add_barrel(bm, face):\n face_width, face_height = get_face_width_and_height(face)\n nozzle_ratio = uniform(0.1, 0.25)\n nozzle_size = nozzle_ratio * min(face_width, face_height)\n nozzle_depth = uniform(0.5, 1.0)\n\n segments = randrange(1, 3)\n segment_range = range(segments)\n for i in segment_range:\n segment_length = uniform(0.1, 0.5)\n\n if random() > 0.45:\n face = extrude_face(bm, face, segment_length)\n segment_scale = nozzle_size * 2 * uniform(2.0, 3.5)\n scale_face(bm, face, segment_scale, segment_scale, 1)\n else:\n rib_scale = uniform(0.75, 0.95)\n rib_count = randint(2, 4)\n face = ribbed_extrude_face(bm, face, segment_length, rib_count, rib_scale)\n\n sphere_matrix = get_face_matrix(face,\n face.calc_center_bounds() + face.normal * nozzle_depth * 0.5)\n\n bmesh.ops.create_cone(bm,\n cap_ends=False,\n segments=12,\n diameter1=nozzle_size,\n diameter2=uniform(0.7, 1.3) * nozzle_size,\n depth=nozzle_depth,\n matrix=sphere_matrix)\n\n return face\n\n\ndef generate():\n current_seed = randint(0, pow(2, 31))\n print(\"Seed {0}\".format(current_seed))\n seed(current_seed)\n prefix = \"TURRET_\"\n\n # Remove previous iterations\n for b in bpy.data.objects:\n if b.name.startswith(prefix):\n print({\"INFO\"}, \"Removing {0}\".format(b.name))\n bpy.data.objects.remove(b)\n\n # Create a cone to start out with\n bm = bmesh.new()\n bmesh.ops.create_cone(\n bm,\n cap_ends=True,\n segments=6,\n diameter1=1.0,\n diameter2=1.0,\n depth=1.0)\n\n # Random scale\n scale_vector = Vector((1, 1, uniform(0.25, 0.5)))\n bmesh.ops.scale(bm, vec=scale_vector, verts=bm.verts)\n\n # Find the top facing face\n for face in bm.faces[:]:\n if face.normal.z > 0.5:\n top_face = face\n\n top_face = generate_base(bm, top_face)\n (top_face, bearing_scale) = generate_bearings(bm, top_face)\n (top_face, side_faces) = generate_top(bm, top_face, bearing_scale)\n\n asymmetry_faces = []\n grid_faces = []\n antenna_faces = []\n\n num_asymmetry_segments_min = 1\n num_asymmetry_segments_max = 3\n for face in bm.faces[:]:\n # Skip specifically excluded faces\n if face.material_index == NO_SYMMETRY_INDEX:\n continue\n # Skip any long thin faces as it'll probably look stupid\n if get_aspect_ratio(face) > 3:\n continue\n # Skip small faces\n #if face.calc_area() < 1.0:\n # face.select = True\n if random() > 0.9:\n 
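# roughly one face in ten becomes an asymmetrical hull extrusion\n            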
asymmetry_faces.append(face)\n elif random() > 0.95:\n grid_faces.append(face)\n elif face.normal.z > 0.85:\n antenna_faces.append(face)\n\n # Add some large asymmetrical sections of the hull that stick out\n for face in asymmetry_faces:\n hull_piece_length = uniform(0.1, 0.4)\n for i in range(randrange(num_asymmetry_segments_min, num_asymmetry_segments_max)):\n face = extrude_face(bm, face, hull_piece_length)\n\n # Maybe apply some scaling\n if random() > 0.25:\n s = 1 / uniform(1.1, 1.5)\n scale_face(bm, face, s, s, s)\n\n # Find the highest forward facing face\n best_face = None\n best_face_z = 0\n for face in bm.faces[:]:\n if face.normal.x > 0.99 and face.normal.z < 0.01 and face.normal.y < 0.01:\n face_z = face.calc_center_median()[2]\n if best_face is None or face_z > best_face_z:\n best_face = face\n best_face_z = face_z\n\n # Place the barrel\n add_barrel(bm, best_face)\n\n for face in grid_faces:\n add_grid_to_face(bm, face)\n\n for face in antenna_faces:\n add_surface_antenna_to_face(bm, face)\n\n # Finish up, write the bmesh into a new mesh\n mesh = bpy.data.meshes.new('Mesh')\n bm.to_mesh(mesh)\n bm.free()\n\n # Add the mesh to the scene\n scene = bpy.context.scene\n obj = bpy.data.objects.new(\"{0}body\".format(prefix), mesh)\n scene.objects.link(obj)\n\n # Select and make active\n scene.objects.active = obj\n obj.select = False\n\n # Recenter the object to its center of mass\n bpy.ops.object.origin_set(type='ORIGIN_CENTER_OF_MASS')\n ob = bpy.context.object\n ob.location = (0, 0, 0)\n\n # Add a fairly broad bevel modifier to angularize shape\n bevel_modifier = ob.modifiers.new('Bevel', 'BEVEL')\n bevel_modifier.width = uniform(5, 20)\n bevel_modifier.offset_type = 'PERCENT'\n bevel_modifier.segments = 2\n bevel_modifier.profile = uniform(0.0, 0.5)\n bevel_modifier.limit_method = 'NONE'\n# bpy.ops.object.modifier_apply(modifier='Bevel', apply_as=\"DATA\")\n\n solidify_modifier = ob.modifiers.new('Solidify', 'SOLIDIFY')\n solidify_modifier.thickness = 0.03\n\n\n\ngenerate()\n","repo_name":"roy-t/MiniRTS","sub_path":"vOld/Scripts/ScratchPad.py","file_name":"ScratchPad.py","file_ext":"py","file_size_in_byte":13005,"program_lang":"python","lang":"en","doc_type":"code","stars":116,"dataset":"github-code","pt":"71"} {"seq_id":"6625560761","text":"# import the libraries\r\nimport datetime # used to get the current time\r\nimport logging\r\nimport sqlite3\r\nimport asyncio\r\nfrom telegram.ext import Application\r\nfrom telegram.ext import CommandHandler\r\nfrom telegram import ReplyKeyboardMarkup\r\n\r\n# bot token\r\nTOKEN = '6034472814:AAGQMRiI97yXXlIlok6yC0K08eCZTSILFf0'\r\n\r\n# add the reply keyboard\r\nreply_keyboard = [['/new', '/show_all'],\r\n ['/delete', '/delete_all'],\r\n ['/remind']]\r\nmarkup = ReplyKeyboardMarkup(reply_keyboard, one_time_keyboard=False)\r\n\r\n# create the database that stores the task list\r\nconn = sqlite3.connect('tasks.db')\r\nc = conn.cursor()\r\nc.execute('''CREATE TABLE IF NOT EXISTS tasks\r\n (id INTEGER PRIMARY KEY AUTOINCREMENT,\r\n user_id INTEGER,\r\n task STRING,\r\n date STRING)''')\r\nconn.commit()\r\n\r\n# set up logging\r\nlogging.basicConfig(\r\n format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', level=logging.DEBUG\r\n)\r\n\r\nlogger = logging.getLogger(__name__)\r\n\r\n\r\n# /start command\r\nasync def start(update, context):\r\n user = update.effective_user\r\n await update.message.reply_html(\r\n f\"Привет {user.mention_html()}! 
Я твой тайм-менеджер, я помогу тебе сэкономить много драгоценного времени!\\n\"\r\n \"Чтобы узнать список команд, напиши /help.\",\r\n reply_markup=markup\r\n )\r\n\r\n\r\n# /help command\r\nasync def help(update, context):\r\n user = update.effective_user\r\n await update.message.reply_text(\r\n \"/new