function
stringlengths
11
56k
repo_name
stringlengths
5
60
features
list
def test_view_person_othername_list_unauthorized(self):
    """Anonymous GET on a person's other-names list endpoint returns 200."""
    resp = self.client.get(
        "/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/othernames/"
    )
    self.assertEqual(resp.status_code, status.HTTP_200_OK)
Sinar/popit_ng
[ 21, 4, 21, 91, 1443407491 ]
def test_view_person_othername_details_unauthorized(self):
    """Anonymous GET on a single other-name detail endpoint returns 200."""
    resp = self.client.get(
        "/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/othernames/cf93e73f-91b6-4fad-bf76-0782c80297a8/"
    )
    self.assertEqual(resp.status_code, status.HTTP_200_OK)
Sinar/popit_ng
[ 21, 4, 21, 91, 1443407491 ]
def test_view_person_othername_details_authorized(self):
    """Token-authenticated GET on an other-name detail endpoint returns 200."""
    auth = Token.objects.get(user__username="admin")
    self.client.credentials(HTTP_AUTHORIZATION='Token ' + auth.key)
    resp = self.client.get(
        "/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/othernames/cf93e73f-91b6-4fad-bf76-0782c80297a8/"
    )
    self.assertEqual(resp.status_code, status.HTTP_200_OK)
Sinar/popit_ng
[ 21, 4, 21, 91, 1443407491 ]
def test_create_person_othername_unauthorized(self):
    """Anonymous POST of a new other-name is rejected with 401."""
    payload = {
        "name": "jane",
        "family_name": "jambul",
        "given_name": "test person",
        "start_date": "1950-01-01",
        "end_date": "2010-01-01",
    }
    resp = self.client.post(
        "/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/othernames/",
        payload
    )
    self.assertEqual(resp.status_code, status.HTTP_401_UNAUTHORIZED)
Sinar/popit_ng
[ 21, 4, 21, 91, 1443407491 ]
def test_update_person_othername_unauthorized(self):
    """Anonymous PUT on an existing other-name is rejected with 401.

    Fix: the original fetched `person` and `other_name` from the ORM but
    never used either local — dead code removed; the assertion only
    concerns the HTTP status of the unauthenticated request.
    """
    data = {
        "family_name": "jambul",
    }
    response = self.client.put(
        "/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/othernames/cf93e73f-91b6-4fad-bf76-0782c80297a8/",
        data
    )
    self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
Sinar/popit_ng
[ 21, 4, 21, 91, 1443407491 ]
def test_update_person_othername_authorized(self):
    """Authenticated PUT updates the other-name and persists the change."""
    payload = {
        "family_name": "jambul",
    }
    auth = Token.objects.get(user__username="admin")
    self.client.credentials(HTTP_AUTHORIZATION='Token ' + auth.key)
    resp = self.client.put(
        "/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/othernames/cf93e73f-91b6-4fad-bf76-0782c80297a8/",
        payload
    )
    self.assertEqual(resp.status_code, status.HTTP_200_OK)
    # Re-read from the database to confirm the write was persisted.
    owner = Person.objects.language('en').get(id='8497ba86-7485-42d2-9596-2ab14520f1f4')
    saved = owner.other_names.language('en').get(id="cf93e73f-91b6-4fad-bf76-0782c80297a8")
    self.assertEqual(saved.family_name, "jambul")
Sinar/popit_ng
[ 21, 4, 21, 91, 1443407491 ]
def test_delete_person_othername_unauthorized(self):
    """Anonymous DELETE on an other-name is rejected with 401."""
    resp = self.client.delete(
        "/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/othernames/cf93e73f-91b6-4fad-bf76-0782c80297a8/"
    )
    self.assertEqual(resp.status_code, status.HTTP_401_UNAUTHORIZED)
Sinar/popit_ng
[ 21, 4, 21, 91, 1443407491 ]
def test_delete_person_othername_authorized(self):
    """Authenticated DELETE on an other-name succeeds with 204."""
    auth = Token.objects.get(user__username="admin")
    self.client.credentials(HTTP_AUTHORIZATION='Token ' + auth.key)
    resp = self.client.delete(
        "/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/othernames/cf93e73f-91b6-4fad-bf76-0782c80297a8/"
    )
    self.assertEqual(resp.status_code, status.HTTP_204_NO_CONTENT)
Sinar/popit_ng
[ 21, 4, 21, 91, 1443407491 ]
def test_get_person_identifier_link_list_unauthorized(self):
    """Anonymous GET lists the links nested under a person's identifier.

    Fixture ids:
      identifier af7c01b5-1c4f-4c08-9174-3de5ff270bdb
      link       9c9a2093-c3eb-4b51-b869-0d3b4ab281fd
      person     8497ba86-7485-42d2-9596-2ab14520f1f4
    """
    resp = self.client.get(
        "/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/identifiers/af7c01b5-1c4f-4c08-9174-3de5ff270bdb/links/"
    )
    self.assertEqual(resp.status_code, status.HTTP_200_OK)
    first = resp.data["results"][0]
    self.assertEqual(first["url"], "http://github.com/sinarproject/")
Sinar/popit_ng
[ 21, 4, 21, 91, 1443407491 ]
def test_get_person_identifier_link_detail_unauthorized(self):
    """Anonymous GET on one nested identifier link returns the link url."""
    resp = self.client.get(
        "/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/identifiers/af7c01b5-1c4f-4c08-9174-3de5ff270bdb/links/9c9a2093-c3eb-4b51-b869-0d3b4ab281fd/"
    )
    self.assertEqual(resp.status_code, status.HTTP_200_OK)
    # NOTE(review): detail payload is apparently wrapped in "results"
    # like the list endpoint — confirm against the view implementation.
    self.assertEqual(resp.data["results"]["url"], "http://github.com/sinarproject/")
Sinar/popit_ng
[ 21, 4, 21, 91, 1443407491 ]
def test_get_person_identifier_link_detail_authorized(self):
    """Authenticated GET on one nested identifier link returns the link url."""
    auth = Token.objects.get(user__username="admin")
    self.client.credentials(HTTP_AUTHORIZATION='Token ' + auth.key)
    resp = self.client.get(
        "/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/identifiers/af7c01b5-1c4f-4c08-9174-3de5ff270bdb/links/9c9a2093-c3eb-4b51-b869-0d3b4ab281fd/"
    )
    self.assertEqual(resp.status_code, status.HTTP_200_OK)
    self.assertEqual(resp.data["results"]["url"], "http://github.com/sinarproject/")
Sinar/popit_ng
[ 21, 4, 21, 91, 1443407491 ]
def test_create_person_identifier_link_unauthorized(self):
    """Anonymous POST of a nested identifier link is rejected with 401."""
    payload = {
        "url": "http://twitter.com/sinarproject"
    }
    resp = self.client.post(
        "/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/identifiers/af7c01b5-1c4f-4c08-9174-3de5ff270bdb/links/",
        payload
    )
    self.assertEqual(resp.status_code, status.HTTP_401_UNAUTHORIZED)
Sinar/popit_ng
[ 21, 4, 21, 91, 1443407491 ]
def test_update_person_identifier_link_unauthorized(self):
    """Anonymous PUT on a nested identifier link is rejected with 401."""
    payload = {
        "note":"This is a nested link"
    }
    resp = self.client.put(
        "/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/identifiers/af7c01b5-1c4f-4c08-9174-3de5ff270bdb/links/9c9a2093-c3eb-4b51-b869-0d3b4ab281fd/",
        payload
    )
    self.assertEqual(resp.status_code, status.HTTP_401_UNAUTHORIZED)
Sinar/popit_ng
[ 21, 4, 21, 91, 1443407491 ]
def test_update_person_identifier_link_authorized(self):
    """Authenticated PUT updates a nested identifier link and persists it."""
    payload = {
        "note":"This is a nested link"
    }
    auth = Token.objects.get(user__username="admin")
    self.client.credentials(HTTP_AUTHORIZATION='Token ' + auth.key)
    resp = self.client.put(
        "/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/identifiers/af7c01b5-1c4f-4c08-9174-3de5ff270bdb/links/9c9a2093-c3eb-4b51-b869-0d3b4ab281fd/",
        payload
    )
    self.assertEqual(resp.status_code, status.HTTP_200_OK)
    # Walk person -> identifier -> link (9c9a2093-c3eb-4b51-b869-0d3b4ab281fd)
    # to confirm the note was written.
    owner = Person.objects.language("en").get(id="8497ba86-7485-42d2-9596-2ab14520f1f4")
    ident = owner.identifiers.language("en").get(id="af7c01b5-1c4f-4c08-9174-3de5ff270bdb")
    saved_link = ident.links.language("en").get(id="9c9a2093-c3eb-4b51-b869-0d3b4ab281fd")
    self.assertEqual(saved_link.note, "This is a nested link")
Sinar/popit_ng
[ 21, 4, 21, 91, 1443407491 ]
def test_delete_person_identifier_link_unauthorized(self):
    """Anonymous DELETE on a nested identifier link is rejected with 401."""
    resp = self.client.delete(
        "/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/identifiers/af7c01b5-1c4f-4c08-9174-3de5ff270bdb/links/9c9a2093-c3eb-4b51-b869-0d3b4ab281fd/"
    )
    self.assertEqual(resp.status_code, status.HTTP_401_UNAUTHORIZED)
Sinar/popit_ng
[ 21, 4, 21, 91, 1443407491 ]
def test_delete_person_identifier_link_authorized(self):
    """Authenticated DELETE on a nested identifier link succeeds with 204."""
    auth = Token.objects.get(user__username="admin")
    self.client.credentials(HTTP_AUTHORIZATION='Token ' + auth.key)
    resp = self.client.delete(
        "/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/identifiers/af7c01b5-1c4f-4c08-9174-3de5ff270bdb/links/9c9a2093-c3eb-4b51-b869-0d3b4ab281fd/"
    )
    self.assertEqual(resp.status_code, status.HTTP_204_NO_CONTENT)
Sinar/popit_ng
[ 21, 4, 21, 91, 1443407491 ]
def test_list_person_othername_link(self):
    """GET lists the links nested under a person's other-name."""
    resp = self.client.get(
        "/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/othernames/cf93e73f-91b6-4fad-bf76-0782c80297a8/links/"
    )
    self.assertEqual(resp.status_code, status.HTTP_200_OK)
Sinar/popit_ng
[ 21, 4, 21, 91, 1443407491 ]
def test_show_person_othername_link_detail(self):
    """GET on one link nested under an other-name returns 200."""
    resp = self.client.get(
        "/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/othernames/cf93e73f-91b6-4fad-bf76-0782c80297a8/links/4d8d71c4-20ea-4ed1-ae38-4b7d7550cdf6/"
    )
    self.assertEqual(resp.status_code, status.HTTP_200_OK)
Sinar/popit_ng
[ 21, 4, 21, 91, 1443407491 ]
def test_create_person_othername_link_authorized(self):
    """Authenticated POST creates a link under an other-name (201)."""
    payload = {
        "url": "http://github.com/sinar"
    }
    auth = Token.objects.get(user__username="admin")
    self.client.credentials(HTTP_AUTHORIZATION='Token ' + auth.key)
    resp = self.client.post(
        "/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/othernames/cf93e73f-91b6-4fad-bf76-0782c80297a8/links/",
        payload
    )
    self.assertEqual(resp.status_code, status.HTTP_201_CREATED)
Sinar/popit_ng
[ 21, 4, 21, 91, 1443407491 ]
def test_update_person_othername_link_not_exist_authorized(self):
    """Authenticated PUT on a nonexistent other-name link returns 404."""
    payload = {
        "note": "Just a link"
    }
    auth = Token.objects.get(user__username="admin")
    self.client.credentials(HTTP_AUTHORIZATION='Token ' + auth.key)
    resp = self.client.put(
        "/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/othernames/cf93e73f-91b6-4fad-bf76-0782c80297a8/links/not_exist/",
        payload
    )
    self.assertEqual(resp.status_code, status.HTTP_404_NOT_FOUND)
Sinar/popit_ng
[ 21, 4, 21, 91, 1443407491 ]
def test_update_person_othername_link_authorized(self):
    """Authenticated PUT updates an existing other-name link (200)."""
    payload = {
        "note": "Just a link"
    }
    auth = Token.objects.get(user__username="admin")
    self.client.credentials(HTTP_AUTHORIZATION='Token ' + auth.key)
    resp = self.client.put(
        "/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/othernames/cf93e73f-91b6-4fad-bf76-0782c80297a8/links/4d8d71c4-20ea-4ed1-ae38-4b7d7550cdf6/",
        payload
    )
    self.assertEqual(resp.status_code, status.HTTP_200_OK)
Sinar/popit_ng
[ 21, 4, 21, 91, 1443407491 ]
def test_delete_person_othername_link_not_exist_authorized(self):
    """Authenticated DELETE on a nonexistent other-name link returns 404."""
    auth = Token.objects.get(user__username="admin")
    self.client.credentials(HTTP_AUTHORIZATION='Token ' + auth.key)
    resp = self.client.delete(
        "/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/othernames/cf93e73f-91b6-4fad-bf76-0782c80297a8/links/not_exist/"
    )
    self.assertEqual(resp.status_code, status.HTTP_404_NOT_FOUND)
Sinar/popit_ng
[ 21, 4, 21, 91, 1443407491 ]
def test_delete_person_othername_link_authorized(self):
    """Authenticated DELETE on an existing other-name link returns 204."""
    auth = Token.objects.get(user__username="admin")
    self.client.credentials(HTTP_AUTHORIZATION='Token ' + auth.key)
    resp = self.client.delete(
        "/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/othernames/cf93e73f-91b6-4fad-bf76-0782c80297a8/links/4d8d71c4-20ea-4ed1-ae38-4b7d7550cdf6/"
    )
    self.assertEqual(resp.status_code, status.HTTP_204_NO_CONTENT)
Sinar/popit_ng
[ 21, 4, 21, 91, 1443407491 ]
def test_list_person_contact_link(self):
    """GET lists the links nested under a person's contact detail."""
    resp = self.client.get(
        "/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/contact_details/2256ec04-2d1d-4994-b1f1-16d3f5245441/links/"
    )
    self.assertEqual(resp.status_code, status.HTTP_200_OK)
Sinar/popit_ng
[ 21, 4, 21, 91, 1443407491 ]
def test_show_person_contact_link(self):
    """GET on one link nested under a contact detail returns 200."""
    resp = self.client.get(
        "/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/contact_details/2256ec04-2d1d-4994-b1f1-16d3f5245441/links/6d0afb46-67d4-4708-87c4-4d51ce99767e/"
    )
    self.assertEqual(resp.status_code, status.HTTP_200_OK)
Sinar/popit_ng
[ 21, 4, 21, 91, 1443407491 ]
def test_create_person_contact_link_authorized(self):
    """Authenticated POST creates a link under a contact detail (201)."""
    payload = {
        "url": "http://github.com/sinar"
    }
    auth = Token.objects.get(user__username="admin")
    self.client.credentials(HTTP_AUTHORIZATION='Token ' + auth.key)
    resp = self.client.post(
        "/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/contact_details/2256ec04-2d1d-4994-b1f1-16d3f5245441/links/",
        payload
    )
    self.assertEqual(resp.status_code, status.HTTP_201_CREATED)
Sinar/popit_ng
[ 21, 4, 21, 91, 1443407491 ]
def test_update_person_contact_link_not_exist_authorized(self):
    """Authenticated PUT on a nonexistent contact-detail link returns 404."""
    payload = {
        "note": "Just a link"
    }
    auth = Token.objects.get(user__username="admin")
    self.client.credentials(HTTP_AUTHORIZATION='Token ' + auth.key)
    resp = self.client.put(
        "/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/contact_details/2256ec04-2d1d-4994-b1f1-16d3f5245441/links/not_exist/",
        payload
    )
    self.assertEqual(resp.status_code, status.HTTP_404_NOT_FOUND)
Sinar/popit_ng
[ 21, 4, 21, 91, 1443407491 ]
def test_update_person_contact_link_authorized(self):
    """Authenticated PUT updates an existing contact-detail link (200)."""
    payload = {
        "note": "Just a link"
    }
    auth = Token.objects.get(user__username="admin")
    self.client.credentials(HTTP_AUTHORIZATION='Token ' + auth.key)
    resp = self.client.put(
        "/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/contact_details/2256ec04-2d1d-4994-b1f1-16d3f5245441/links/6d0afb46-67d4-4708-87c4-4d51ce99767e/",
        payload
    )
    self.assertEqual(resp.status_code, status.HTTP_200_OK)
Sinar/popit_ng
[ 21, 4, 21, 91, 1443407491 ]
def test_delete_person_contact_link_not_exist_authorized(self):
    """Authenticated DELETE on a nonexistent contact-detail link returns 404."""
    auth = Token.objects.get(user__username="admin")
    self.client.credentials(HTTP_AUTHORIZATION='Token ' + auth.key)
    resp = self.client.delete(
        "/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/contact_details/2256ec04-2d1d-4994-b1f1-16d3f5245441/links/not_exist/"
    )
    self.assertEqual(resp.status_code, status.HTTP_404_NOT_FOUND)
Sinar/popit_ng
[ 21, 4, 21, 91, 1443407491 ]
def test_fetch_non_empty_field_person_serializer(self):
    """Serializing a fixture person exposes its translated name field."""
    fixture = Person.objects.untranslated().get(id='8497ba86-7485-42d2-9596-2ab14520f1f4')
    serialized = PersonSerializer(fixture, language='en').data
    self.assertEqual(serialized["name"], "John")
Sinar/popit_ng
[ 21, 4, 21, 91, 1443407491 ]
def test_fetch_not_empty_relation_person_serializer(self):
    """Serializing a fixture person includes its non-empty other_names relation."""
    fixture = Person.objects.untranslated().get(id='8497ba86-7485-42d2-9596-2ab14520f1f4')
    serialized = PersonSerializer(fixture, language='en').data
    self.assertTrue(serialized["other_names"])
Sinar/popit_ng
[ 21, 4, 21, 91, 1443407491 ]
def test_create_person_with_all_field_serializer(self):
    """Serializer creates a person with every top-level and nested field.

    Fix: the payload key was "death_data", a typo for the Popolo field
    "death_date" (spelled correctly elsewhere in this suite), so the
    death date was silently dropped instead of saved.
    """
    person_data = {
        "name": "joe",
        "family_name": "doe",
        "given_name": "joe jambul",
        "additional_name": "not john doe",
        "gender": "unknown",
        "summary": "person unit test api",
        "honorific_prefix": "Chief",
        "honorific_suffix": "of the fake people league",
        "biography": "He does not exists!!!!",
        "birth_date": "1950-01-01",
        "death_date": "2000-01-01",
        "email": "joejambul@sinarproject.org",
        "contact_details":[
            {
                "type":"twitter",
                "value": "sinarproject",
            }
        ],
        "links":[
            {
                "url":"http://sinarproject.org",
            }
        ],
        "identifiers":[
            {
                "identifier": "9089098098",
                "scheme": "rakyat",
            }
        ],
        "other_names":[
            {
                "name":"Jane",
                "family_name":"Jambul",
                "start_date": "1950-01-01",
                "end_date": "2010-01-01",
            }
        ]
    }
    person_serial = PersonSerializer(data=person_data, language='en')
    person_serial.is_valid()
    self.assertEqual(person_serial.errors, {})
    person_serial.save()
    person = Person.objects.language("en").get(name="joe")
    self.assertEqual(person.given_name, "joe jambul")
Sinar/popit_ng
[ 21, 4, 21, 91, 1443407491 ]
def test_create_links_person_serializers(self):
    """Partial serializer update appends a new link to an existing person."""
    payload = {
        "links": [
            {
                "url": "http://twitter.com/sweemeng",
            }
        ]
    }
    target = Person.objects.language('en').get(id='ab1a5788e5bae955c048748fa6af0e97')
    serializer = PersonSerializer(target, data=payload, partial=True, language='en')
    serializer.is_valid()
    self.assertEqual(serializer.errors, {})
    serializer.save()
    # Reload and confirm the link row exists.
    reloaded = Person.objects.language('en').get(id='ab1a5788e5bae955c048748fa6af0e97')
    created = reloaded.links.language("en").get(url="http://twitter.com/sweemeng")
    self.assertEqual(created.url, "http://twitter.com/sweemeng")
Sinar/popit_ng
[ 21, 4, 21, 91, 1443407491 ]
def test_update_create_nested_links_persons_serializer(self):
    """Partial update adds a link nested inside an existing contact detail."""
    payload = {
        "id":"ab1a5788e5bae955c048748fa6af0e97",
        "contact_details":[
            {
                "id": "a66cb422-eec3-4861-bae1-a64ae5dbde61",
                "links": [{
                    "url": "http://facebook.com",
                }]
            }
        ],
    }
    target = Person.objects.language('en').get(id='ab1a5788e5bae955c048748fa6af0e97')
    serializer = PersonSerializer(target, data=payload, partial=True, language='en')
    serializer.is_valid()
    self.assertEqual(serializer.errors, {})
    serializer.save()
    reloaded = Person.objects.language('en').get(id='ab1a5788e5bae955c048748fa6af0e97')
    # There should be only 1 links in that contact
    # NOTE(review): this asserts the pre-existing sinarproject.org link
    # survived the update; the filter/assert pair only proves existence.
    contact = reloaded.contact_details.language('en').get(id='a66cb422-eec3-4861-bae1-a64ae5dbde61')
    matches = contact.links.language('en').filter(url="http://sinarproject.org")
    self.assertEqual(matches[0].url, "http://sinarproject.org")
Sinar/popit_ng
[ 21, 4, 21, 91, 1443407491 ]
def test_create_identifier_person_serializer(self):
    """Partial update adds a new identifier to an existing person."""
    payload = {
        "identifiers": [
            {
                "scheme": "IC",
                "identifier": "129031309",
            }
        ]
    }
    target = Person.objects.language('en').get(id='ab1a5788e5bae955c048748fa6af0e97')
    serializer = PersonSerializer(target, data=payload, partial=True, language='en')
    serializer.is_valid()
    self.assertEqual(serializer.errors, {})
    serializer.save()
    reloaded = Person.objects.language('en').get(id='ab1a5788e5bae955c048748fa6af0e97')
    created = reloaded.identifiers.language('en').get(identifier="129031309")
    self.assertEqual(created.scheme, "IC")
Sinar/popit_ng
[ 21, 4, 21, 91, 1443407491 ]
def test_create_contact_person_serializer(self):
    """Partial update adds a new contact detail to an existing person."""
    payload = {
        "contact_details": [
            {
                "type":"twitter",
                "value": "sinarproject",
            }
        ]
    }
    target = Person.objects.language('en').get(id='8497ba86-7485-42d2-9596-2ab14520f1f4')
    serializer = PersonSerializer(target, data=payload, partial=True, language='en')
    serializer.is_valid()
    self.assertEqual(serializer.errors, {})
    serializer.save()
    reloaded = Person.objects.language('en').get(id='8497ba86-7485-42d2-9596-2ab14520f1f4')
    created = reloaded.contact_details.language('en').get(type="twitter")
    self.assertEqual(created.value, "sinarproject")
Sinar/popit_ng
[ 21, 4, 21, 91, 1443407491 ]
def test_create_other_name_person_serializer(self):
    """Partial update adds a new other-name to an existing person."""
    payload = {
        "other_names": [
            {
                "name": "jane",
                "family_name": "jambul",
                "given_name": "test person",
                "start_date": "1950-01-01",
                "end_date": "2010-01-01",
            }
        ]
    }
    target = Person.objects.language('en').get(id='ab1a5788e5bae955c048748fa6af0e97')
    serializer = PersonSerializer(target, data=payload, partial=True, language='en')
    serializer.is_valid()
    self.assertEqual(serializer.errors, {})
    serializer.save()
    reloaded = Person.objects.language('en').get(id='ab1a5788e5bae955c048748fa6af0e97')
    created = reloaded.other_names.language('en').get(name="jane")
    self.assertEqual(created.given_name, "test person")
Sinar/popit_ng
[ 21, 4, 21, 91, 1443407491 ]
def test_create_person_invalid_date_serializer(self):
    """Invalid birth/death dates must make the serializer report errors.

    Fix: the payload key was "death_data", a typo for "death_date",
    so the invalid death date was never actually validated — only the
    invalid birth_date triggered the errors.
    """
    person_data = {
        "name": "joe",
        "family_name": "doe",
        "given_name": "joe jambul",
        "additional_name": "not john doe",
        "gender": "unknown",
        "summary": "person unit test api",
        "honorific_prefix": "Chief",
        "honorific_suffix": "of the fake people league",
        "biography": "He does not exists!!!!",
        "birth_date": "invalid date",
        "death_date": "invalid date",
        "email": "joejambul@sinarproject.org",
    }
    person_serial = PersonSerializer(data=person_data, language='en')
    person_serial.is_valid()
    self.assertNotEqual(person_serial.errors, {})
Sinar/popit_ng
[ 21, 4, 21, 91, 1443407491 ]
def test_create_person_translated_serializer(self):
    """Serializer creates a person in the Malay ('ms') translation."""
    payload = {
        "name": "joe",
        "family_name": "doe",
        "given_name": "joe jambul",
        "additional_name": "bukan john doe",
        "gender": "tak tahu",
        "summary": "orang ujian",
        "honorific_prefix": "Datuk Seri",
        "biography": "Dia Tak wujud!!!!",
        "email": "joejambul@sinarproject.org",
    }
    serializer = PersonSerializer(data=payload, language='ms')
    serializer.is_valid()
    self.assertEqual(serializer.errors, {})
    serializer.save()
    created = Person.objects.language("ms").get(name="joe")
    self.assertEqual(created.given_name, "joe jambul")
Sinar/popit_ng
[ 21, 4, 21, 91, 1443407491 ]
def test_load_translated_person_membership_organization(self):
    """Nested membership organizations are serialized in the requested language."""
    fixture = Person.objects.untranslated().get(id="078541c9-9081-4082-b28f-29cbb64440cb")
    serialized = PersonSerializer(fixture, language="ms").data
    for membership in serialized["memberships"]:
        # Memberships without an organization have a falsy value; skip them.
        if not membership["organization"]:
            continue
        self.assertEqual(membership["organization"]["language_code"], "ms")
Sinar/popit_ng
[ 21, 4, 21, 91, 1443407491 ]
def test_fetch_person_minimized_serializer(self):
    """MinPersonSerializer must include one entry per membership.

    Fix: the original called `assertTrue(len(...), membership_count)`,
    which treats the expected count as the failure *message* and only
    checks that the length is truthy — the count was never compared.
    `assertEqual` performs the intended comparison.
    """
    person = Person.objects.untranslated().get(id='8497ba86-7485-42d2-9596-2ab14520f1f4')
    person_serializer = MinPersonSerializer(person)
    membership_count = person.memberships.count()
    self.assertEqual(len(person_serializer.data["memberships"]), membership_count)
Sinar/popit_ng
[ 21, 4, 21, 91, 1443407491 ]
def test_view_person_list(self):
    """Person list endpoint returns 200 with the expected pagination metadata."""
    resp = self.client.get("/en/persons/")
    self.assertEqual(resp.status_code, status.HTTP_200_OK)
    self.assertTrue("page" in resp.data)
    # Page size must follow the project-wide DRF setting.
    self.assertEqual(resp.data["per_page"], settings.REST_FRAMEWORK["PAGE_SIZE"])
    self.assertEqual(resp.data["num_pages"], 1)
Sinar/popit_ng
[ 21, 4, 21, 91, 1443407491 ]
def test_view_person_detail_not_exist(self):
    """GET on a nonexistent person id returns 404."""
    resp = self.client.get("/en/persons/not_exist/")
    self.assertEqual(resp.status_code, status.HTTP_404_NOT_FOUND)
Sinar/popit_ng
[ 21, 4, 21, 91, 1443407491 ]
def test_create_person_authorized(self):
    """Authenticated POST creates a person with nested sub-resources (201).

    Fix: the payload key was "death_data", a typo for the Popolo field
    "death_date" (as used by test_person_api_invalid_date), so the death
    date was silently ignored by the API.
    """
    person_data = {
        "name": "joe",
        "family_name": "doe",
        "given_name": "joe jambul",
        "additional_name": "not john doe",
        "gender": "unknown",
        "summary": "person unit test api",
        "honorific_prefix": "Chief",
        "honorific_suffix": "of the fake people league",
        "biography": "He does not exists!!!!",
        "birth_date": "1950-01-01",
        "death_date": "2000-01-01",
        "email": "joejambul@sinarproject.org",
        "contact_details":[
            {
                "type":"twitter",
                "value": "sinarproject",
            }
        ],
        "links":[
            {
                "url":"http://sinarproject.org",
            }
        ],
        "identifiers":[
            {
                "identifier": "9089098098",
                "scheme": "rakyat",
            }
        ],
        "other_names":[
            {
                "name":"Jane",
                "family_name":"Jambul",
                "start_date": "1950-01-01",
                "end_date": "2010-01-01",
            }
        ]
    }
    token = Token.objects.get(user__username="admin")
    self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
    response = self.client.post("/en/persons/", person_data)
    self.assertEqual(response.status_code, status.HTTP_201_CREATED)
    person = Person.objects.language("en").get(name="joe")
    self.assertEqual(person.name, "joe")
Sinar/popit_ng
[ 21, 4, 21, 91, 1443407491 ]
def test_update_person_not_exist_unauthorized(self):
    """Anonymous PUT is rejected with 401 before any existence check."""
    payload = {
        "given_name": "jerry jambul",
    }
    resp = self.client.put("/en/persons/not_exist/", payload)
    self.assertEqual(resp.status_code, status.HTTP_401_UNAUTHORIZED)
Sinar/popit_ng
[ 21, 4, 21, 91, 1443407491 ]
def test_update_person_not_exist_authorized(self):
    """Authenticated PUT on a nonexistent person returns 404."""
    payload = {
        "given_name": "jerry jambul",
    }
    auth = Token.objects.get(user__username="admin")
    self.client.credentials(HTTP_AUTHORIZATION='Token ' + auth.key)
    resp = self.client.put("/en/persons/not_exist/", payload)
    self.assertEqual(resp.status_code, status.HTTP_404_NOT_FOUND)
Sinar/popit_ng
[ 21, 4, 21, 91, 1443407491 ]
def test_create_person_links_authorized(self):
    """Authenticated PUT appends a new link to an existing person."""
    payload = {
        "links": [
            {
                "url": "http://twitter.com/sweemeng",
            }
        ]
    }
    auth = Token.objects.get(user__username="admin")
    self.client.credentials(HTTP_AUTHORIZATION='Token ' + auth.key)
    resp = self.client.put("/en/persons/ab1a5788e5bae955c048748fa6af0e97/", payload)
    self.assertEqual(resp.status_code, status.HTTP_200_OK)
    # The new link must be persisted on the person.
    reloaded = Person.objects.language('en').get(id='ab1a5788e5bae955c048748fa6af0e97')
    created = reloaded.links.language("en").get(url="http://twitter.com/sweemeng")
    self.assertEqual(created.url, "http://twitter.com/sweemeng")
Sinar/popit_ng
[ 21, 4, 21, 91, 1443407491 ]
def test_update_person_links_authorized(self):
    """Authenticated PUT updates an existing link identified by nested id."""
    payload = {
        "id":"ab1a5788e5bae955c048748fa6af0e97",
        "links":[
            {
                "id": "a4ffa24a9ef3cbcb8cfaa178c9329367",
                "note": "just a random repo"
            }
        ]
    }
    auth = Token.objects.get(user__username="admin")
    self.client.credentials(HTTP_AUTHORIZATION='Token ' + auth.key)
    resp = self.client.put("/en/persons/ab1a5788e5bae955c048748fa6af0e97/", payload)
    self.assertEqual(resp.status_code, status.HTTP_200_OK)
    reloaded = Person.objects.language('en').get(id='ab1a5788e5bae955c048748fa6af0e97')
    updated = reloaded.links.language("en").get(id="a4ffa24a9ef3cbcb8cfaa178c9329367")
    self.assertEqual(updated.note, "just a random repo")
Sinar/popit_ng
[ 21, 4, 21, 91, 1443407491 ]
def test_create_nested_person_links_authorized(self):
    """Authenticated PUT adds a link inside an existing contact detail."""
    payload = {
        "id":"ab1a5788e5bae955c048748fa6af0e97",
        "contact_details":[
            {
                "id": "a66cb422-eec3-4861-bae1-a64ae5dbde61",
                "links": [{
                    "url": "http://facebook.com",
                }]
            }
        ],
    }
    auth = Token.objects.get(user__username="admin")
    self.client.credentials(HTTP_AUTHORIZATION='Token ' + auth.key)
    resp = self.client.put("/en/persons/ab1a5788e5bae955c048748fa6af0e97/", payload)
    self.assertEqual(resp.status_code, status.HTTP_200_OK)
    reloaded = Person.objects.language('en').get(id='ab1a5788e5bae955c048748fa6af0e97')
    # There should be only 1 links in that contact
    # The pre-existing sinarproject.org link must survive the update.
    contact = reloaded.contact_details.language('en').get(id='a66cb422-eec3-4861-bae1-a64ae5dbde61')
    found = any(
        entry.url == "http://sinarproject.org"
        for entry in contact.links.language('en').all()
    )
    self.assertTrue(found, "http://sinarproject.org does not exist")
Sinar/popit_ng
[ 21, 4, 21, 91, 1443407491 ]
def test_update_nested_person_links_authorized(self):
    """Authenticated PUT updates a link nested two levels deep (identifier)."""
    payload = {
        "id":"8497ba86-7485-42d2-9596-2ab14520f1f4",
        "identifiers":[
            {
                "id": "af7c01b5-1c4f-4c08-9174-3de5ff270bdb",
                "links": [{
                    "id": "9c9a2093-c3eb-4b51-b869-0d3b4ab281fd",
                    "note": "this is just a test note",
                }]
            }
        ],
    }
    auth = Token.objects.get(user__username="admin")
    self.client.credentials(HTTP_AUTHORIZATION='Token ' + auth.key)
    resp = self.client.put("/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/", payload)
    self.assertEqual(resp.status_code, status.HTTP_200_OK)
    reloaded = Person.objects.language('en').get(id='8497ba86-7485-42d2-9596-2ab14520f1f4')
    ident = reloaded.identifiers.language('en').get(id="af7c01b5-1c4f-4c08-9174-3de5ff270bdb")
    nested_link = ident.links.language('en').get(id="9c9a2093-c3eb-4b51-b869-0d3b4ab281fd")
    self.assertEqual(nested_link.note, "this is just a test note")
Sinar/popit_ng
[ 21, 4, 21, 91, 1443407491 ]
def test_create_other_names_authorized(self):
    """Authenticated PUT appends a new other-name to an existing person."""
    payload = {
        "other_names": [
            {
                "name": "jane",
                "family_name": "jambul",
                "given_name": "test person",
                "start_date": "1950-01-01",
                "end_date": "2010-01-01",
            }
        ]
    }
    auth = Token.objects.get(user__username="admin")
    self.client.credentials(HTTP_AUTHORIZATION='Token ' + auth.key)
    resp = self.client.put("/en/persons/ab1a5788e5bae955c048748fa6af0e97/", payload)
    self.assertEqual(resp.status_code, status.HTTP_200_OK)
    reloaded = Person.objects.language('en').get(id='ab1a5788e5bae955c048748fa6af0e97')
    created = reloaded.other_names.language('en').get(name="jane")
    self.assertEqual(created.given_name, "test person")
Sinar/popit_ng
[ 21, 4, 21, 91, 1443407491 ]
def test_update_other_names_authorized(self):
    """Authenticated PUT updates an existing other-name by nested id."""
    payload = {
        "other_names": [
            {
                "id": "cf93e73f-91b6-4fad-bf76-0782c80297a8",
                "family_name": "jambul",
            }
        ]
    }
    auth = Token.objects.get(user__username="admin")
    self.client.credentials(HTTP_AUTHORIZATION='Token ' + auth.key)
    resp = self.client.put("/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/", payload)
    self.assertEqual(resp.status_code, status.HTTP_200_OK)
    reloaded = Person.objects.language('en').get(id='8497ba86-7485-42d2-9596-2ab14520f1f4')
    updated = reloaded.other_names.language('en').get(id="cf93e73f-91b6-4fad-bf76-0782c80297a8")
    self.assertEqual(updated.family_name, "jambul")
Sinar/popit_ng
[ 21, 4, 21, 91, 1443407491 ]
def test_delete_person_not_exist_unauthorized(self):
    """Anonymous DELETE is rejected with 401 before any existence check."""
    resp = self.client.delete("/en/persons/not_exist/")
    self.assertEqual(resp.status_code, status.HTTP_401_UNAUTHORIZED)
Sinar/popit_ng
[ 21, 4, 21, 91, 1443407491 ]
def test_delete_person_not_exist_authorized(self):
    """Authenticated DELETE on a nonexistent person returns 404."""
    auth = Token.objects.get(user__username="admin")
    self.client.credentials(HTTP_AUTHORIZATION='Token ' + auth.key)
    resp = self.client.delete("/en/persons/not_exist/")
    self.assertEqual(resp.status_code, status.HTTP_404_NOT_FOUND)
Sinar/popit_ng
[ 21, 4, 21, 91, 1443407491 ]
def test_person_api_invalid_date(self):
    """POSTing unparseable dates yields 400 with an errors payload."""
    payload = {
        "name": "joe",
        "family_name": "doe",
        "given_name": "joe jambul",
        "additional_name": "not john doe",
        "gender": "unknown",
        "summary": "person unit test api",
        "honorific_prefix": "Chief",
        "honorific_suffix": "of the fake people league",
        "biography": "He does not exists!!!!",
        "birth_date": "invalid date",
        "death_date": "invalid date",
        "email": "joejambul@sinarproject.org",
    }
    auth = Token.objects.get(user__username="admin")
    self.client.credentials(HTTP_AUTHORIZATION='Token ' + auth.key)
    resp = self.client.post("/en/persons/", payload)
    self.assertEqual(resp.status_code, status.HTTP_400_BAD_REQUEST)
    self.assertTrue("errors" in resp.data)
Sinar/popit_ng
[ 21, 4, 21, 91, 1443407491 ]
def test_create_person_authorized_translated(self):
    """Authenticated POST to the 'ms' endpoint creates a Malay translation."""
    payload = {
        "name": "joe",
        "family_name": "doe",
        "given_name": "joe jambul",
        "additional_name": "bukan john doe",
        "gender": "tak tahu",
        "summary": "orang ujian",
        "honorific_prefix": "Datuk Seri",
        "biography": "Dia Tak wujud!!!!",
        "email": "joejambul@sinarproject.org",
    }
    auth = Token.objects.get(user__username="admin")
    self.client.credentials(HTTP_AUTHORIZATION='Token ' + auth.key)
    resp = self.client.post("/ms/persons/", payload)
    self.assertEqual(resp.status_code, status.HTTP_201_CREATED)
    created = Person.objects.language("ms").get(name="joe")
    self.assertEqual(created.name, "joe")
Sinar/popit_ng
[ 21, 4, 21, 91, 1443407491 ]
def test_create_person_identifier_blank_id_authorized(self):
    """POSTing an identifier with a blank id gets a server-assigned id.

    Fixes: (1) "death_data" typo corrected to "death_date"; (2) removed a
    leftover `logging.warn(...)` debug line — `warn` is deprecated, the
    logged field (other_names) is unrelated to this test, and it indexed
    into response.data *before* the status assertion, so an error response
    surfaced as a KeyError instead of a clean assertion failure.
    """
    person_data = {
        "name": "joe",
        "family_name": "doe",
        "given_name": "joe jambul",
        "additional_name": "not john doe",
        "gender": "unknown",
        "summary": "person unit test api",
        "honorific_prefix": "Chief",
        "honorific_suffix": "of the fake people league",
        "biography": "He does not exists!!!!",
        "birth_date": "1950-01-01",
        "death_date": "2000-01-01",
        "email": "joejambul@sinarproject.org",
        "identifiers":[
            {
                "id": "",
                "identifier": "9089098098",
                "scheme": "rakyat",
            }
        ],
    }
    token = Token.objects.get(user__username="admin")
    self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
    response = self.client.post("/en/persons/", person_data)
    self.assertEqual(response.status_code, status.HTTP_201_CREATED)
    identifiers = response.data["result"]["identifiers"][0]
    self.assertNotEqual(identifiers["id"], "")
Sinar/popit_ng
[ 21, 4, 21, 91, 1443407491 ]
def test_create_person_links_blank_id_authorized(self):
    """POSTing a link with a blank id gets a server-assigned id.

    Fixes: (1) "death_data" typo corrected to "death_date"; (2) removed a
    leftover `logging.warn(...)` debug line — `warn` is deprecated, the
    logged field (other_names) is unrelated to this test, and it indexed
    into response.data *before* the status assertion, so an error response
    surfaced as a KeyError instead of a clean assertion failure.
    """
    person_data = {
        "name": "joe",
        "family_name": "doe",
        "given_name": "joe jambul",
        "additional_name": "not john doe",
        "gender": "unknown",
        "summary": "person unit test api",
        "honorific_prefix": "Chief",
        "honorific_suffix": "of the fake people league",
        "biography": "He does not exists!!!!",
        "birth_date": "1950-01-01",
        "death_date": "2000-01-01",
        "email": "joejambul@sinarproject.org",
        "links":[
            {
                "id": "",
                "url":"http://sinarproject.org",
            }
        ],
    }
    token = Token.objects.get(user__username="admin")
    self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
    response = self.client.post("/en/persons/", person_data)
    self.assertEqual(response.status_code, status.HTTP_201_CREATED)
    links = response.data["result"]["links"][0]
    self.assertNotEqual(links["id"], "")
Sinar/popit_ng
[ 21, 4, 21, 91, 1443407491 ]
def test_create_person_with_all_field_birthdate_deathdate_blank_serializer(self):
    """Serializer accepts blank birth/death dates alongside nested fields.

    Fix: the payload key was "death_data", a typo for "death_date", so
    the blank-death-date case this test is named for was never exercised.
    """
    person_data = {
        "name": "joe",
        "family_name": "doe",
        "given_name": "joe jambul",
        "additional_name": "not john doe",
        "gender": "unknown",
        "summary": "person unit test api",
        "honorific_prefix": "Chief",
        "honorific_suffix": "of the fake people league",
        "biography": "He does not exists!!!!",
        "birth_date": "",
        "death_date": "",
        "email": "joejambul@sinarproject.org",
        "contact_details":[
            {
                "type":"twitter",
                "value": "sinarproject",
            }
        ],
        "links":[
            {
                "url":"http://sinarproject.org",
            }
        ],
        "identifiers":[
            {
                "identifier": "9089098098",
                "scheme": "rakyat",
            }
        ],
        "other_names":[
            {
                "name":"Jane",
                "family_name":"Jambul",
                "start_date": "1950-01-01",
                "end_date": "2010-01-01",
            }
        ]
    }
    person_serial = PersonSerializer(data=person_data, language='en')
    person_serial.is_valid()
    self.assertEqual(person_serial.errors, {})
    person_serial.save()
    person = Person.objects.language("en").get(name="joe")
    self.assertEqual(person.given_name, "joe jambul")
Sinar/popit_ng
[ 21, 4, 21, 91, 1443407491 ]
def world_to_image_projection(p_world, intrinsics, pose_w2c):
  """Project points in the world frame to the image plane.

  Args:
    p_world: [HEIGHT, WIDTH, 3] points in the world's coordinate frame.
    intrinsics: [3, 3] camera's intrinsic matrix.
    pose_w2c: [3, 4] camera pose matrix (world to camera).

  Returns:
    [HEIGHT, WIDTH, 2] points in the image coordinate.
    [HEIGHT, WIDTH, 1] the z depth.
  """
  shape = p_world.shape.as_list()
  height, width = shape[0], shape[1]
  # Append a homogeneous coordinate of 1 to every world point.
  p_world_homogeneous = tf.concat([p_world, tf.ones([height, width, 1])], -1)
  # Apply the [3, 4] world-to-camera transform per pixel.
  p_camera = tf.squeeze(
      tf.matmul(pose_w2c[tf.newaxis, tf.newaxis, :],
                tf.expand_dims(p_world_homogeneous, -1)), -1)
  # Flip the z axis — presumably converting to a -z-forward camera
  # convention; TODO confirm against the dataset's coordinate frame.
  p_camera = p_camera*tf.constant([1., 1., -1.], shape=[1, 1, 3])
  p_image = tf.squeeze(tf.matmul(intrinsics[tf.newaxis, tf.newaxis, :],
                                 tf.expand_dims(p_camera, -1)), -1)
  z = p_image[:, :, -1:]
  # Perspective divide; divide_no_nan guards pixels with zero depth.
  return tf.math.divide_no_nan(p_image[:, :, :2], z), z
google-research/google-research
[ 27788, 6881, 27788, 944, 1538678568 ]
def overlap_mask(depth1, pose1_c2w, depth2, pose2_c2w, intrinsics):
  """Compute the overlap masks of two views using triangulation.

  The masks have the same shape of the input images. A pixel value is true
  if it can be seen by both cameras.

  Args:
    depth1: [HEIGHT, WIDTH, 1] the depth map of the first view.
    pose1_c2w: [3, 4] camera pose matrix (camera to world) of the first
      view. pose1_c2w[:, :3] is the rotation and pose1_c2w[:, -1] is the
      translation.
    depth2: [HEIGHT, WIDTH, 1] the depth map of the second view.
    pose2_c2w: [3, 4] camera pose matrix (camera to world) of the second
      view. pose2_c2w[:, :3] is the rotation and pose2_c2w[:, -1] is the
      translation.
    intrinsics: [3, 3] camera's intrinsic matrix.

  Returns:
    [HEIGHT, WIDTH] two overlap masks of the two inputs respectively.
  """
  # Invert the camera-to-world poses (padded to 4x4) to obtain the
  # world-to-camera transforms; keep only the top [3, 4] rows.
  pose1_w2c = tf.matrix_inverse(
      tf.concat([pose1_c2w, tf.constant([[0., 0., 0., 1.]])], 0))[:3]
  pose2_w2c = tf.matrix_inverse(
      tf.concat([pose2_c2w, tf.constant([[0., 0., 0., 1.]])], 0))[:3]

  # Lift each view's depth map to world points, then project into the
  # other view's image plane.
  p_world1 = image_to_world_projection(depth1, intrinsics, pose1_c2w)
  p_image1_in_2, z1_c2 = world_to_image_projection(
      p_world1, intrinsics, pose2_w2c)
  p_world2 = image_to_world_projection(depth2, intrinsics, pose2_c2w)
  p_image2_in_1, z2_c1 = world_to_image_projection(
      p_world2, intrinsics, pose1_w2c)

  shape = depth1.shape.as_list()
  height, width = shape[0], shape[1]
  height = tf.cast(height, tf.float32)
  width = tf.cast(width, tf.float32)
  # Error tolerance.
  eps = 1e-4
  # check the object seen by camera 2 is also projected to camera 1's image
  # plane and in front of the camera 1.
  mask_h2_in_1 = tf.logical_and(
      tf.less_equal(p_image2_in_1[:, :, 1], height+eps),
      tf.greater_equal(p_image2_in_1[:, :, 1], 0.-eps))
  mask_w2_in_1 = tf.logical_and(
      tf.less_equal(p_image2_in_1[:, :, 0], width+eps),
      tf.greater_equal(p_image2_in_1[:, :, 0], 0.-eps))
  # check the projected points are within the image boundaries and in front
  # of the camera.
  mask2_in_1 = tf.logical_and(
      tf.logical_and(mask_h2_in_1, mask_w2_in_1),
      tf.squeeze(z2_c1, -1) > 0)

  # check the object seen by camera 1 is also projected to camera 2's image
  # plane and in front of the camera 2.
  mask_h1_in_2 = tf.logical_and(
      tf.less_equal(p_image1_in_2[:, :, 1], height+eps),
      tf.greater_equal(p_image1_in_2[:, :, 1], 0.-eps))
  mask_w1_in_2 = tf.logical_and(
      tf.less_equal(p_image1_in_2[:, :, 0], width+eps),
      tf.greater_equal(p_image1_in_2[:, :, 0], 0.-eps))
  # check the projected points are within the image boundaries and in front
  # of the camera.
  mask1_in_2 = tf.logical_and(
      tf.logical_and(mask_h1_in_2, mask_w1_in_2),
      tf.squeeze(z1_c2, -1) > 0)
  return mask1_in_2, mask2_in_1
google-research/google-research
[ 27788, 6881, 27788, 944, 1538678568 ]
def generate_from_meta(meta_data_path,
                       pano_data_dir,
                       pano_height=1024,
                       pano_width=2048,
                       output_height=512,
                       output_width=512):
  """Generate the stereo image dataset from Matterport3D using the meta data.

  Example call:
  ds = generate_from_meta(
      meta_data_path='matterport3d/saved_meta/R90_fov90/test_meta/',
      pano_data_dir='matterport3d/pano/')

  Args:
    meta_data_path: (string) the path to the meta data files.
    pano_data_dir: (string) the path to the panorama images of the
      Matterport3D.
    pano_height: (int) the height dimension of the panorama images.
    pano_width: (int) the width dimension of the panorama images.
    output_height: (int) the height dimension of the output perspective
      images.
    output_width: (int) the width dimension of the output perspective
      images.

  Returns:
    Tensorflow Dataset.
  """

  def load_text(file_path, n_lines=200):
    """Load text data from a file."""
    return tf.data.Dataset.from_tensor_slices(
        tf.data.experimental.get_single_element(
            tf.data.TextLineDataset(file_path).batch(n_lines)))

  def load_single_image(filename):
    """Load a single image given the filename."""
    image = tf.image.decode_jpeg(tf.read_file(filename), 3)
    image = tf.image.convert_image_dtype(image, tf.float32)
    image.set_shape([pano_height, pano_width, 3])
    return image

  def string_to_matrix(s, shape):
    """Decode strings to matrices tensor."""
    m = tf.reshape(
        tf.stack([tf.decode_csv(s, [0.0] * np.prod(shape))], 0), shape)
    m.set_shape(shape)
    return m

  def decode_line(line):
    """Decode text lines."""
    DataPair = collections.namedtuple(
        'DataPair', ['src_img', 'trt_img', 'fov', 'rotation', 'translation'])
    # Space-delimited line: scene id, two pano ids, fov, then flattened
    # matrices/vectors (10 fields total).
    splitted = tf.decode_csv(line, ['']*10, field_delim=' ')
    img1 = load_single_image(pano_data_dir+splitted[0]+'/'+splitted[1]+'.jpeg')
    img2 = load_single_image(pano_data_dir+splitted[0]+'/'+splitted[2]+'.jpeg')
    fov = string_to_matrix(splitted[3], [1])
    r1 = string_to_matrix(splitted[4], [3, 3])
    t1 = string_to_matrix(splitted[5], [3])
    r2 = string_to_matrix(splitted[6], [3, 3])
    t2 = string_to_matrix(splitted[7], [3])
    sampled_r1 = string_to_matrix(splitted[8], [3, 3])
    sampled_r2 = string_to_matrix(splitted[9], [3, 3])
    # Relative rotation from camera 2 to camera 1 under sampled rotations.
    r_c2_to_c1 = tf.matmul(sampled_r1, sampled_r2, transpose_a=True)
    # Unit translation direction expressed in camera 1's (sampled) frame.
    t_c1 = tf.squeeze(tf.matmul(sampled_r1,
                                tf.expand_dims(tf.nn.l2_normalize(t2-t1), -1),
                                transpose_a=True))
    sampled_rotation = tf.matmul(tf.stack([sampled_r1, sampled_r2], 0),
                                 tf.stack([r1, r2], 0),
                                 transpose_a=True)
    # Render perspective crops from both panoramas at the sampled rotations.
    sampled_views = transformation.rectilinear_projection(
        tf.stack([img1, img2], 0),
        [output_height, output_width],
        fov,
        tf.matrix_transpose(sampled_rotation))
    src_img, trt_img = sampled_views[0], sampled_views[1]
    return DataPair(src_img, trt_img, fov, r_c2_to_c1, t_c1)

  # meta_data_path has slash '/' at the end.
  ds = tf.data.Dataset.list_files(meta_data_path+'*')
  ds = ds.flat_map(load_text)
  ds = ds.map(decode_line)
  return ds
google-research/google-research
[ 27788, 6881, 27788, 944, 1538678568 ]
def __init__(self, reader_class, common_queue, num_readers=4,
             reader_kwargs=None):
  """ParallelReader creates num_readers instances of the reader_class.

  Each instance is created by calling the `reader_class` function passing
  the arguments specified in `reader_kwargs` as in:
    reader_class(**read_kwargs)

  When you read from a ParallelReader, with its `read()` method, you just
  dequeue examples from the `common_queue`.

  The readers will read different files in parallel, asynchronously
  enqueueing their output into `common_queue`. The `common_queue.dtypes`
  must be [tf.string, tf.string]

  Because each reader can read from a different file, the examples in the
  `common_queue` could be from different files. Due to the asynchronous
  reading there is no guarantee that all the readers will read the same
  number of examples.

  If the `common_queue` is a shuffling queue, then the examples are
  shuffled.

  Usage:
    common_queue = tf.queue.RandomShuffleQueue(
        capacity=256,
        min_after_dequeue=128,
        dtypes=[tf.string, tf.string])
    p_reader = ParallelReader(tf.compat.v1.TFRecordReader, common_queue)

    common_queue = tf.queue.FIFOQueue(
        capacity=256,
        dtypes=[tf.string, tf.string])
    p_reader = ParallelReader(readers, common_queue, num_readers=2)

  Args:
    reader_class: one of the io_ops.ReaderBase subclasses ex: TFRecordReader
    common_queue: a Queue to hold (key, value pairs) with `dtypes` equal to
      [tf.string, tf.string]. Must be one of the data_flow_ops.Queues
      instances, ex. `tf.queue.FIFOQueue()`,
      `tf.queue.RandomShuffleQueue()`, ...
    num_readers: a integer, number of instances of reader_class to create.
    reader_kwargs: an optional dict of kwargs to create the readers.

  Raises:
    TypeError: if `common_queue.dtypes` is not [tf.string, tf.string].
  """
  # Validate the queue element signature up front: exactly two string
  # components (key, value).
  if len(common_queue.dtypes) != 2:
    raise TypeError('common_queue.dtypes must be [tf.string, tf.string]')
  for dtype in common_queue.dtypes:
    if not dtype.is_compatible_with(tf_dtypes.string):
      raise TypeError('common_queue.dtypes must be [tf.string, tf.string]')

  reader_kwargs = reader_kwargs or {}
  self._readers = [reader_class(**reader_kwargs) for _ in range(num_readers)]
  self._common_queue = common_queue
google-research/tf-slim
[ 334, 98, 334, 11, 1561681288 ]
def num_readers(self):
  """Return how many underlying reader instances this object owns."""
  readers = self._readers
  return len(readers)
google-research/tf-slim
[ 334, 98, 334, 11, 1561681288 ]
def common_queue(self):
  """Return the shared queue that all readers enqueue into."""
  queue = self._common_queue
  return queue
google-research/tf-slim
[ 334, 98, 334, 11, 1561681288 ]
def read_up_to(self, queue, num_records, name=None):
  """Returns up to num_records (key, value pairs) produced by a reader.

  Will dequeue a work unit from queue if necessary (e.g., when the Reader
  needs to start reading from a new file since it has finished with the
  previous file). It may return less than num_records even before the last
  batch.

  **Note** This operation is not supported by all types of `common_queue`s.
  If a `common_queue` does not support `dequeue_up_to()`, then a
  `tf.errors.UnimplementedError` is raised.

  Args:
    queue: A Queue or a mutable string Tensor representing a handle to a
      Queue, with string work items.
    num_records: Number of records to read.
    name: A name for the operation (optional).

  Returns:
    A tuple of Tensors (keys, values) from common_queue.
    keys: A 1-D string Tensor.
    values: A 1-D string Tensor.
  """
  # Wire the readers to the filename queue, then batch-dequeue their output.
  self._configure_readers_by(queue)
  return self._common_queue.dequeue_up_to(num_records, name)
google-research/tf-slim
[ 334, 98, 334, 11, 1561681288 ]
def num_records_produced(self, name=None):
  """Returns the number of records this reader has produced.

  Args:
    name: A name for the operation (optional).

  Returns:
    An int64 Tensor.
  """
  # Sum the per-reader production counters into a single scalar.
  num_records = [r.num_records_produced() for r in self._readers]
  return math_ops.add_n(num_records, name=name)
google-research/tf-slim
[ 334, 98, 334, 11, 1561681288 ]
def parallel_read(data_sources,
                  reader_class,
                  num_epochs=None,
                  num_readers=4,
                  reader_kwargs=None,
                  shuffle=True,
                  dtypes=None,
                  capacity=256,
                  min_after_dequeue=128,
                  seed=None,
                  scope=None):
  """Reads multiple records in parallel from data_sources using n readers.

  It uses a ParallelReader to read from multiple files in parallel using
  multiple readers created using `reader_class` with `reader_kwargs'.

  If shuffle is True the common_queue would be a RandomShuffleQueue
  otherwise it would be a FIFOQueue.

  Usage:
    data_sources = ['path_to/train*']
    key, value = parallel_read(data_sources, tf.CSVReader, num_readers=4)

  Args:
    data_sources: a list/tuple of files or the location of the data, i.e.
      /path/to/train@128, /path/to/train* or /tmp/.../train*
    reader_class: one of the io_ops.ReaderBase subclasses ex: TFRecordReader
    num_epochs: The number of times each data source is read. If left as
      None, the data will be cycled through indefinitely.
    num_readers: a integer, number of Readers to create.
    reader_kwargs: an optional dict, of kwargs for the reader.
    shuffle: boolean, whether should shuffle the files and the records by
      using RandomShuffleQueue as common_queue.
    dtypes: A list of types. The length of dtypes must equal the number of
      elements in each record. If it is None it will default to
      [tf.string, tf.string] for (key, value).
    capacity: integer, capacity of the common_queue.
    min_after_dequeue: integer, minimum number of records in the
      common_queue after dequeue. Needed for a good shuffle.
    seed: A seed for RandomShuffleQueue.
    scope: Optional name scope for the ops.

  Returns:
    key, value: a tuple of keys and values from the data_source.
  """
  data_files = get_data_files(data_sources)
  with ops.name_scope(scope, 'parallel_read'):
    filename_queue = tf_input.string_input_producer(
        data_files,
        num_epochs=num_epochs,
        shuffle=shuffle,
        seed=seed,
        name='filenames')
    dtypes = dtypes or [tf_dtypes.string, tf_dtypes.string]
    if shuffle:
      common_queue = data_flow_ops.RandomShuffleQueue(
          capacity=capacity,
          min_after_dequeue=min_after_dequeue,
          dtypes=dtypes,
          seed=seed,
          name='common_queue')
    else:
      common_queue = data_flow_ops.FIFOQueue(
          capacity=capacity, dtypes=dtypes, name='common_queue')

    # Expose queue occupancy as a summary for monitoring input pressure.
    summary.scalar(
        'fraction_of_%d_full' % capacity,
        math_ops.cast(common_queue.size(), tf_dtypes.float32) *
        (1. / capacity))

    return ParallelReader(
        reader_class,
        common_queue,
        num_readers=num_readers,
        reader_kwargs=reader_kwargs).read(filename_queue)
google-research/tf-slim
[ 334, 98, 334, 11, 1561681288 ]
def device_broadcast(x, num_devices):
  """Broadcast a value to all devices.

  Maps a constant function over one dummy input per device, so each device
  receives its own copy of `x` in the stacked result.
  """
  broadcast_fn = jax.pmap(lambda _unused: x)
  return broadcast_fn(jnp.arange(num_devices))
google-research/google-research
[ 27788, 6881, 27788, 944, 1538678568 ]
def batched_loss_fn(model):
  """Apply loss function across a batch of examples."""
  # vmap over the example axis only: in_axes=(None, 0, None) shares the
  # model and metadata across the batch. `loss_fn`, `batched_examples`, and
  # `static_batch_metadata` are captured from the enclosing scope.
  loss, metrics = jax.vmap(loss_fn, (None, 0, None))(model, batched_examples,
                                                     static_batch_metadata)
  # Average the per-example losses; metrics are returned unreduced.
  return jnp.mean(loss), metrics
google-research/google-research
[ 27788, 6881, 27788, 944, 1538678568 ]
def _build_parallel_train_step(): """Builds an accelerated version of the train step function.""" # We need to wrap and unwrap so that the final function can be called with # keyword arguments, but we still maintain the proper axes. @functools.partial( jax.pmap, axis_name="devices", in_axes=(0, 0, None, None, None, None), static_broadcasted_argnums=(2, 3)) def wrapped(optimizer, batched_examples, static_batch_metadata, loss_fn, max_global_norm, optimizer_hyper_params): return _parallel_train_step(optimizer, batched_examples, static_batch_metadata, loss_fn, max_global_norm, **optimizer_hyper_params) @functools.wraps(_parallel_train_step) def wrapper(optimizer, batched_examples, static_batch_metadata, loss_fn, max_global_norm, **optimizer_hyper_params): return wrapped(optimizer, batched_examples, static_batch_metadata, loss_fn, max_global_norm, optimizer_hyper_params) return wrapper
google-research/google-research
[ 27788, 6881, 27788, 944, 1538678568 ]
def warmup_train_step( optimizer, batched_example, static_batch_metadata, loss_fn, optimizer_is_replicated = False, profile = False, runner=None,
google-research/google-research
[ 27788, 6881, 27788, 944, 1538678568 ]
def go():
  # Note that value for learning_rate is arbitrary, but we pass it here to
  # warm up the jit cache (since we are passing a learning rate at training
  # time). All referenced names are captured from the enclosing scope.
  res = parallel_train_step(
      replicated_optimizer,
      batched_example,
      static_batch_metadata,
      loss_fn,
      max_global_norm=max_global_norm,
      learning_rate=0.0)
  # Block until the async computation finishes so all warmup cost is paid
  # here rather than during the first real step.
  jax.tree_map(lambda x: x.block_until_ready(), res)
google-research/google-research
[ 27788, 6881, 27788, 944, 1538678568 ]
def build_averaging_validator( loss_fn, valid_iterator_factory, objective_metric_name = None, include_total_counts = False, prefetch = True,
google-research/google-research
[ 27788, 6881, 27788, 944, 1538678568 ]
def parallel_metrics_batch(model, batched_examples, batch_mask,
                           static_metadata):
  # vmap over the example axis; model and metadata are shared
  # (in_axes=(None, 0, None)). `loss_fn` comes from the enclosing scope.
  loss, metrics = jax.vmap(loss_fn, (None, 0, None))(model, batched_examples,
                                                     static_metadata)
  metrics["loss"] = loss
  # Zero out contributions from masked-out (padding) examples.
  metrics = jax.tree_map(
      lambda x: jnp.where(batch_mask, x, jnp.zeros_like(x)), metrics)
  # Sum within the device, then across devices along the "devices" axis.
  metrics = jax.tree_map(lambda x: jax.lax.psum(jnp.sum(x), "devices"),
                         metrics)
  return metrics
google-research/google-research
[ 27788, 6881, 27788, 944, 1538678568 ]
def __init__(self, spec):
  # Interface-only constructor: `spec` (a compressor hparams object) is
  # accepted but not stored here; concrete subclasses decide what to keep.
  pass
google-research/google-research
[ 27788, 6881, 27788, 944, 1538678568 ]
def default_matrix(self):
  """Returns default matrix for initialization.

  Size is taken from spec. Abstract: concrete compressors must override.
  """
  raise NotImplementedError()
google-research/google-research
[ 27788, 6881, 27788, 944, 1538678568 ]
def __init__(self, spec):
  """Initializer.

  Args:
    spec: hparams object with default value given by
      self.get_default_hparams().
  """
  super(LowRankDecompMatrixCompressor, self).__init__(spec)
  self._spec = spec
  # Running size counters — presumably accumulated as matrices are
  # compressed elsewhere; confirm with the compression-op implementation.
  self.uncompressed_size = 0
  self.compressed_size = 0
google-research/google-research
[ 27788, 6881, 27788, 944, 1538678568 ]
def get_default_hparams():
  """Get a tf.HParams object with the default values for the hyperparameters.

    name: string
      name of the low-rank matrix decompressor specification.
    rank: integer
      rank of the low-rank decomposition that is performed.
    compressor_option: integer
      indicates what type of factorization (if any) is used.
    is_b_matrix_trainable: bool
      indicates whether the b_matrix matrix in the factorization is to be
      trained.
    is_c_matrix_trainable: bool
      indicates whether the c_matrix matrix in the factorization is to be
      trained.

  Returns:
    tf.HParams object initialized to default values.
  """
  # NOTE(review): several returned fields (num_rows, num_cols, use_tpu,
  # is_c_matrix_present, block_size, pruning_fraction, use_lsh) are not
  # described in the docstring above — confirm their semantics against the
  # compression-op implementation.
  return HParams(
      name='model_compression',
      rank=100,
      num_rows=10,
      num_cols=10,
      use_tpu=False,
      compressor_option=0,
      is_b_matrix_trainable=True,
      is_c_matrix_trainable=True,
      is_c_matrix_present=True,
      block_size=1,
      pruning_fraction=0.0,
      use_lsh=False)
google-research/google-research
[ 27788, 6881, 27788, 944, 1538678568 ]
def __init__(self, scope='default_scope', spec=None, global_step=None):
  # Interface-only constructor: scope/spec/global_step are accepted but not
  # stored; concrete compression ops implement the real initialization.
  pass
google-research/google-research
[ 27788, 6881, 27788, 944, 1538678568 ]
def get_update_op(self):
  """Update operator.

  Abstract: concrete compression ops must override.

  Returns:
    TF operator that implements the update steps that may need to be
    applied periodically.
  """
  raise NotImplementedError()
google-research/google-research
[ 27788, 6881, 27788, 944, 1538678568 ]
def __init__(self, scope='default_scope', spec=None, global_step=None,
             layer=None):
  """Initializer.

  Args:
    scope: TF scope used for creating new TF variables.
    spec: compression hyper parameters default value given by
      self.get_default_hparams().
    global_step: tf variable that has the global step.
    layer: Layer to compress.
  """
  super(CompressionOp, self).__init__(scope, spec, global_step)
  # Compression specification
  self._spec = spec

  # Sanity check for compression hparams
  self._validate_spec()
  self._global_step = global_step

  # public member variables to track the compressor, the variables and
  # other tf nodes corresponding to this OP.
  self.matrix_compressor = None
  self.a_matrix_tfvar = None
  self.b_matrix_tfvar = None
  self.c_matrix_tfvar = None
  self.alpha = None
  self.layer = layer
  # Step at which alpha was last updated; used to enforce
  # compression_frequency between updates.
  self.last_alpha_update_step = None

  self.uncompressed_size = 0
  self.compressed_size = 0
google-research/google-research
[ 27788, 6881, 27788, 944, 1538678568 ]
def get_default_hparams():
  """Get a tf.HParams object with the default values for the hyperparameters.

    name: string
      name of the compression specification. Used for adding summaries and
      ops under a common tensorflow name_scope.
    alpha_decrement_value: float
      a positive real number by which alpha is decremented at each update.
    begin_compression_step: integer
      the global step at which to begin compression.
    end_compression_step: integer
      the global step at which to terminate compression. Defaults to -1
      implying that compression continues till the training stops.
    use_tpu: False
      indicates whether to use TPU.
    compression_option: integer
      indicates what type of factorization (if any) is used.
    rank: integer
      indicates what type of factorization (if any) is used.
    update_option: integer
      indicates how the update logic is being run. More specifically:
      0 - run the update logic in TF; needed when using GPU/TPU.
      1 - run the update logic in regular python as opposed to TF.
      2 - run the update logic in TF and in regular python.

  Returns:
    tf.HParams object initialized to default values.
  """
  # NOTE(review): the pruning-related fields below (begin/end_pruning_step,
  # weight_sparsity_map, block_*, nbins, sparsity_*, gradient_decay_rate,
  # prune_option, ...) are not described in the docstring — confirm their
  # semantics with the pruning implementation.
  return HParams(
      name='model_compression',
      alpha_decrement_value=0.01,
      begin_compression_step=0,
      end_compression_step=-1,
      compression_frequency=10,
      use_tpu=False,
      compression_option=0,
      rank=100,
      update_option=0,
      run_update_interval_check=1,
      block_size=1,
      pruning_fraction=0.0,
      begin_pruning_step=0,
      end_pruning_step=-1,
      weight_sparsity_map=[''],
      block_dims_map=[''],
      threshold_decay=0.0,
      pruning_frequency=10,
      nbins=256,
      block_height=1,
      block_width=1,
      block_pooling_function='AVG',
      initial_sparsity=0.0,
      target_sparsity=0.5,
      sparsity_function_begin_step=0,
      sparsity_function_end_step=100,
      sparsity_function_exponent=3.0,
      gradient_decay_rate=0.99,
      prune_option='weight')
google-research/google-research
[ 27788, 6881, 27788, 944, 1538678568 ]
def compressed_matmul_keras(self, inputs, training=False):
  """Matmul with a convex combination of original and compressed weights.

  Args:
    inputs: activations multiplied against the weight matrix (presumably
      rank-2 — confirm with the calling layer).
    training: if True, blends the original A matrix with the factorized
      B*C product using alpha; otherwise uses only the factorized form.

  Returns:
    The matmul result.
  """
  if training:
    # alpha interpolates between the original weights (alpha=1) and the
    # factorized approximation (alpha=0).
    compressed_mat = self.alpha * self.a_matrix_tfvar + (
        1 - self.alpha) * tf.matmul(self.b_matrix_tfvar, self.c_matrix_tfvar)
    return tf.matmul(inputs, compressed_mat)
  else:
    # This prevents the TFLite converter from constant-folding the product
    # of B & C matrices.
    intermediate = tf.matmul(inputs, self.b_matrix_tfvar)
    return tf.matmul(intermediate, self.c_matrix_tfvar)
google-research/google-research
[ 27788, 6881, 27788, 944, 1538678568 ]
def maybe_update_alpha():
  """Maybe update the alpha param.

  Checks if global_step is between begin_compression_step and
  end_compression_step, and if the current training step is a compression
  step.

  Returns:
    Boolean tensor whether the training step is a compression step.
  """
  # In range when past the begin step AND (before the end step OR the end
  # step is negative, meaning compression never terminates).
  is_step_within_compression_range = tf.logical_and(
      tf.greater_equal(
          tf.cast(self._global_step, tf.int32),
          self._spec.begin_compression_step),
      tf.logical_or(
          tf.less_equal(
              tf.cast(self._global_step, tf.int32),
              self._spec.end_compression_step),
          tf.less(self._spec.end_compression_step, 0)))
  # A compression step occurs once at least compression_frequency steps
  # have elapsed since the last alpha update.
  is_compression_step = tf.less_equal(
      tf.add(self.last_alpha_update_step, self._spec.compression_frequency),
      tf.cast(self._global_step, tf.int32))
  return tf.logical_and(is_step_within_compression_range,
                        is_compression_step)
google-research/google-research
[ 27788, 6881, 27788, 944, 1538678568 ]
def compressor_and_alpha_update_op_fn():
  # Zero-arg closure so the combined compressor+alpha update can be passed
  # around as a callable; `self` is captured from the enclosing scope.
  return self._compressor_and_alpha_update_op()
google-research/google-research
[ 27788, 6881, 27788, 944, 1538678568 ]
def _compressor_op(self, matrix_compressor, a_matrix_tfvar):
  """Creates compressor op based on matrix_compressor.

  Meant to create the factors once at begin_compression_step.

  Args:
    matrix_compressor: specifies the matrix compressor object.
    a_matrix_tfvar: the tf tensor to be compressed.
  """
  # Run the python-side compressor via py_function to produce the B and C
  # factors, then copy them into the TF variables in place.
  [b_matrix_out, c_matrix_out
  ] = tf.compat.v1.py_function(matrix_compressor.static_matrix_compressor,
                               [a_matrix_tfvar], [tf.float32, tf.float32])
  self.b_matrix_tfvar.assign(b_matrix_out)
  self.c_matrix_tfvar.assign(c_matrix_out)
  return
google-research/google-research
[ 27788, 6881, 27788, 944, 1538678568 ]
def _compressor_and_alpha_update_op(self):
  """Applies compressor and also updates alpha."""
  self._compressor_op(self.matrix_compressor, self.a_matrix_tfvar)
  self._update_alpha_op()
  # Record when alpha was updated so compression-frequency checks work.
  self.last_alpha_update_step.assign(tf.cast(self._global_step, tf.int32))
google-research/google-research
[ 27788, 6881, 27788, 944, 1538678568 ]
def __init__(self, scope, compression_spec, compressor, global_step=None):
  """Initializer.

  Args:
    scope: TF scope used for creating new TF variables.
    compression_spec: compression hyper parameters.
    compressor: matrix compressor object of class
      MatrixCompressorInferface.
    global_step: tf variable that has the global step.
  """
  logging.info('Entering ApplyCompression constructor')
  self._compression_op_spec = compression_spec
  self._scope = scope
  self._global_step = global_step
  self._matrix_compressor = compressor
  # Per-matrix compression ops and their update ops; the combined op is
  # built lazily elsewhere (starts as None).
  self._compression_ops = []
  self._update_ops = []
  self._all_update_op = None

  self.uncompressed_size = 0
  self.compressed_size = 0
google-research/google-research
[ 27788, 6881, 27788, 944, 1538678568 ]
def get_operator_hparam(self, hparam):
  """Look up a single hyperparameter value on the compression op's spec."""
  spec = self._compression_op_spec
  return spec.get(hparam)
google-research/google-research
[ 27788, 6881, 27788, 944, 1538678568 ]
def random_part_lens(max_n_parts, max_part_size):
    """Return a random tuple of part lengths.

    Draws the number of parts uniformly from [1, max_n_parts] first, then
    one length uniformly from [1, max_part_size] per part (same RNG call
    order as the original generator-expression form).
    """
    n_parts = random.randint(1, max_n_parts)
    lens = []
    for _ in range(n_parts):
        lens.append(random.randint(1, max_part_size))
    return tuple(lens)
quantumlib/Cirq
[ 3678, 836, 3678, 314, 1513294909 ]
def test_shift_swap_network_gate_acquaintance_opps(left_part_lens,
                                                   right_part_lens):
    """Each left part should be acquainted with each right part."""
    gate = cca.ShiftSwapNetworkGate(left_part_lens, right_part_lens)
    n_qubits = gate.qubit_count()
    qubits = cirq.LineQubit.range(n_qubits)
    strategy = cirq.Circuit(gate(*qubits))

    # actual_opps
    initial_mapping = {q: i for i, q in enumerate(qubits)}
    actual_opps = cca.get_logical_acquaintance_opportunities(
        strategy, initial_mapping)

    # expected opps: partition the logical indices into the left and right
    # parts in order.
    i = 0
    sides = ('left', 'right')
    parts = {side: [] for side in sides}
    for side, part_lens in zip(sides, (left_part_lens, right_part_lens)):
        for part_len in part_lens:
            parts[side].append(set(range(i, i + part_len)))
            i += part_len

    # Every (left part, right part) union is one expected opportunity.
    expected_opps = set(
        frozenset(left_part | right_part)
        for left_part, right_part in itertools.product(parts['left'],
                                                       parts['right'])
    )
    assert actual_opps == expected_opps
quantumlib/Cirq
[ 3678, 836, 3678, 314, 1513294909 ]
def test_shift_swap_network_gate_diagrams(left_part_lens, right_part_lens):
    """Circuit diagrams should match the stored golden diagrams."""
    gate = cca.ShiftSwapNetworkGate(left_part_lens, right_part_lens)
    n_qubits = gate.qubit_count()
    qubits = cirq.LineQubit.range(n_qubits)
    circuit = cirq.Circuit(gate(*qubits))
    # Undecomposed form first.
    diagram = circuit_diagrams['undecomposed', left_part_lens,
                               right_part_lens]
    cirq.testing.assert_has_diagram(circuit, diagram)
    # Then with acquaintance gates exposed (mutates circuit in place).
    cca.expose_acquaintance_gates(circuit)
    diagram = circuit_diagrams['decomposed', left_part_lens, right_part_lens]
    cirq.testing.assert_has_diagram(circuit, diagram)
quantumlib/Cirq
[ 3678, 836, 3678, 314, 1513294909 ]
def test_shift_swap_network_gate_repr(left_part_lens, right_part_lens):
    """repr() should round-trip, with and without an explicit swap gate."""
    gate = cca.ShiftSwapNetworkGate(left_part_lens, right_part_lens)
    cirq.testing.assert_equivalent_repr(gate)
    gate = cca.ShiftSwapNetworkGate(left_part_lens, right_part_lens, cirq.ZZ)
    cirq.testing.assert_equivalent_repr(gate)
quantumlib/Cirq
[ 3678, 836, 3678, 314, 1513294909 ]
def _read_proto_file(filename, proto): filename = filename # OSS: removed internal filename loading. with tf.io.gfile.GFile(filename, 'r') as proto_file: return text_format.ParseLines(proto_file, proto)
google-research/deeplab2
[ 878, 146, 878, 24, 1620859177 ]
def test_resnet50_encoder_creation(self):
  """Builder should produce a ResNet50 encoder for the 'resnet50' backbone."""
  backbone_options = config_pb2.ModelOptions.BackboneOptions(
      name='resnet50', output_stride=32)
  encoder = builder.create_encoder(
      backbone_options,
      tf.keras.layers.experimental.SyncBatchNormalization)
  self.assertIsInstance(encoder, axial_resnet_instances.ResNet50)
google-research/deeplab2
[ 878, 146, 878, 24, 1620859177 ]
def test_mobilenet_encoder_creation(self, model_name):
  """Builder should produce a MobileNet encoder for mobilenet backbones."""
  backbone_options = config_pb2.ModelOptions.BackboneOptions(
      name=model_name, use_squeeze_and_excite=True, output_stride=32)
  encoder = builder.create_encoder(
      backbone_options,
      tf.keras.layers.experimental.SyncBatchNormalization)
  self.assertIsInstance(encoder, mobilenet.MobileNet)
google-research/deeplab2
[ 878, 146, 878, 24, 1620859177 ]
def test_decoder_creation(self):
  """Builder should produce a MotionDeepLab decoder from the example config."""
  proto_filename = os.path.join(
      _CONFIG_PATH, 'example_kitti-step_motion_deeplab.textproto')
  model_options = _read_proto_file(proto_filename, config_pb2.ModelOptions())
  motion_decoder = builder.create_decoder(
      model_options,
      tf.keras.layers.experimental.SyncBatchNormalization,
      ignore_label=255)
  self.assertIsInstance(motion_decoder,
                        motion_deeplab_decoder.MotionDeepLabDecoder)
google-research/deeplab2
[ 878, 146, 878, 24, 1620859177 ]
def _cfg(url='', **kwargs):
    """Build a default pretrained-model config dict.

    Any keyword argument overrides (or extends) the defaults below.
    """
    cfg = {
        'url': url,
        'num_classes': 1000,
        'input_size': (3, 224, 224),
        'pool_size': None,
        'crop_pct': .96,
        'interpolation': 'bicubic',
        'mean': IMAGENET_DEFAULT_MEAN,
        'std': IMAGENET_DEFAULT_STD,
        'classifier': 'head',
        'first_conv': 'stem.0',
    }
    cfg.update(kwargs)
    return cfg
rwightman/pytorch-image-models
[ 23978, 3956, 23978, 96, 1549086672 ]
def __init__(self, fn):
    super().__init__()
    # fn: the callable this wrapper stores for later use by the enclosing
    # module (presumably applied in its forward pass — confirm there).
    self.fn = fn
rwightman/pytorch-image-models
[ 23978, 3956, 23978, 96, 1549086672 ]