mathfa03 HathawayLiu commited on
Commit
6d85999
·
0 Parent(s):

Duplicate from HathawayLiu/housing_dataset

Browse files

Co-authored-by: Xinyan Liu <HathawayLiu@users.noreply.huggingface.co>

.gitattributes ADDED
@@ -0,0 +1,56 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ *.7z filter=lfs diff=lfs merge=lfs -text
2
+ *.arrow filter=lfs diff=lfs merge=lfs -text
3
+ *.bin filter=lfs diff=lfs merge=lfs -text
4
+ *.bz2 filter=lfs diff=lfs merge=lfs -text
5
+ *.ckpt filter=lfs diff=lfs merge=lfs -text
6
+ *.ftz filter=lfs diff=lfs merge=lfs -text
7
+ *.gz filter=lfs diff=lfs merge=lfs -text
8
+ *.h5 filter=lfs diff=lfs merge=lfs -text
9
+ *.joblib filter=lfs diff=lfs merge=lfs -text
10
+ *.lfs.* filter=lfs diff=lfs merge=lfs -text
11
+ *.lz4 filter=lfs diff=lfs merge=lfs -text
12
+ *.mlmodel filter=lfs diff=lfs merge=lfs -text
13
+ *.model filter=lfs diff=lfs merge=lfs -text
14
+ *.msgpack filter=lfs diff=lfs merge=lfs -text
15
+ *.npy filter=lfs diff=lfs merge=lfs -text
16
+ *.npz filter=lfs diff=lfs merge=lfs -text
17
+ *.onnx filter=lfs diff=lfs merge=lfs -text
18
+ *.ot filter=lfs diff=lfs merge=lfs -text
19
+ *.parquet filter=lfs diff=lfs merge=lfs -text
20
+ *.pb filter=lfs diff=lfs merge=lfs -text
21
+ *.pickle filter=lfs diff=lfs merge=lfs -text
22
+ *.pkl filter=lfs diff=lfs merge=lfs -text
23
+ *.pt filter=lfs diff=lfs merge=lfs -text
24
+ *.pth filter=lfs diff=lfs merge=lfs -text
25
+ *.rar filter=lfs diff=lfs merge=lfs -text
26
+ *.safetensors filter=lfs diff=lfs merge=lfs -text
27
+ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
28
+ *.tar.* filter=lfs diff=lfs merge=lfs -text
29
+ *.tar filter=lfs diff=lfs merge=lfs -text
30
+ *.tflite filter=lfs diff=lfs merge=lfs -text
31
+ *.tgz filter=lfs diff=lfs merge=lfs -text
32
+ *.wasm filter=lfs diff=lfs merge=lfs -text
33
+ *.xz filter=lfs diff=lfs merge=lfs -text
34
+ *.zip filter=lfs diff=lfs merge=lfs -text
35
+ *.zst filter=lfs diff=lfs merge=lfs -text
36
+ *tfevents* filter=lfs diff=lfs merge=lfs -text
37
+ # Audio files - uncompressed
38
+ *.pcm filter=lfs diff=lfs merge=lfs -text
39
+ *.sam filter=lfs diff=lfs merge=lfs -text
40
+ *.raw filter=lfs diff=lfs merge=lfs -text
41
+ # Audio files - compressed
42
+ *.aac filter=lfs diff=lfs merge=lfs -text
43
+ *.flac filter=lfs diff=lfs merge=lfs -text
44
+ *.mp3 filter=lfs diff=lfs merge=lfs -text
45
+ *.ogg filter=lfs diff=lfs merge=lfs -text
46
+ *.wav filter=lfs diff=lfs merge=lfs -text
47
+ # Image files - uncompressed
48
+ *.bmp filter=lfs diff=lfs merge=lfs -text
49
+ *.gif filter=lfs diff=lfs merge=lfs -text
50
+ *.png filter=lfs diff=lfs merge=lfs -text
51
+ *.tiff filter=lfs diff=lfs merge=lfs -text
52
+ # Image files - compressed
53
+ *.jpg filter=lfs diff=lfs merge=lfs -text
54
+ *.jpeg filter=lfs diff=lfs merge=lfs -text
55
+ *.webp filter=lfs diff=lfs merge=lfs -text
56
+ Building_Permits_Cleaned.csv filter=lfs diff=lfs merge=lfs -text
Housing_dataset.py ADDED
@@ -0,0 +1,150 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #
2
+ # Licensed under the Apache License, Version 2.0 (the "License");
3
+ # you may not use this file except in compliance with the License.
4
+ # You may obtain a copy of the License at
5
+ #
6
+ # http://www.apache.org/licenses/LICENSE-2.0
7
+ #
8
+ # Unless required by applicable law or agreed to in writing, software
9
+ # distributed under the License is distributed on an "AS IS" BASIS,
10
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11
+ # See the License for the specific language governing permissions and
12
+ # limitations under the License.
13
+
14
+ import csv
15
+ import json
16
+ import os
17
+ from typing import List
18
+ import datasets
19
+ import logging
20
+ import pandas as pd
21
+
22
# BibTeX citation surfaced on the Hugging Face dataset page.
_CITATION = """
@InProceedings{huggingface:dataset,
title = {Seattle Housing Permits Dataset},
author={Xinyan(Hathaway) Liu
},
year={2024}
}
"""

# Human-readable summary shown in the dataset card.
_DESCRIPTION = """
This typical dataset contains all the building permits issued or in progress
within the city of Seattle starting from 1990 to recent, and this dataset is
still updating as time flows. Information includes permit records urls,
detailed address, and building costs etc.
"""

# Upstream source of the raw data (Seattle open data portal).
_HOMEPAGE = "https://data.seattle.gov/Permitting/Building-Permits/76t5-zqzr/about_data"

# License reference URL (terms on the Seattle SDCI site).
# NOTE(review): the leading space looks accidental — confirm before changing,
# since the value is exposed verbatim.
_LICENSE = " http://www.seattle.gov/sdci"

# _URL points at the upstream portal for reference; _URLS are the pre-split
# CSVs the builder actually downloads.
_URL = "https://data.seattle.gov/Permitting/Building-Permits/76t5-zqzr/about_data"
_URLS = {
    "train": "https://github.com/HathawayLiu/Housing_dataset/raw/main/housing_train_dataset.csv",
    "test": "https://github.com/HathawayLiu/Housing_dataset/raw/main/housing_test_dataset.csv",
}
48
class HousingDataset(datasets.GeneratorBasedBuilder):
    """Hugging Face dataset builder for Seattle building permits.

    Contains all building permits issued or in progress within the city of
    Seattle. It keeps the original dataset columns and adds a
    ``NeighborDistrict`` column assigning each permit to a Seattle
    neighborhood district.
    """

    # Download locations for the pre-split train/test CSVs.
    _URLS = _URLS
    VERSION = datasets.Version("1.1.0")

    @staticmethod
    def _to_int(value, default=0):
        """Best-effort conversion of a CSV cell to int.

        Returns ``default`` for missing, blank, NaN, or non-numeric values
        (the raw CSV uses 'NA' placeholders), instead of raising ValueError
        the way a bare ``int(...)`` call would. Goes through ``float`` first
        so numeric strings like '98117.0' still convert.
        """
        try:
            return int(float(value))
        except (TypeError, ValueError):
            return default

    @staticmethod
    def _to_float(value, default=0.0):
        """Best-effort conversion of a CSV cell to float; see ``_to_int``."""
        try:
            return float(value)
        except (TypeError, ValueError):
            return default

    def _info(self):
        """Declare the feature schema, homepage, license, and citation."""
        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=datasets.Features(
                {
                    # columns from original dataset
                    "PermitNum": datasets.Value("string"),
                    "PermitClass": datasets.Value("string"),
                    "PermitClassMapped": datasets.Value("string"),
                    "PermitTypeMapped": datasets.Value("string"),
                    "PermitTypeDesc": datasets.Value("string"),
                    "Description": datasets.Value("string"),
                    "HousingUnits": datasets.Value("int64"),
                    "HousingUnitsRemoved": datasets.Value("int64"),
                    "HousingUnitsAdded": datasets.Value("int64"),
                    "EstProjectCost": datasets.Value("float32"),
                    "AppliedDate": datasets.Value("string"),
                    "IssuedDate": datasets.Value("string"),
                    "ExpiresDate": datasets.Value("string"),
                    "CompletedDate": datasets.Value("string"),
                    "StatusCurrent": datasets.Value("string"),
                    "RelatedMup": datasets.Value("string"),
                    "OriginalAddress1": datasets.Value("string"),
                    "OriginalCity": datasets.Value("string"),
                    "OriginalState": datasets.Value("string"),
                    "OriginalZip": datasets.Value("int64"),
                    "ContractorCompanyName": datasets.Value("string"),
                    "Link": datasets.Value("string"),
                    "Latitude": datasets.Value("float32"),
                    "Longitude": datasets.Value("float32"),
                    "Location1": datasets.Value("string"),
                    # new added columns below
                    "NeighborDistrict": datasets.Value("string")
                }
            ),
            # No default supervised_keys (as we have to pass both question
            # and context as input).
            supervised_keys=None,
            homepage=_HOMEPAGE,
            # Previously defined but unused; surface it on the dataset info.
            license=_LICENSE,
            citation=_CITATION,
        )

    def _split_generators(self, dl_manager: datasets.DownloadManager) -> List[datasets.SplitGenerator]:
        """Download the pre-split CSVs and wire them to train/test splits."""
        urls = self._URLS
        downloaded_files = dl_manager.download_and_extract(urls)

        return [
            datasets.SplitGenerator(name=datasets.Split.TRAIN, gen_kwargs={"filepath": downloaded_files["train"]}),
            datasets.SplitGenerator(name=datasets.Split.TEST, gen_kwargs={"filepath": downloaded_files["test"]}),
        ]

    def _generate_examples(self, filepath):
        """Yield (row_index, example_dict) pairs from one split's CSV.

        Numeric fields are coerced via ``_to_int``/``_to_float`` so that
        missing or 'NA' cells default to 0 instead of raising — the previous
        ``int(row.get(..., ""))`` pattern crashed on any missing/NaN cell
        because ``int("")`` / ``int(nan)`` raise ValueError.
        """
        logging.info("generating examples from = %s", filepath)
        with open(filepath) as f:
            housing_df = pd.read_csv(f)

        # 'NA' placeholders would otherwise break the float cast below.
        housing_df['EstProjectCost'] = housing_df["EstProjectCost"].replace('NA', 0)
        # Rows lacking coordinates or a zip code cannot be located; drop them.
        housing_df.dropna(subset=['Latitude'], inplace=True)
        housing_df.dropna(subset=['OriginalZip'], inplace=True)

        housing_df['Latitude'] = housing_df['Latitude'].astype(float)
        housing_df['Longitude'] = housing_df['Longitude'].astype(float)

        # Iterating through each row to generate examples
        for index, row in housing_df.iterrows():
            yield index, {
                "PermitNum": row.get("PermitNum", ""),
                "PermitClass": row.get("PermitClass", ""),
                "PermitClassMapped": row.get("PermitClassMapped", ""),
                "PermitTypeMapped": row.get("PermitTypeMapped", ""),
                "PermitTypeDesc": row.get("PermitTypeDesc", ""),
                "Description": row.get("Description", ""),
                "HousingUnits": self._to_int(row.get("HousingUnits")),
                "HousingUnitsRemoved": self._to_int(row.get("HousingUnitsRemoved")),
                "HousingUnitsAdded": self._to_int(row.get("HousingUnitsAdded")),
                "EstProjectCost": self._to_float(row.get("EstProjectCost")),
                "AppliedDate": str(row.get("AppliedDate", "")),
                "IssuedDate": str(row.get("IssuedDate", "")),
                "ExpiresDate": str(row.get("ExpiresDate", "")),
                "CompletedDate": str(row.get("CompletedDate", "")),
                "StatusCurrent": row.get("StatusCurrent", ""),
                "RelatedMup": row.get("RelatedMup", ""),
                "OriginalAddress1": row.get("OriginalAddress1", ""),
                "OriginalCity": row.get("OriginalCity", ""),
                "OriginalState": row.get("OriginalState", ""),
                "OriginalZip": self._to_int(row.get("OriginalZip")),
                "ContractorCompanyName": row.get("ContractorCompanyName", ""),
                "Link": row.get("Link", ""),
                "Latitude": row["Latitude"],
                "Longitude": row["Longitude"],
                # Recomputed from the already float-cast coordinates rather
                # than read from the CSV's own Location1 column.
                "Location1": str(row["Latitude"]) + ", " + str(row["Longitude"]),
                "NeighborDistrict": row.get("NeighborDistrict", "")
            }
README.md ADDED
@@ -0,0 +1,269 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ language:
3
+ - en
4
+ size_categories:
5
+ - 100K<n<1M
6
+ tags:
7
+ - housing
8
+ - permits
9
+ - Seattle
10
+ dataset_info:
11
+ features:
12
+ - name: PermitNum
13
+ dtype: string
14
+ - name: PermitClass
15
+ dtype: string
16
+ - name: PermitClassMapped
17
+ dtype: string
18
+ - name: PermitTypeMapped
19
+ dtype: string
20
+ - name: PermitTypeDesc
21
+ dtype: string
22
+ - name: Description
23
+ dtype: string
24
+ - name: HousingUnits
25
+ dtype: int64
26
+ - name: HousingUnitsRemoved
27
+ dtype: int64
28
+ - name: HousingUnitsAdded
29
+ dtype: int64
30
+ - name: EstProjectCost
31
+ dtype: float32
32
+ - name: AppliedDate
33
+ dtype: string
34
+ - name: IssuedDate
35
+ dtype: string
36
+ - name: ExpiresDate
37
+ dtype: string
38
+ - name: CompletedDate
39
+ dtype: string
40
+ - name: StatusCurrent
41
+ dtype: string
42
+ - name: RelatedMup
43
+ dtype: string
44
+ - name: OriginalAddress1
45
+ dtype: string
46
+ - name: OriginalCity
47
+ dtype: string
48
+ - name: OriginalState
49
+ dtype: string
50
+ - name: OriginalZip
51
+ dtype: int64
52
+ - name: ContractorCompanyName
53
+ dtype: string
54
+ - name: Link
55
+ dtype: string
56
+ - name: Latitude
57
+ dtype: float32
58
+ - name: Longitude
59
+ dtype: float32
60
+ - name: Location1
61
+ dtype: string
62
+ - name: NeighborDistrict
63
+ dtype: string
64
+ splits:
65
+ - name: train
66
+ num_bytes: 47214591
67
+ num_examples: 97541
68
+ - name: test
69
+ num_bytes: 11802066
70
+ num_examples: 24388
71
+ download_size: 18076020
72
+ dataset_size: 59016657
73
+ configs:
74
+ - config_name: default
75
+ data_files:
76
+ - split: train
77
+ path: data/train-*
78
+ - split: test
79
+ path: data/test-*
80
+ ---
81
+
82
+ # Dataset Card for Housing_Dataset
83
+
84
+ This typical dataset contains all the building permits issued or in progress
85
+ within the city of Seattle starting from 2000 to recent, and this dataset is
86
+ still updating as time flows. Information includes permit records urls,
87
+ detailed address, and building costs etc., which will be presented in the `housing_dataset.py`
88
+ file and the following description
89
+
90
+ ## Dataset Details
91
+
92
+ ### Dataset Description
93
+
94
+ This [**Seattle Housing permits dataset**](https://data.seattle.gov/Permitting/Building-Permits/76t5-zqzr/about_data)
95
+ is authorized by Seattle Government and could be found in Seattle Government open data portal.
96
+ The Building Permits dataset from the City of Seattle's Open Data portal provides comprehensive information about building permits issued or currently in progress within Seattle.
97
+ This dataset, which dates back to 1990 and continues to be updated, includes a wide range of details such as permit numbers, types, descriptions,
98
+ estimated project costs, and related contractor information could be found in the .csv table in the official website, which in total contains 25 columns.
99
+ Moreover, Seattle is divided in 13 Neighborhood District. Based on the [Seattle Neighborhood District GeoJson File](https://data-seattlecitygis.opendata.arcgis.com/datasets/SeattleCityGIS::neighborhood-map-atlas-districts/about) found on Seattle government website,
100
+ there will be a new column created, namely NeighborhoodDistrict. With the provided GeoJson file, every housing will be assigned to the corresponding
101
+ neighborhood district using the `Latitude` and `Longitude` columns in the csv for future usage.
102
+
103
+ - **Curated by:** [Seattle Government Open data portal](https://data.seattle.gov/)
104
+ - **Language(s) (NLP):** [English]
105
+ - **License:** [Public Domain by Seattle Government](http://www.seattle.gov/sdci)
106
+
107
+ ### Dataset Sources
108
+
109
+ - **Offical Website:** [https://data.seattle.gov/]
110
+ - **Repository for Cleaned Dataset:** [https://github.com/HathawayLiu/Housing_dataset]
111
+
112
+ ## Uses
113
+
114
+ The Building Permits dataset from the City of Seattle is intended for use in various urban development and research applications.
115
+ It can assist in understanding building trends in Seattle, aid city planning, and support academic research on urban development.
116
+ The dataset is also a valuable tool for residents and businesses to stay informed about construction activities and regulations in the city.
117
+ Specifically for residents, this dataset provides starting information for choosing future housing by looking at housing cost, neighborhood district,
118
+ and other information in the dataset.
119
+ Additionally, it supports transparency and public engagement in city planning processes.
120
+
121
+ ### Direct Use
122
+
123
+ The Building Permits dataset from the City of Seattle is suitable for several use cases:
124
+ - **Urban Planning and Development:** Planners and developers can analyze trends in building permits to inform city development strategies and infrastructure planning.
125
+ - **Academic Research:** Researchers in urban studies, economics, and social sciences can use the data for studies on urban growth, housing, and economic activity.
126
+ - **Real Estate Analysis:** Real estate professionals can assess building activities in neighborhoods for market analysis and investment decisions.
127
+ - **Public Awareness:** The general public can use this data to stay informed about construction activities and developmental changes in their community.
128
+ - **Government and Policy Making:** Local government officials can utilize this data to make informed decisions on housing policies, zoning laws,
129
+ and community development projects.
130
+ - **Residents housing choice:** Residents could access this dataset for relevant information for their future housing choice.
131
+
132
+ ### Out-of-Scope Use
133
+
134
+ The Building Permits dataset from the City of Seattle should not be used for purposes that could infringe on privacy or for activities that are not in line
135
+ with ethical standards. This includes any form of misuse or malicious use such as targeting individuals or businesses based on the information provided in the dataset.
136
+ Additionally, the dataset may not be suitable for applications requiring highly specialized or non-public information about building structures,
137
+ as it primarily contains permit-related data.
138
+
139
+ ## Dataset Structure
140
+
141
+ The cleaned and modified full dataset[`Building_Permits_Cleaned.csv`], the split train[`housing_train_dataset.csv`] and test[`housing_test_dataset.csv`] dataset
142
+ are provided in the following Github Repo: [https://github.com/HathawayLiu/Housing_dataset]. The cleaned train and test dataset are also provided in the **`data`**
143
+ folder of this repo.
144
+
145
+ The cleaned dataset in total contains 26 columns:
146
+ - **`PermitNum`(string):** The tracking number used to refer to this permit in SDCI's tracking system.
147
+ - **`PermitClass`(string):** The permit class tells you the type of project.
148
+ - **`PermitClassMapped`(string):** A description of whether the permit is for a residential or non-residential project.
149
+ - **`PermitTypeMapped`(string):** The permit type by category, such as building, demolition, roofing, grading, and environmentally critical areas.
150
+ - **`PermitTypeDesc`(string):** Additional information about the type of permit. For example, whether it is an addition/alternation or a new project.
151
+ - **`Description`(string):** A brief description of the work that will be done under this permit. This description is subject to change before SDCI issues the permit. The description is generally more stable if we have issued the permit. Very long descriptions have been truncated.
152
+ - **`HousingUnits`(int):** The number of housing units included at the beginning of the project.
153
+ - **`HousingUnitsRemoved`(int)** The number of housing units removed during the project.
154
+ - **`HousingUnitsAdded`(int):** The number of housing units added during the project.
155
+ - **`EstProjectCost`(float):** The estimated project cost of the work being proposed is based on fair market value (parts plus labor). The estimated cost (if any) represents the best available information to date, and is subject to change if the project is modified. We do not collect the estimated project cost for all permit types.
156
+ - **`AppliedDate`(string):** The date SDCI accepted the application as a complete submittal.
157
+ - **`IssuedDate`(string):** The date SDCI issued the permit. If there is an Application Date but no Issue Date, this generally means the application is still under review.
158
+ - **`ExpiresDate`(string):** The date the application is due to expire. Generally, this is the date by which work is supposed to be completed (barring renewals or further extensions). If there is not an Expiration Date, this generally means the permit has not been issued.
159
+ - **`CompletedDate`(string):** The date the permit had all its inspections completed. If there is an Issue Date but not a Completed Date, this generally means the permit is still under inspection.
160
+ - **`RelatedMup`(string):** The land use permit that is related to this building permit, if there is one.
161
+ - **`OriginalAddress1`(string):** The street name and number of the project.
162
+ - **`OriginalCity`(string):** The city for the project's address.
163
+ - **`OriginalState`(string):** The state for the project's address.
164
+ - **`OriginalZip`(string):** The Zip code for the project's address.
165
+ - **`ContractorCompanyName`(string):** The contractor(s) associated with this permit.
166
+ - **`Link`(string):** A link to view full details and current status information about this permit at SDCI's website.
167
+ - **`Latitude`(float):** Latitude of the worksite where permit activity occurs. May be missing for a small number of permits considered "unaddressable."
168
+ - **`Longitude`(float):** Longitude of the worksite where permit activity occurs. May be missing for a small number of permits considered "unaddressable."
169
+ - **`Location1`(string):** The latitude and longitude location for mapping purposes.
170
+ - (New added column)**`NeighborhoodDistrict`(string):** The district that the housing belongs to according to location
171
+
172
+ ## Dataset Creation
173
+
174
+ ### Curation Rationale
175
+
176
+ The Building Permits dataset from the City of Seattle was created to foster transparency, public awareness, and engagement in the city's urban development processes.
177
+ It provides residents, professionals, and researchers with detailed information about building activities, facilitating informed decision-making and community involvement in city planning and development.
178
+ Regarding the importance of the 13 neighborhood districts in Seattle, the newly added column for the corresponding neighborhood district gives residents and the government a chance
179
+ to investigate the building activities and life quality in the aspect of different neighborhood districts.
180
+ The dataset supports the city's commitment to open data and the promotion of data-driven insights for improving urban infrastructure and living conditions.
181
+
182
+
183
+ #### Data Collection and Processing
184
+
185
+ The Building Permits dataset is collected by Seattle Government where it contains all of the recent information about housing permits in Seattle. The dataset is published on
186
+ Seattle Government Open Data Portal and it keeps updating over time. You can download the raw data from [Seattle Government Website](https://data.seattle.gov/Permitting/Building-Permits/76t5-zqzr/about_data)
187
+ in different formats. For my own purpose I downloaded the CSV version that updated until the modified time of this repo and you can find it in the following Github Repo:[https://github.com/HathawayLiu/Housing_dataset]
188
+ (File name: `Building_Permits_20240213.csv`). To process and clean the dataset, I did the following steps:
189
+ 1. Pre-process the data to make sure that they are in the correct types.
190
+ 2. Use the provided `latitude` and `longitude` columns in the dataset along with Google GeoCoding API to fill in the blanks for the `OriginalZip`(Zip code) column.
191
+ 3. Use the provided `latitude` and `longitude` columns and the GeoJSon file of Seattle Neighborhood District to assign building permits to their corresponding neighborhood districts.
192
+ 4. (The GeoJSon file of Seattle Neighborhood District could be found under this GitHub Repo:[https://github.com/HathawayLiu/Housing_dataset]. You could also download it through Seattle GeoData Portal:https://data-seattlecitygis.opendata.arcgis.com/datasets/SeattleCityGIS::neighborhood-map-atlas-districts/about)
193
+ 5. Fill in the blanks left in the dataset with `N/A` for easier future use
194
+ 6. Split the dataset into train and test set for future use.
195
+
196
+ For more details about data cleaning and processing, you could refer to the `data_cleaning.py` file under this repo. Notice that to be able to use the function to get zipcode,
197
+ you need to use your own API Key. Applying for a Google GeoCoding API is free. You could simply follow this link to apply it: https://developers.google.com/maps/documentation/geocoding/get-api-key
198
+ You are more than welcome to download the raw data and process the dataset yourself.
199
+
200
+ To load the dataset, you could use the following command:
201
+ ```python
202
+ !pip install datasets
203
+ from datasets import load_dataset
204
+ dataset = load_dataset("HathawayLiu/housing_dataset", trust_remote_code=True)
205
+ ```
206
+ To generate an example from the train/test set, use:
207
+ ```python
208
+ next(iter(dataset['train']))
209
+ ## next(iter(dataset['test']))
210
+ ```
211
+ You can see the example from dataset like the following:
212
+ ```
213
+ {'PermitNum': '6075593-CN',
214
+ 'PermitClass': 'Single Family/Duplex',
215
+ 'PermitClassMapped': 'Residential',
216
+ 'PermitTypeMapped': 'Building',
217
+ 'PermitTypeDesc': 'Addition/Alteration',
218
+ 'Description': 'Replace existing windows; Upgrade new windows and framing for existing single family residence subject to field inspection',
219
+ 'HousingUnits': 0,
220
+ 'HousingUnitsRemoved': 0,
221
+ 'HousingUnitsAdded': 0,
222
+ 'EstProjectCost': 43014.0,
223
+ 'AppliedDate': '10/12/05',
224
+ 'IssuedDate': '10/12/05',
225
+ 'ExpiresDate': '4/12/07',
226
+ 'CompletedDate': '2/1/06',
227
+ 'StatusCurrent': 'Completed',
228
+ 'RelatedMup': 'nan',
229
+ 'OriginalAddress1': '624 NW 88TH ST',
230
+ 'OriginalCity': 'SEATTLE',
231
+ 'OriginalState': 'WA',
232
+ 'OriginalZip': 98117,
233
+ 'ContractorCompanyName': 'STATEWIDE INC',
234
+ 'Link': 'https://cosaccela.seattle.gov/portal/customize/LinkToRecord.aspx?altId=6075593-CN',
235
+ 'Latitude': 47.692996978759766,
236
+ 'Longitude': -122.36441040039062,
237
+ 'Location1': '47.69299754, -122.3644121',
238
+ 'NeighborDistrict': 'Northwest'}
239
+ ```
240
+ #### Who are the source data producers?
241
+
242
+ The Building Permits dataset is originally created and maintained by the City of Seattle, specifically by its Department of Construction and Inspections.
243
+ This department is responsible for overseeing building and land use in Seattle, ensuring safety and compliance with city codes.
244
+ The dataset reflects the department's ongoing work in managing and documenting building permits issued in the city.
245
+ For detailed information, visit the [Seattle Department of Construction & Inspections](https://www.seattle.gov/sdci).
246
+
247
+ ## Bias, Risks, and Limitations
248
+
249
+ The Building Permits dataset from the City of Seattle has both technical and sociotechnical limitations:
250
+ 1. **Technical Limitations**:
251
+ - **Data Completeness**: Not all building permits may be captured, especially older records. Data for specific columns like `IssuedDate`, `CompletedDate`, `AppliedDate`,
252
+ `RelatedMup`, and etc. contains lots of missing values.
253
+ - **Data Accuracy**: There may be errors or inconsistencies in the data, especially in historical records.
254
+ - **Timeliness**: The dataset might not be updated in real-time, causing delays in reflecting the most current information.
255
+
256
+ 2. **Sociotechnical Limitations**:
257
+ - **Privacy Concerns**: Detailed permit data could potentially be used to infer private information about property owners or residents.
258
+ - **Bias in Planning Decisions**: The data might be used to reinforce existing biases in urban planning, affecting marginalized communities.
259
+ - **Dependence on Technical Proficiency**: The dataset's utility is limited by the user's ability to interpret and analyze the data effectively.
260
+ 3. **Bias**: The dataset reflects only permitted construction, not all building activities. This can bias analyses towards formal, recorded developments, overlooking informal or unpermitted construction.
261
+ 4. **Risk**: Misuse can occur if data is used to unfairly target specific neighborhoods or communities for enforcement or political reasons.
262
+ These limitations should be considered when using this dataset for research, policy-making, or urban planning.
263
+
264
+ ### Recommendations
265
+
266
+ To address the bias and limitations above, users should intake the following recommendations:
267
+ - **Cross-Verification**: Use supplementary data sources for a more comprehensive view.
268
+ - **Privacy and Ethical Use**: Handle data responsibly, respecting privacy and avoiding discriminatory practices.
269
+ - **Data Cleaning and Validation**: Regularly update and clean the dataset to maintain accuracy and reliability.
data/test-00000-of-00001.parquet ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:48dbac19102ef383e1c7ec4b8c07948c4dc9b3d4de2ad58731b02f2dc8753d21
3
+ size 3614777
data/train-00000-of-00001.parquet ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:924042cce7901edcf0a9c676de5752d639b8bbc4b3b5b5dccc32f2f6a0e973d1
3
+ size 14461243
data_cleaning.py ADDED
@@ -0,0 +1,91 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ '''
2
+ git clone https://github.com/geopandas/geopandas.git
3
+ cd geopandas
4
+ pip install .
5
+ '''
6
+ import requests
7
+ import pandas as pd
8
+ import numpy as np
9
+ import requests
10
+ import geopandas as gpd
11
+ from shapely.geometry import Point
12
+
13
# load neighborhood GeoJson file and housing dataset
neighborhood = gpd.read_file("https://raw.githubusercontent.com/HathawayLiu/Housing_dataset/main/Neighborhood_Map_Atlas_Districts.geojson")
url = "https://github.com/HathawayLiu/Housing_dataset/raw/main/Building_Permits_20240213.csv"
df = pd.read_csv(url)

# Pre-processing of data
# Coerce each column to its intended dtype; unparseable values become NaN
# ('coerce') and are then filled with sentinel defaults.
df['OriginalZip'] = pd.to_numeric(df['OriginalZip'], errors='coerce').fillna('NA').astype(str)
# NOTE(review): after astype(str) the column holds strings, so replacing the
# integer 0 below matches nothing — zips that were 0 remain as '0.0'.
# Confirm whether a string replace was intended.
df['OriginalZip'] = df['OriginalZip'].replace(0, 'NA')
df['OriginalCity'] = df['OriginalCity'].fillna('SEATTLE')
df['OriginalState'] = df['OriginalState'].fillna('WA')
df['EstProjectCost'] = pd.to_numeric(df['EstProjectCost'], errors='coerce').astype(float)
df['IssuedDate'] = pd.to_datetime(df['IssuedDate'], errors='coerce')
df['HousingUnits'] = pd.to_numeric(df['HousingUnits'], errors='coerce').fillna(0).astype(int)
df['HousingUnitsRemoved'] = pd.to_numeric(df['HousingUnitsRemoved'], errors='coerce').fillna(0).astype(int)
df['HousingUnitsAdded'] = pd.to_numeric(df['HousingUnitsAdded'], errors='coerce').fillna(0).astype(int)
df['Longitude'] = pd.to_numeric(df['Longitude'], errors='coerce')
df['Latitude'] = pd.to_numeric(df['Latitude'], errors='coerce')
30
+
31
# Function to get the zip code from coordinates
def get_zip_code_from_coordinates(latitude, longitude, api_key):
    """Reverse-geocode (latitude, longitude) to a zip code string.

    Uses the Google Geocoding API with the supplied ``api_key``. Returns
    'NA' when either coordinate is NaN, when the HTTP call fails or times
    out, when the response is non-200, or when no postal_code component is
    present in the result.
    """
    if pd.isna(latitude) or pd.isna(longitude):
        return 'NA'  # Return 'NA' if latitude or longitude is NaN

    api_url = f"https://maps.googleapis.com/maps/api/geocode/json?latlng={latitude},{longitude}&key={api_key}"
    try:
        # Bounded timeout so a stalled connection cannot hang the whole
        # cleaning run; network failures degrade to 'NA' like other misses
        # instead of aborting the per-row loop with an exception.
        response = requests.get(api_url, timeout=10)
    except requests.RequestException:
        return 'NA'

    if response.status_code == 200:
        data = response.json()
        if data['results']:
            for component in data['results'][0]['address_components']:
                if 'postal_code' in component['types']:
                    return component['long_name']
        return 'NA'  # Return 'NA' if no zip code found
    else:
        return 'NA'  # Return 'NA' for non-200 responses
48
+
49
# Apply the function only to rows where 'OriginalZip' is 'NA'
# Each missing zip triggers one Geocoding API round-trip, so this loop can
# be slow and may incur API quota/cost when many zips are missing.
api_key = 'Your Own API Key'
for index, row in df.iterrows():
    if row['OriginalZip'] == 'NA':
        zip_code = get_zip_code_from_coordinates(row['Latitude'], row['Longitude'], api_key)
        df.at[index, 'OriginalZip'] = zip_code
        print(f"Updated row {index} with Zip Code: {zip_code}")

# Function to get corresponding neighborhood district from coordinates
# Build a GeoDataFrame with point geometry from lon/lat in WGS84 (EPSG:4326).
gdf = gpd.GeoDataFrame(df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude), crs='EPSG:4326')
59
def get_neighborhood_name(point, neighborhoods):
    """Return the 'L_HOOD' name of the first row in ``neighborhoods`` whose
    geometry contains ``point``, or 'NA' if no geometry contains it.

    ``neighborhoods`` is expected to be a (Geo)DataFrame with 'geometry'
    and 'L_HOOD' columns.
    """
    for _, district in neighborhoods.iterrows():
        if point.within(district['geometry']):
            # Debug print removed: it wrote one line to stdout per matched
            # permit, flooding the console on a ~100k-row dataset.
            return district['L_HOOD']
    return 'NA'
65
# Apply the function to each row
gdf['NeighborDistrict'] = gdf['geometry'].apply(lambda x: get_neighborhood_name(x, neighborhood) if pd.notna(x) else 'NA')
# Merge the new column back to the original DataFrame
df['NeighborDistrict'] = gdf['NeighborDistrict']
# filtered df to start from year 2000
df_filtered = df[df['IssuedDate'].dt.year >= 2000]
# NOTE(review): df_filtered is a slice of df, so the assignments below raise
# pandas SettingWithCopyWarning; adding .copy() above would be safer.
# NOTE(review): the right-hand side reads from df (not df_filtered);
# index alignment makes this work, but df_filtered['IssuedDate'] would
# state the intent more directly — confirm.
df_filtered['IssuedDate'] = df['IssuedDate'].astype(str)
df_filtered.fillna('NA', inplace=True)

'''
Following code is for splitting datasets in train and test dataset
'''
# Read the dataset
housing_df = pd.read_csv('https://github.com/HathawayLiu/Housing_dataset/raw/main/Building_Permits_Cleaned.csv')
# Shuffle the dataset
# NOTE(review): no random_state is set, so the train/test split differs on
# every run — confirm whether reproducibility matters here.
housing_df = housing_df.sample(frac=1).reset_index(drop=True)

# Splitting the dataset into training and test sets
split_ratio = 0.8 # 80% for training, 20% for testing
split_index = int(len(housing_df) * split_ratio)

train_df = housing_df[:split_index]
test_df = housing_df[split_index:]

# Export to CSV
# NOTE(review): hard-coded absolute user paths; these writes fail on any
# other machine.
train_df.to_csv('/Users/hathawayliu/Desktop/train_dataset.csv', index=False)
test_df.to_csv('/Users/hathawayliu/Desktop/test_dataset.csv', index=False)