index int64 | repo_name string | branch_name string | path string | content string | import_graph string |
|---|---|---|---|---|---|
66,471 | PorterDalton1/Text_Adventure | refs/heads/master | /window_GUI.py | """
This file provides the GUI for the text adventure.
"""
import tkinter
class WindowBase:
    """Base window for the text adventure GUI.

    Wraps a Tk root widget, paints it black, and enforces a minimum
    window size of 600x400 pixels.
    """

    def __init__(self, master):
        """Keep a reference to *master* and apply the base styling."""
        self.master = master
        master.configure(background="black")
        master.minsize(600, 400)
def main():
    """Create the Tk root, attach the game window, and run the event loop."""
    root = tkinter.Tk()
    WindowBase(root)
    root.mainloop()
if __name__ == '__main__':
main() | {"/main.py": ["/window_GUI.py"]} |
66,488 | SameerJain901/CoronaVisualizer | refs/heads/master | /loader.py | import pandas as pd
import numpy as np
import json
import datetime
import geojson_rewind
import plotly.graph_objects as go
import plotly.io as pio
cdata=pd.read_csv('cdata.csv')
fdata=pd.read_csv('forecasts.csv')
final_data=pd.read_csv("final_data.csv")
map_json = json.load(open("out2.json"))
def prepare_mapviz(cdata):
    """Attach a ``GeoID`` column to *cdata* matching the map geojson.

    Rewinds the module-level ``map_json``, tags every geojson feature
    with its state code, renames states in *cdata* to the geojson
    spellings, and maps each row's state to its feature id
    ('StateUnassigned' for the catch-all 'State Unassigned' rows).
    """
    state_id_map = {}
    india_states = geojson_rewind.rewind(map_json, rfc7946=False)
    for feature in india_states["features"]:
        feature["id"] = feature["properties"]["state_code"]
        state_id_map[feature["properties"]["st_nm"]] = feature["id"]
    # Dataset spellings -> geojson spellings (including the geojson's own
    # typos, e.g. 'Arunanchal'/'Havelli'); Ladakh is folded into J&K.
    renames = [
        ("Delhi", "NCT of Delhi"),
        ("Arunachal Pradesh", "Arunanchal Pradesh"),
        ("Ladakh", "Jammu & Kashmir"),
        ("Jammu and Kashmir", "Jammu & Kashmir"),
        ("Dadra and Nagar Haveli and Daman and Diu", "Dadara & Nagar Havelli"),
        ("Andaman and Nicobar Islands", "Andaman & Nicobar Island"),
    ]
    for old, new in renames:
        cdata['State'] = cdata['State'].apply(lambda name: name.replace(old, new))
    cdata["GeoID"] = cdata["State"].apply(
        lambda name: 'StateUnassigned' if name == 'State Unassigned' else state_id_map[name]
    )
    return cdata
def initialize():
    """Register the 'solarized' and 'quiet_light' plotly templates.

    Both templates restyle geo maps and polar charts to match the app's
    Solarized-dark and Quiet-Light Streamlit themes; they are combined
    with stock templates elsewhere (e.g. 'plotly_dark+solarized').
    """
    # Solarized: dark teal background rgb(0,43,54), muted grid #506784.
    pio.templates["solarized"] = go.layout.Template({
        'layout': {'geo': {'bgcolor': 'rgb(0,43,54)',
                           'lakecolor': 'rgb(0,43,54)',
                           'landcolor': 'rgb(0,43,54)',
                           'showlakes': True,
                           'showland': True,
                           'subunitcolor': '#506784'},
                   'polar': {'angularaxis': {'gridcolor': '#506784',
                                             'linecolor': '#506784', 'ticks': ''},
                             'bgcolor': 'rgb(0,43,54)',
                             'radialaxis': {'gridcolor': '#506784',
                                            'linecolor': '#506784', 'ticks': ''}},
                   'paper_bgcolor': 'rgb(0,43,54)',
                   'plot_bgcolor': 'rgb(0,43,54)'
                   }}
    )
    # Quiet-Light: pale grey background rgb(240,240,245), same grid color.
    pio.templates["quiet_light"] = go.layout.Template({
        'layout': {'geo': {'bgcolor': 'rgb(240,240,245)',
                           'lakecolor': 'rgb(240,240,245)',
                           'landcolor': 'rgb(240,240,245)',
                           'showlakes': True,
                           'showland': True,
                           'subunitcolor': '#506784'},
                   'polar': {'angularaxis': {'gridcolor': '#506784',
                                             'linecolor': '#506784', 'ticks': ''},
                             'bgcolor': 'rgb(240,240,245)',
                             'radialaxis': {'gridcolor': '#506784',
                                            'linecolor': '#506784', 'ticks': ''}},
                   'paper_bgcolor': 'rgb(240,240,245)',
                   'plot_bgcolor': 'rgb(240,240,245)'
                   }}
    )
| {"/visualizer_heroku.py": ["/loader.py"]} |
66,489 | SameerJain901/CoronaVisualizer | refs/heads/master | /visualizer_heroku.py | import streamlit as st
import os
import os.path
import sys
import time
import pandas as pd
import numpy as np
import re
import requests as rq
from bs4 import BeautifulSoup
import json
import datetime
import logging
import plotly.graph_objects as go
import plotly.io as pio
import SessionState
from plotly.subplots import make_subplots
import geojson_rewind
import plotly.express as px
import loader
import streamlit.components.v1 as components
import pickle as pk
import plotly.colors as co
##########################################################Definitions
# os.chmod('.streamlit/config.toml',755)
def prepare_mapviz(cdata, india_states):
    """Add a ``GeoID`` column to *cdata* keyed to *india_states* features.

    Tags every geojson feature with its state code, renames dataset
    state names to the geojson spellings, then maps each row's state to
    its feature id ('StateUnassigned' for 'State Unassigned' rows).
    Mutates and returns *cdata*; also mutates *india_states* features.
    """
    state_id_map = {}
    for feature in india_states["features"]:
        code = feature["properties"]["state_code"]
        feature["id"] = code
        state_id_map[feature["properties"]["st_nm"]] = code
    # Dataset spellings -> geojson spellings (the geojson carries its own
    # typos, e.g. 'Arunanchal'/'Havelli'); Ladakh maps onto J&K.
    renames = [
        ("Delhi", 'NCT of Delhi'),
        ("Arunachal Pradesh", 'Arunanchal Pradesh'),
        ("Ladakh", 'Jammu & Kashmir'),
        ("Jammu and Kashmir", 'Jammu & Kashmir'),
        ("Dadra and Nagar Haveli and Daman and Diu", 'Dadara & Nagar Havelli'),
        ("Andaman and Nicobar Islands", 'Andaman & Nicobar Island'),
    ]
    for old, new in renames:
        cdata['State'] = cdata['State'].apply(lambda name: name.replace(old, new))
    cdata["GeoID"] = cdata["State"].apply(
        lambda name: 'StateUnassigned' if name == 'State Unassigned' else state_id_map[name]
    )
    return cdata
##########################################################Prepare_data
# Register the custom plotly templates, then load the geojson and the
# scraped dataset once at startup.
loader.initialize()
session_state = SessionState.get(IS_scrapped=False)
# Use a with-block so the geojson handle is closed deterministically
# (json.load(open(...)) leaked it).
with open("out2.json") as geo_file:
    india_states = json.load(geo_file)
india_states = geojson_rewind.rewind(india_states, rfc7946=False)
final_data = pd.read_csv("final_data.csv")
cdata = prepare_mapviz(final_data.copy(), india_states)
##########################################################
#-------------------------------------------------------------------------------------------------------- Styles
quietlight='''<div class="lds-hourglass"></div>
<style type='text/css'>
.lds-hourglass {
display: inline-block;
position: relative;
width: 80px;
height: 80px;
}
.lds-hourglass:after {
content: " ";
display: block;
border-radius: 50%;
width: 0;
height: 0;
margin: 8px;
box-sizing: border-box;
border: 32px solid #262730;
border-color: #262730 transparent #262730 transparent;
animation: lds-hourglass 1.2s infinite;
}
@keyframes lds-hourglass {
0% {
transform: rotate(0);
animation-timing-function: cubic-bezier(0.55, 0.055, 0.675, 0.19);
}
50% {
transform: rotate(900deg);
animation-timing-function: cubic-bezier(0.215, 0.61, 0.355, 1);
}
100% {
transform: rotate(1800deg);
}
}
</style>
'''
dark='''
<div class="lds-hourglass"></div>
<style type='text/css'>
.lds-hourglass {
display: inline-block;
position: relative;
width: 80px;
height: 80px;
}
.lds-hourglass:after {
content: " ";
display: block;
border-radius: 50%;
width: 0;
height: 0;
margin: 8px;
box-sizing: border-box;
border: 32px solid #fafafa;
border-color: #fafafa transparent #fafafa transparent;
animation: lds-hourglass 1.2s infinite;
}
@keyframes lds-hourglass {
0% {
transform: rotate(0);
animation-timing-function: cubic-bezier(0.55, 0.055, 0.675, 0.19);
}
50% {
transform: rotate(900deg);
animation-timing-function: cubic-bezier(0.215, 0.61, 0.355, 1);
}
100% {
transform: rotate(1800deg);
}
}
</style>
'''
#---------------------------------------------------------------------------------------------------------
# Theming
extra ='''
[server]
runOnSave = true
headless = true
enableCORS = true
'''
def update_theme(primaryColor, backgroundColor, secondaryBackgroundColor, textColor, font):
    """Write the Streamlit theme config to .streamlit/config.toml.

    Emits a [theme] section with the given colors/font, followed by the
    module-level ``extra`` server options. Overwrites any existing file.
    """
    # Theme Base
    theme_data = [
        '[theme]\n\n',
        'primaryColor=\"%s\"\n' % (primaryColor),
        'backgroundColor=\"%s\"\n' % (backgroundColor),
        'secondaryBackgroundColor=\"%s\"\n' % (secondaryBackgroundColor),
        'textColor=\"%s\"\n' % (textColor),
        'font=\"%s\"\n' % (font),
    ]
    # 'with' guarantees the handle is closed even if a write fails
    # (the original open()/close() pair leaked it on error).
    with open('.streamlit/config.toml', 'w+') as theme_file:
        theme_file.writelines(theme_data)
        theme_file.write(extra)
# SessionState.trigger_rerun()
# Starting with the process
# Side Bar comfiguration
st.set_page_config( # Alternate names: setup_page, page, layout
layout="wide", # Can be "centered" or "wide". In the future also "dashboard", etc.
initial_sidebar_state="expanded", # Can be "auto", "expanded", "collapsed"
)
st.sidebar.title('Covid19 Data Visualizer')
pages=['Latest News','Google\'s Mobility Report','Vaccination Reports',
'Map','Data Reports','Forecast Reports',
]
out=st.sidebar.radio('Page:',pages)
st.sidebar.header('Theme:')
theme=st.sidebar.selectbox('Select your theme:',['Dark','Light','Quiet-Light','Solarized'],)
selected_theme=st.sidebar.empty()
# End of side bar configuration
# Theme Configuration
if theme=='Quiet-Light':
primaryColor="#6eb52f"
backgroundColor="#f0f0f5"
secondaryBackgroundColor="#e0e0ef"
textColor="#262730"
font="sans serif"
elif theme=='Dark':
primaryColor="#F63366"
backgroundColor="#0e1117"
secondaryBackgroundColor="#31333F"
textColor="#fafafa"
font="sans serif"
elif theme=='Light':
primaryColor="#f63366"
backgroundColor="#FFFFFF"
secondaryBackgroundColor="#f0f2f6"
textColor="#262730"
font="sans serif"
else:
primaryColor="#d33682"
backgroundColor="#002b36"
secondaryBackgroundColor="#586e75"
textColor="#fafafa"
font="Monospace"
update_theme(primaryColor,backgroundColor,secondaryBackgroundColor,textColor,font)
########################################################################################################################################################################
#################################################################################### Functions #########################################################################
########################################################################################################################################################################
def magnify():
    """CSS selector rules (pandas Styler format) that enlarge table
    cells under the cursor."""
    header = dict(selector="th", props=[("font-size", "7pt")])
    cells = dict(selector="td", props=[("padding", "4px 4px")])
    header_hover = dict(selector="th:hover", props=[("font-size", "13pt")])
    cell_hover = dict(
        selector="tr:hover td:hover",
        props=[("max-width", "200px"),
               ("font-size", "14pt"),
               ("color", "#4f6d7a")],
    )
    return [header, cells, header_hover, cell_hover]
def prepare_vtdata():
    """Build per-state vaccination and testing totals from local CSVs.

    Returns
    -------
    (df_vacc, df_test) : tuple of pandas.DataFrame
        Both carry 'State', 'Status' and 'Total' columns; Status is
        'Vaccinated' resp. 'Tested'.
    """
    df = pd.read_csv("vaccine_doses_statewise.csv", index_col='State')
    total = []
    # Row-wise sum of all dose columns for each state.
    for index in df.index:
        total.append(df[df.index == index].sum(axis=1).values[0])
    df['Total'] = total
    df['Status'] = 'Vaccinated'
    df.reset_index(inplace=True)
    # Drop the aggregate rows the source file carries.
    df.drop([df[df.State == 'Miscellaneous'].index[0], df[df.State == 'Total'].index[0]], inplace=True, axis=0)
    df_vacc = pd.DataFrame(df, columns=['Total', 'Status', 'State'])
    dt = pd.read_csv("statewise_tested_numbers_data.csv")
    dt['Updated On'] = pd.to_datetime(dt['Updated On'], format='%d/%m/%Y')
    df_test = pd.DataFrame(columns=['State', "Status", 'Total'])
    # Yesterday's testing snapshot. NOTE(review): get_group raises
    # KeyError if the CSV has no rows dated yesterday -- confirm the
    # upstream refresh cadence guarantees one.
    dt_grp = dt.groupby(dt['Updated On']).get_group((datetime.datetime.now() - datetime.timedelta(days=1)).strftime("%d" + "/" + "%m" + "/" + "%Y"))
    df_test["Total"] = dt_grp["Total Tested"].fillna(0)
    df_test['Status'] = 'Tested'
    df_test['State'] = dt_grp['State'].unique()
    # The testing feed may lack this UT; add a zero row so charts align.
    if (dt_grp['State'] == 'Dadra and Nagar Haveli and Daman and Diu').all() == False:
        df_test.loc[len(df_test.index)] = ['Dadra and Nagar Haveli and Daman and Diu', "Tested", 0]
    return (df_vacc, df_test)
def scrape_data():
    """Scrape COVID headlines from two news sites into card_data_2.pkl.

    Collects parallel img_src / link / text lists: indiatoday supplies
    images+links from its left column and captions from the right
    column; india.com supplies all three fields per list item.
    """
    card_data = {'img_src': [], 'link': [], 'text': []}
    with st.spinner('Getting Latest News'):
        res = rq.get("https://www.indiatoday.in/coronavirus")
        soup = BeautifulSoup(res.text)
        corona_data = soup.find('div', id='block-views-pollution-coronavirus')
        # Left column holds thumbnails/links, right column the headline text.
        pollution_left = corona_data.find_all('div', class_='pollution-left')
        pollution_right = corona_data.find_all('div', class_='pollution-right')
        for each_pollution in pollution_left:
            card_data['img_src'].append(each_pollution.a.img.get('src'))
            card_data['link'].append('https://www.indiatoday.in/' + each_pollution.a.get('href'))
        for each_pollution in pollution_right:
            card_data['text'].append(each_pollution.text)
        res = rq.get("https://www.india.com/topic/coronavirus/")
        soup = BeautifulSoup(res.text)
        aside = soup.find('aside', class_="row topic-strlist")
        corona_data = aside.find('ul')
        for each_item in corona_data.find_all('li'):
            card_data['link'].append(each_item.a.get('href'))
            card_data['img_src'].append(each_item.a.img.get('data-lazy-src'))
            card_data['text'].append(each_item.div.h3.a.get_text())
        print('Dumped Data')
        # NOTE(review): the open() handle is never explicitly closed.
        pk.dump(card_data, open('card_data_2.pkl', 'wb'))
def quick_plot(data, categ, color, img):
    """Area chart of one mobility column over time.

    Parameters
    ----------
    data : DataFrame indexed by date.
    categ : name of the column to plot.
    color : marker/fill color.
    img : URL of the category icon shown above the plot corner.
    """
    fig = go.Figure()
    fig.add_traces(go.Scatter(
        x=data.index, y=data[categ],
        fill='tozeroy',
        visible=True,
        marker={'color': color},
        showlegend=False,
        name=categ.replace('_', ' ').capitalize()
    ))
    # Watermark icon: use the per-category image the caller supplies
    # (previously the `img` argument was ignored in favor of a
    # hard-coded generic flaticon URL).
    fig.add_layout_image(
        dict(
            source=img,
            xref="paper", yref="paper",
            x=1, y=1.05,
            sizex=0.2, sizey=0.2,
            xanchor="right", yanchor="bottom",
            sizing="contain",
            opacity=0.5,
            layer="below")
    )
    fig.update_layout(title=categ.replace('_', ' ').capitalize(), dragmode=False)
    return fig
def getmr_theme(theme):
    """Map the app theme name to the mobility-report widget theme."""
    light_themes = ('Light', 'Quiet-Light')
    return 'light' if theme in light_themes else 'dark'
def get_map_theme(theme):
    """Map the app theme name to a plotly template string
    (falls back to the Solarized dark combo)."""
    templates = {
        'Light': 'plotly',
        'Dark': 'plotly_dark',
        'Quiet-Light': 'plotly+quiet_light',
    }
    return templates.get(theme, 'plotly_dark+solarized')
def get_color_scale(info):
    """Two-stop [light, dark] color scale for a case-status choropleth
    (unknown statuses get a neutral grey scale)."""
    scales = {
        'Hospitalized': ['#DB8EE1', '#8C2C85'],
        'Recovered': ['#60D0C9', '#195942'],
        'Deceased': ['#94B6C7', '#4F7592'],
    }
    return scales.get(info, ['#C5BDB8', '#5B504A'])
def plot_map_all(cdata, date, india_states, info, theme):
    """Choropleth of one status column over Indian states for one date.

    Parameters
    ----------
    cdata : DataFrame with Date/State/GeoID and the status columns.
    date : date string matching the frame's 'Date' format (dd/mm/YYYY).
    india_states : geojson whose feature ids match cdata['GeoID'].
    info : column to color by ('Hospitalized', 'Recovered', ...).
    theme : plotly template name.
    """
    qt = cdata[cdata['Date'] == date]
    # States with no row for this date get explicit zero rows so every
    # polygon still renders.
    missed_data = {key: [] for key in qt}
    for state in set(cdata.State.unique()).difference(set(qt.State)):
        missed_data['Date'].append(date)
        missed_data['State'].append(state)
        missed_data['Deceased'].append(0)
        missed_data['Recovered'].append(0)
        missed_data['Migrated_Other'].append(0)
        missed_data['Hospitalized'].append(0)
        missed_data['GeoID'].append(str(cdata[cdata.State == state].iloc[0]['GeoID']))
    # DataFrame.append was deprecated in pandas 1.4 and removed in 2.0;
    # pd.concat is the supported equivalent.
    qt = pd.concat([qt, pd.DataFrame(missed_data)], ignore_index=True)
    states = list(qt['State'])
    vals = list(qt[info])
    hoverdata = ['<b>{}</b>: {}'.format(s, v) for s, v in zip(states, vals)]
    fig = px.choropleth(
        qt,
        locations="GeoID",
        geojson=india_states,
        color=info,
        center={"lat": 22.4797, "lon": 77.8969},
        title="India Corona Stats",
        color_continuous_scale=get_color_scale(info)
    )
    fig.update_geos(fitbounds="locations", visible=False)
    fig.update_layout(margin={"r": 0, "t": 0, "l": 0, "b": 0}, dragmode=False, template=theme)
    fig.update_traces(hovertemplate=hoverdata)
    return fig
def offset_signal(signal, marker_offset):
    """Shrink *signal* toward zero by *marker_offset*.

    Values whose magnitude is within the offset collapse to 0, so stem
    lines stop just short of their markers instead of overshooting.
    """
    if abs(signal) <= marker_offset:
        return 0
    shift = -marker_offset if signal > 0 else marker_offset
    return signal + shift
def forecast_india(daywise, n_days, theme=get_map_theme(theme)):
    """Four stacked stem plots of forecast case counts for India.

    NOTE(review): despite the first parameter's name, this reads the
    pre-computed 'forecasts.csv' and never uses *daywise* -- confirm.
    The *theme* default is evaluated once at import time from the
    module-level ``theme`` selection, not per call.
    """
    cdata = pd.read_csv('forecasts.csv')
    cdata.set_index('date', inplace=True)
    state = 'India'
    x_date = cdata.index
    fig_data = []
    fig_layouts = []
    titles = ['Hospitalized in %s' % (state), 'Recovered in %s' % (state),
              'Deceased in %s' % (state), 'Migrated_Other in %s' % (state)]
    fig = make_subplots(rows=4, cols=1, shared_xaxes=True, vertical_spacing=0.05, subplot_titles=titles,)
    cc = 1  # subplot row cursor; also selects the axis ids below
    # [marker color, stem color] per status.
    color = {'Hospitalized': ['#eb3758', '#eb3a37'], 'Recovered': ['#36eb58', '#36eba0'],
             'Deceased': ['#767f7b', '#848685'], 'Migrated_Other': ['#974cc9', '#b04cc9']}
    for status in ['Hospitalized', 'Recovered', 'Deceased', 'Migrated_Other']:
        # Last n_days of the series. NOTE(review): x_date is the full
        # index while y is truncated -- confirm the intended alignment.
        y_status = list(cdata[status])[-n_days:]
        fig.add_trace(go.Scatter(
            x=x_date,
            y=y_status,
            mode='markers',
            marker=dict(color=color[status][0])
        ), row=cc, col=1)
        # Row 1 uses the bare 'x'/'y' axis ids; later rows use 'x2'/'y2' etc.
        xref, yref = None, None
        if cc == 1:
            xref = 'x'
            yref = 'y'
        else:
            xref = 'x' + str(cc)
            yref = 'y' + str(cc)
        # One vertical stem from 0 up to just short of each marker.
        fig_layouts.extend([dict(
            type='line',
            xref=xref,
            yref=yref,
            x0=x_date[i],
            y0=0,
            x1=x_date[i],
            y1=offset_signal(y_status[i], marker_offset=0.04),
            line=dict(
                color=color[status][1],
                width=1
            )
        ) for i in range(len(y_status))])
        cc += 1
    fig.update_layout(showlegend=False, template=theme,
                      shapes=fig_layouts, height=900,
                      )
    # fig.show()
    return fig
def daywise_india(daywise, n_days, theme=get_map_theme(theme)):
    """Four stacked stem plots of daily case counts for India.

    Mirrors forecast_india() but plots the aggregated *daywise* frame
    supplied by the caller. NOTE(review): set_index(inplace=True) below
    mutates the caller's DataFrame -- confirm that side effect is OK.
    The *theme* default is evaluated once at import time from the
    module-level ``theme`` selection, not per call.
    """
    cdata = daywise
    cdata.set_index('date', inplace=True)
    state = 'India'
    x_date = cdata.index
    fig_data = []
    fig_layouts = []
    titles = ['Hospitalized in %s' % (state), 'Recovered in %s' % (state),
              'Deceased in %s' % (state), 'Migrated_Other in %s' % (state)]
    fig = make_subplots(rows=4, cols=1, shared_xaxes=True, vertical_spacing=0.05, subplot_titles=titles,)
    cc = 1  # subplot row cursor; also selects the axis ids below
    # [marker color, stem color] per status.
    color = {'Hospitalized': ['#eb3758', '#eb3a37'], 'Recovered': ['#36eb58', '#36eba0'],
             'Deceased': ['#767f7b', '#848685'], 'Migrated_Other': ['#974cc9', '#b04cc9']}
    for status in ['Hospitalized', 'Recovered', 'Deceased', 'Migrated_Other']:
        # Last n_days of the series. NOTE(review): x_date is the full
        # index while y is truncated -- confirm the intended alignment.
        y_status = list(cdata[status])[-n_days:]
        fig.add_trace(go.Scatter(
            x=x_date,
            y=y_status,
            mode='markers',
            marker=dict(color=color[status][0])
        ), row=cc, col=1)
        # Row 1 uses the bare 'x'/'y' axis ids; later rows use 'x2'/'y2' etc.
        xref, yref = None, None
        if cc == 1:
            xref = 'x'
            yref = 'y'
        else:
            xref = 'x' + str(cc)
            yref = 'y' + str(cc)
        # One vertical stem from 0 up to just short of each marker.
        fig_layouts.extend([dict(
            type='line',
            xref=xref,
            yref=yref,
            x0=x_date[i],
            y0=0,
            x1=x_date[i],
            y1=offset_signal(y_status[i], marker_offset=0.04),
            line=dict(
                color=color[status][1],
                width=1
            )
        ) for i in range(len(y_status))])
        cc += 1
    fig.update_layout(showlegend=False, template=theme,
                      shapes=fig_layouts, height=900,
                      )
    # fig.show()
    return fig
#-----------------------------------------------------------------------------------------------------------------------------
if out == 'Map':
    # Map page: choropleth of one status column on a chosen date.
    st.header('Map')
    # Bound the date picker by the dates actually present in the data.
    Dates = pd.to_datetime(cdata.Date, dayfirst=True)
    date = st.date_input('Select Day', min_value=Dates.min(), max_value=Dates.max())
    # state=st.selectbox('Select State',list(loader.get_cdata().State.unique()))
    info = st.selectbox('Select Information Type', ['Hospitalized', 'Recovered', 'Migrated_Other', 'Deceased'])
    date = pd.to_datetime(date, dayfirst=True)
    # Static map: disable zooming, toolbar and editing.
    config = dict({'scrollZoom': False,
                   'displayModeBar': False,
                   'editable': False})
    with st.spinner('Preparing Map'):
        st.plotly_chart(
            plot_map_all(cdata, date.strftime('%d/%m/%Y'), india_states, info, get_map_theme(theme)),
            use_container_width=False, **{'config': config})
if out=='Latest News':
if session_state.IS_scrapped:
print('Skipping Scraping')
else:
scrape_data()
session_state.IS_scrapped=True
st.header('Latest News')
card_data=pk.load(open('card_data_2.pkl','rb'))
my_card='\n'.join(open('card.html','r').readlines())
my_card=my_card.replace('st_backgroundColor',backgroundColor)
my_card=my_card.replace('st_textColor',textColor)
my_card=my_card.replace('st_secondaryBackgroundColor',secondaryBackgroundColor)
rows=int(len(card_data['img_src'])/3)
if rows*3>len(card_data['img_src']):
rows-=1
ind=0
for i in range(rows):
new_card=my_card
for i in range(1,4):
new_card=new_card.replace('st_link_'+str(i),card_data['link'][ind])
new_card=new_card.replace('st_text_'+str(i),card_data['text'][ind])
new_card=new_card.replace('st_img_src_'+str(i),card_data['img_src'][ind])
ind+=1
components.html(new_card,height=400,width=1200)
if out=='Google\'s Mobility Report':
mr_2020=pd.read_csv('2020_IN_Region_Mobility_Report.csv')
mr_2021=pd.read_csv('2021_IN_Region_Mobility_Report.csv')
IN20=mr_2020[mr_2020.sub_region_1.isna()]
mr_2020.drop(IN20.index,axis=0,inplace=True)
IN21=mr_2021[mr_2021.sub_region_1.isna()]
mr_2021.drop(IN21.index,axis=0,inplace=True)
IN20.index=IN20.date
IN21.index=IN21.date
mrdt_links={
'bus':['transit_stations_percent_change_from_baseline','#d01884','https://www.dropbox.com/s/c687b5muzd2lqrx/bus.svg?raw=1'],
'home':['residential_percent_change_from_baseline','#8430ce','https://www.dropbox.com/s/05ifsbhl84fze3c/home.svg?raw=1'],
'hospital':['grocery_and_pharmacy_percent_change_from_baseline','#129eaf','https://www.dropbox.com/s/l6spsoxatmme6ca/hospital.svg?raw=1'],
'office':['workplaces_percent_change_from_baseline','#d56e0c','https://www.dropbox.com/s/8uws8tb8kpi88eh/office.svg?raw=1'],
'park':['parks_percent_change_from_baseline','#188038','https://www.dropbox.com/s/cwiz5g67a9wh681/park.svg?raw=1'],
'cart':['retail_and_recreation_percent_change_from_baseline','#1967d2','https://www.dropbox.com/s/afnvdw1zn6q5kne/trolley.svg?raw=1']
}
kachra={
'cart':'''Mobility trends for places such as
restaurants, cafés, shopping centres,
theme parks, museums, libraries and
cinemas.''',
'hospital':'''Mobility trends for places such as
supermarkets, food warehouses,
farmers markets, specialty food
shops and pharmacies.''',
'park':'''Mobility trends for places like
national parks, public beaches,
marinas, dog parks, plazas and public
gardens.''',
'bus':'''Mobility trends for places that are public
transport hubs, such as underground, bus and
train stations.''',
'office':'''Mobility trends for places of work''',
'home':'''Mobility trends for places of residence.''',
}
simp='''
<style type='text/css'>
.shadow {
transition: .5s ease;
background-color: st_back;
color: st_tc;
}
.shadow:hover{
box-shadow:
1px 1px #373737,
2px 2px #373737,
3px 3px #373737,
4px 4px #373737,
5px 5px #373737,
6px 6px #373737;
-webkit-transform: translateX(-3px);
transform: translateX(-3px);
transition: .5s ease;
}
</style>
<div class=shadow>
st_text
</div>
'''
my_card='\n'.join(open('html_test.html','r').readlines())
header=st.empty()
header.header('Google\'s Mobility Report 2021')
with st.spinner('loading..'):
components.html(my_card.replace('theme17',getmr_theme(theme)))
st.write("The data shows how visits to places, such as corner shops and parks, are changing in each geographic region")
year=st.selectbox('Select the year for which you want to view the data: ',['2021','2020'])
if year=='2020':
header.header('Google\'s Mobility Report 2020')
else:
header.header('Google\'s Mobility Report 2021')
categ=st.selectbox('Select data type to view:',[x.capitalize() for x in list(mrdt_links.keys())])
sel_categ=mrdt_links[categ.lower()]
if year=='2021':
data=IN21
else:
data=IN20
c1,c2=st.beta_columns(2)
c1.plotly_chart(quick_plot(data,sel_categ[0],sel_categ[1],sel_categ[2]))
card_2=simp.replace('st_back',primaryColor)
card_2=simp.replace('st_tc',textColor)
card_2=card_2.replace('st_text',kachra[categ.lower()])
avg=data[sel_categ[0]].mean()
c2.markdown('''### {} compared to the baseline
'''.format(str(avg)))
components.html(card_2)
st.header('State Level Mobility Reports')
ST20=mr_2020[mr_2020.sub_region_2.isna()]
ST21=mr_2021[mr_2021.sub_region_2.isna()]
if year=='2021':
ST=ST21
else:
ST=ST20
states=ST.sub_region_1.unique()
state=st.selectbox('Select State for state level view:',states)
c1,c2=st.beta_columns(2)
c1.plotly_chart(quick_plot(ST[ST['sub_region_1']==state],sel_categ[0],sel_categ[1],sel_categ[2]))
avg=ST[ST['sub_region_1']==state][sel_categ[0]].mean()
c2.markdown('''### {} compared to the baseline
'''.format(str(avg)))
components.html(card_2)
if out=='Vaccination Reports':
st.header('Vaccination and Testing status accross india')
vacc_data, test_data = prepare_vtdata()
color_vacc=['#B12F95','#ECACD3']
color_test=['#ADABED','#2E7FDC']
config={
'scrollZoom': False,
'displayModeBar': False,
'editable': False,
}
vacc_data.sort_values(by='Total',inplace=True)
test_data.sort_values(by='Total',inplace=True)
v_col=co.n_colors(co.hex_to_rgb(color_vacc[0]),co.hex_to_rgb(color_vacc[1]),len(vacc_data.Total))
t_col=co.n_colors(co.hex_to_rgb(color_test[0]),co.hex_to_rgb(color_test[1]),len(vacc_data.Total))
fig_bar = go.Figure()
fig_bar.add_trace(
go.Bar(
x = vacc_data.Total,
y = vacc_data.State,
visible=True,showlegend=False,orientation='h',marker=dict(color=['rgb'+str(x) for x in v_col]),
)
)
fig_bar.add_trace(
go.Bar(
x = test_data.Total,
y = test_data.State,
orientation='h',marker=dict(color=['rgb'+str(x) for x in t_col]),
visible=False,
showlegend=False,
)
)
fig_bar.update_layout(dragmode=False,showlegend=False,template=get_map_theme(theme),width=1200,height=1000,
updatemenus=[go.layout.Updatemenu(
active=0,
buttons=list(
[dict(label = 'Vaccinated',
method = 'update',
args = [{'visible': [True, False]},
{'title': 'Vaccinated',
'showlegend':True}]),
dict(label = 'Tested',
method = 'update',
args = [{'visible': [False, True]},
{'title': 'Tested',
'showlegend':True}]),
]),
pad={"r": 10, "t": 2},
showactive=True,
x=1.0,
xanchor="right",
y=1.3,
yanchor="top"
)
]
)
fig=px.bar_polar(vacc_data.sort_values(by='Total')[-15:],
title='Top 15 states in vaccination',
theta="State",r='Total',
color='Total',template=get_map_theme(theme),barmode='relative',
color_continuous_scale= color_vacc)
fig.update_layout(dragmode=False)
fig1,fig2=st.beta_columns([1,1])
fig1.plotly_chart(fig,**{'config':config})
fig=px.bar_polar(test_data.sort_values(by='Total')[-15:],
title='Top 15 states in testing',
theta="State",r='Total',
color='Total',template=get_map_theme(theme),barmode='relative',
color_continuous_scale= color_test)
fig.update_layout(dragmode=False)
fig2.plotly_chart(fig,**{'config':config})
st.plotly_chart(fig_bar,**{'config':config})
if out == 'Forecast Reports':
    # Forecast page: country-wide stem plots for a user-chosen horizon.
    st.header('Forecast Reports')
    with st.spinner('Loading Data'):
        mata = pd.read_csv('final_data.csv')
        mata.Date = pd.to_datetime(mata.Date, dayfirst=True)
        # Collapse per-state rows into one country-wide daily series.
        daywise = mata.groupby('Date').agg(sum)
        daywise.sort_index(ascending=True, inplace=True)
    days = st.slider(label='Number of days to forecast',
                     min_value=1, max_value=100, step=1)
    if st.button('Forecast'):
        with st.spinner('Please wait... Model are working with numbers'):
            st.plotly_chart(forecast_india(daywise, days))
if out == 'Data Reports':
    # Data Reports page: themed, styled table of one state's raw rows.
    data = pd.read_csv('final_data.csv')
    # set_index returns a new frame; assign it back so sort_index below
    # actually sorts by date (the original discarded the result and
    # "sorted" the default integer index, a no-op).
    data = data.set_index(data.Date)
    data.sort_index(inplace=True)
    state = st.selectbox('Select the state for state related data: ', list(data.State.unique()))
    data = data[data.State == state]
    styled_data = data.style
    # Hover/table CSS; colors follow the active Streamlit theme.
    styles = [
        dict(selector="tr:hover", props=[("background-color", "#eccbd9")]),
        dict(selector="th:hover", props=[("background-color", "#eccbd9")]),
        dict(selector="th", props=[('background-color', '{}'.format(secondaryBackgroundColor)),
                                   ("font-size", "120%"),
                                   ("text-align", "center"),
                                   ('color', '#ffffff'),
                                   ('border', '1px solid #dbe9ee')]),
        dict(selector='tbody', props=[('color', '{}'.format(secondaryBackgroundColor)), ]),
        dict(selector='tr', props=[('background-color', '{}'.format(secondaryBackgroundColor)),
                                   ('color', '{}'.format(textColor)),
                                   ('font-family', 'Helvetica')]),
        dict(selector='td', props=[('border', '1px solid {}'.format(textColor))])]
    styles.extend(magnify())
    df = styled_data.set_table_styles(styles)
    components.html(df.render(), width=600, height=800, scrolling=True)
| {"/visualizer_heroku.py": ["/loader.py"]} |
66,507 | sanaynesargi/CarControlForPi | refs/heads/master | /arrow.py | import pygame
import requests
class Arrow:
    """Clickable on-screen button that drives the car over HTTP.

    Each Arrow renders as a colored rectangle with an icon and, while
    pressed, POSTs its direction to the control server.
    """

    def __init__(self, x, y, width, height, surface, img, name, ovverride):
        # Position and size of the button rectangle, in screen pixels.
        self.x = x
        self.y = y
        self.img = img
        self.surf = surface
        self.pressed = False
        # One of 'up'/'down'/'left'/'right'/'stop'; selects the direction
        # value POSTed to the server.
        self.name = name
        self.width = width
        self.height = height
        # (sic) "override": when True the idle button is drawn green
        # instead of pink.
        self.ovverride = ovverride
        self.pressable = True

    def draw(self):
        """Render the button and, if pressed, send its command.

        NOTE(review): this POSTs on every frame while held -- network
        latency here directly throttles the render loop; confirm that
        is acceptable. The direction travels in a request *header*
        named 'direction', not in the body.
        """
        url = 'http://192.168.86.30:5000/'  # car control server (LAN address)
        if self.pressed:
            if self.name != 'stop':
                if self.name == 'up':
                    requests.post(url, headers={'direction': 'forward'})
                if self.name == 'down':
                    requests.post(url, headers={'direction': 'backward'})
                if self.name == 'left':
                    requests.post(url, headers={'direction': 'left'})
                if self.name == 'right':
                    requests.post(url, headers={'direction': 'right'})
                pygame.draw.rect(self.surf, (0, 0, 255), (self.x, self.y, self.width, self.height))
            elif self.pressable and self.name == 'stop':
                requests.post(url, headers={'direction': 'stop'})
                pygame.draw.rect(self.surf, (0, 20, 128), (self.x, self.y, self.width, self.height))
        else:
            if self.ovverride:
                pygame.draw.rect(self.surf, (0, 255, 0), (self.x, self.y, self.width, self.height))
            else:
                pygame.draw.rect(self.surf, (255, 20, 128), (self.x, self.y, self.width, self.height))
        self.surf.blit(self.img, (self.x + 5.35, self.y + 5.35))

    def get_dims(self):
        # Fixed nominal size. NOTE(review): ignores self.width/height,
        # as does check_press -- confirm non-50px arrows are intended.
        return (50, 50)

    def check_press(self, mx, my):
        """Return True if screen point (mx, my) lies inside a 50x50 box
        anchored at the arrow's origin."""
        return ((mx > self.x) and (mx < (self.x + 50))) and ((my > self.y) and (my < (self.y + 50)))
| {"/main.py": ["/arrow.py"], "/v1.py": ["/arrow.py"]} |
66,508 | sanaynesargi/CarControlForPi | refs/heads/master | /main.py | import pygame, time, os
from arrow import Arrow
pygame.init()
# PYGAME VARIABLES
S_WIDTH, S_HEIGHT = 300, 300
DIMENSIONS = (S_WIDTH, S_HEIGHT)
win = pygame.display.set_mode(DIMENSIONS)
# COLORS
WHITE = (255, 255, 255)
RED = (255, 10, 10)
GREEN = (0, 255, 0)
BLUE = (0, 0, 255)
BLACK = (0, 0, 0)
def load_images():
    """Load the arrow icons from images/arrows/, returned in the order
    ['up', 'down', 'right', 'left']."""
    names = ['up', 'down', 'right', 'left']
    return [
        pygame.image.load(os.path.join('images', 'arrows', name + '.png')).convert()
        for name in names
    ]
def create_arrows(img_arr):
    """Build the five control buttons laid out around screen centre.

    NOTE(review): this indexes *img_arr* assuming the local order
    ['up', 'down', 'left', 'right'], but load_images() returns
    ['up', 'down', 'right', 'left'] -- the left/right icon/name
    pairings appear swapped; confirm which ordering is intended.
    """
    names = ['up', 'down', 'left', 'right']
    arrs = []
    # Horizontal pair: 60px either side of centre, iterated from the
    # back of the lists (indices -1 then -2).
    x, y = (S_WIDTH//2 - 25) - 60, (S_HEIGHT//2 - 25)
    for i in range(2):
        arrs.append(Arrow(x, y, 50, 50, win, img_arr[-(i + 1)], names[-(i + 1)], True))
        x += 120
    # Vertical pair: 60px above and below centre (indices 0 then 1).
    x = (S_WIDTH//2 - 25)
    y -= 60
    for i in range(2):
        arrs.append(Arrow(x, y, 50, 50, win, img_arr[i], names[i], True))
        y += 120
    # Central 30x30 stop button (ovverride=False so it idles pink).
    x, y = (S_WIDTH//2 - 15), (S_HEIGHT//2 - 15)
    stop_img = pygame.image.load(os.path.join('images', 'stop.png')).convert()
    arrs.append(Arrow(x, y, 30, 30, win, stop_img, 'stop', False))
    return arrs
def initialize():
    """Load the icon images and build the on-screen arrow controls."""
    return create_arrows(load_images())
ARROWS = initialize()
def draw():
    """Clear the window to black, render every control, flip the display."""
    win.fill(BLACK)
    for arrow in ARROWS:
        arrow.draw()
    pygame.display.update()
def toggle_arrow_presses(mode):
    """Enable/disable pressing for the four directional arrows
    (excludes the trailing stop button)."""
    directional = ARROWS[:4]
    for arrow in directional:
        arrow.pressable = mode
def main():
    """Event loop: render the controls and track mouse presses until quit."""
    pygame.display.set_caption("Car Control Wizard")
    run = True
    while run:
        draw()
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                run = False
        # Reset every arrow except the stop button (which latches), then
        # re-press whatever is under the cursor while LMB is held.
        for a in ARROWS:
            if a != ARROWS[-1]:
                a.pressed = False
                #toggle_arrow_presses(False)
            if pygame.mouse.get_pressed()[0]:
                # Clicking the latched stop button again unlatches it.
                if a == ARROWS[-1] and a.pressed:
                    a.pressed = False
                    #toggle_arrow_presses(True)
                else:
                    a.pressed = a.check_press(pygame.mouse.get_pos()[0], pygame.mouse.get_pos()[1])
    pygame.quit()
if __name__ == "__main__":
main() | {"/main.py": ["/arrow.py"], "/v1.py": ["/arrow.py"]} |
66,509 | sanaynesargi/CarControlForPi | refs/heads/master | /v1.py | # import RPi.GPIO as GPIO
import pygame, time, os
from arrow import Arrow
pygame.init()
# GPIO.setwarnings(False)
# GPIO.setmode(GPIO.BCM)
# GLOBAL VARIABLES
FIRST_CONTROL_PINS = [5, 6, 19, 26]
SECOND_CONTOL_PINS = [21, 20, 16, 12]
# PYGAME VARIABLES
S_WIDTH, S_HEIGHT = 300, 300
DIMENSIONS = (S_WIDTH, S_HEIGHT)
win = pygame.display.set_mode(DIMENSIONS)
# COLORS
WHITE = (255, 255, 255)
RED = (255, 10, 10)
GREEN = (0, 255, 0)
BLUE = (0, 0, 255)
BLACK = (0, 0, 0)
def load_images():
    """Load the four arrow icons from ./arrows, returned in the order
    ['up', 'down', 'left', 'right']."""
    names = ['up', 'down', 'left', 'right']
    return [
        pygame.image.load(os.path.join('arrows', name + '.png')).convert()
        for name in names
    ]
def create_arrows(img_arr):
    """Build the four directional Arrow buttons around screen centre.

    Arrow's constructor takes (x, y, width, height, surface, img, name,
    ovverride); the old 4-argument calls no longer match that signature
    and raised TypeError, so pass the full set the way main.py does.
    """
    names = ['up', 'down', 'left', 'right']
    arrs = []
    # Horizontal pair: 60px either side of centre (indices -1 then -2).
    x, y = (S_WIDTH//2 - 25) - 60, (S_HEIGHT//2 - 25)
    for i in range(2):
        arrs.append(Arrow(x, y, 50, 50, win, img_arr[-(i + 1)], names[-(i + 1)], True))
        x += 120
    # Vertical pair: 60px above and below centre (indices 0 then 1).
    x = (S_WIDTH//2 - 25)
    y -= 60
    for i in range(2):
        arrs.append(Arrow(x, y, 50, 50, win, img_arr[i], names[i], True))
        y += 120
    return arrs
def initialize():
    """Load the arrow images and build the Arrow controls."""
    return create_arrows(load_images())
ARROWS = initialize()
def draw():
    """Clear the window to black, render each arrow, refresh the display."""
    win.fill(BLACK)
    for arrow in ARROWS:
        arrow.draw()
    pygame.display.update()
def main():
    """Window loop: redraw the controls until the user closes the window."""
    pygame.display.set_caption("Car Control Wizard")
    run = True
    while run:
        draw()
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                run = False
    pygame.quit()
if __name__ == "__main__":
main() | {"/main.py": ["/arrow.py"], "/v1.py": ["/arrow.py"]} |
66,525 | cherylyli/botlikeme | refs/heads/master | /dictogram.py | # Make a dictionary to store all the tokens from the sentences
# Each key is associated with an array of dictionaries
# Key: a word
# Value: all the words that came after that word, with weights assigned to it
# TODO:
# count how many words in a sentence, use this to influence length of sentence
import re
class Dictogram:
    """First-order Markov model of word transitions.

    ``dictionary`` maps each token to a histogram (dict) of the tokens
    that followed it, with occurrence counts as weights. Sentences are
    bracketed by the synthetic tokens '[START]' and '[END]'.
    """

    def __init__(self, dictionary=None):
        """Start from *dictionary* if given (and non-empty), else empty."""
        if dictionary:
            self.dictionary = dictionary
        else:
            self.dictionary = {}

    def add_tokens(self, string):
        """Tokenize one sentence and fold its transitions into the model.

        Splits on (and keeps) runs of the characters '().!? ', wraps the
        token list in [START]/[END], drops whitespace-only tokens, and
        increments the count of every adjacent (token, next) pair.
        Returns the updated dictionary.
        """
        # The capture group makes re.split keep punctuation runs as
        # tokens of their own.
        tokens = re.split(r'([().!? ]+)', string)
        tokens.insert(0, "[START]")
        tokens.append("[END]")
        # Discard tokens that are empty after stripping whitespace
        # (the split emits separators and empty edge strings).
        tokens = [tok for tok in tokens if tok.strip() != ""]
        # Count each adjacent pair.
        for cur, nxt in zip(tokens, tokens[1:]):
            histogram = self.dictionary.setdefault(cur, {})
            histogram[nxt] = histogram.get(nxt, 0) + 1
        return self.dictionary

    def add_many_tokens(self, long_string):
        """Split *long_string* into sentences and add each one.

        NOTE: the pattern r'\\n' matches a literal backslash-n pair,
        not a newline -- the input file stores JSON-escaped newlines,
        which is exactly what this splits on.
        """
        for sentence in re.split(r'\\n', long_string):
            self.add_tokens(sentence)
        return self.dictionary
| {"/parse_text.py": ["/dictogram.py"]} |
66,526 | cherylyli/botlikeme | refs/heads/master | /generate_with_seed.py | import sys
import json
from random import randint

import numpy as np

# Seed sentence: everything passed on the command line.
arguments = ' '.join(sys.argv[1:])
sentence = '[START] ' + arguments
print("seeding with: " + arguments + " ...")

# Load the token -> {follower: count} dictionary built by parse_text.py.
# (The original loaded into a misspelled `dictonary` and left a dead
# `dictionary = {}` behind.)
with open('data/dict.txt', 'r') as fp:
    dictionary = json.loads(fp.read())

# Walk the chain starting from the last seed word until [END].
next_seed_word = sys.argv[-1]
while next_seed_word != '[END]':
    possible_words = dictionary[next_seed_word]
    key_list = list(possible_words.keys())
    value_list = list(possible_words.values())
    # Pick a follower weighted by its count.  randint is inclusive on
    # both ends, so the upper bound is total-1: with the original
    # randint(0, total) the draw could equal the total and fall through
    # the loop without choosing a word.
    new_word_place = randint(0, int(np.sum(value_list)) - 1)
    for i, num in enumerate(value_list):
        if new_word_place < num:
            next_seed_word = key_list[i]
            break
        new_word_place -= num
    sentence += " " + next_seed_word

print(sentence)
| {"/parse_text.py": ["/dictogram.py"]} |
66,527 | cherylyli/botlikeme | refs/heads/master | /data/parse_messages.py | from bs4 import BeautifulSoup
import json

# Parse the exported "messages.htm": every message thread lives in a <p>.
# `with` closes the HTML file (the original passed an open() handle to
# BeautifulSoup and never closed it).
with open("messages.htm") as html:
    soup = BeautifulSoup(html, "lxml")
message_threads = soup.find_all('p')

# Flatten every paragraph's contents into one list.  extend() keeps this
# O(n) overall; the original `messages = messages + ...` rebuilt the list
# on every iteration, which is quadratic.
messages = []
for message in message_threads:
    messages.extend(message.contents)

# Persist the corpus as a single JSON string, one message per line.
with open('input.txt', 'w') as fp:
    json.dump('\n'.join(messages), fp)
print("In total, " + str(len(messages)) + " threads.") | {"/parse_text.py": ["/dictogram.py"]} |
66,528 | cherylyli/botlikeme | refs/heads/master | /parse_text.py | from dictogram import Dictogram
import json

# Build the Markov dictionary from the scraped message corpus and persist
# it as JSON for generate_with_seed.py.
test_dict = Dictogram()
with open('data/input.txt', 'r') as fp:
    file = fp.read()
    # NOTE(review): input.txt holds a JSON-encoded string but is read raw
    # here (no json.loads), so escaped '\n' stay literal backslash-n --
    # add_many_tokens splits on exactly that sequence.
    test_sentence = test_dict.add_many_tokens(file)
with open('data/dict.txt', 'w') as output:
json.dump(test_sentence, output) | {"/parse_text.py": ["/dictogram.py"]} |
66,542 | Marck-G/IMTRA | refs/heads/master | /search_engine/reader.py | # MIT License
#
# Copyright (c) 2019 MARCK C. GUZMAN, UNAI DIAZ DE GARAYO
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import exifread
class Reader:
    """
    Thin wrapper around exifread for pulling EXIF tags out of an image.
    """
    # cached tag map; populated lazily on first access
    data = None

    def __init__(self, img):
        self.image = img

    # read all exif tags from the image
    def __read__(self):
        """
        Read the image EXIF tags into self.data.
        """
        # binary read-only mode; `with` closes the file (the original
        # also called file.close() inside the with block, redundantly)
        with open(self.image, "rb") as file:
            self.data = exifread.process_file(file)

    def get_data(self) -> dict:
        """
        :return: full image exif tags, reading the file on first call
        """
        if self.data is None:
            self.__read__()
        return self.data

    def set_image(self, img):
        """Point the reader at a new image and drop the cached tags."""
        self.image = img
        self.data = None

    def get_filter_tag(self, filter_list: list) -> dict:
        """
        :param filter_list: list of tags to return
        :return: map with the required tags and values; -1 when
            *filter_list* is None (kept for backward compatibility) and
            None when no tag matched
        """
        if filter_list is None:
            return -1
        map_out = {key: value for key, value in self.get_data().items()
                   if key in filter_list}
        return map_out if map_out else None

    def key_replace(self, map, apply=False):
        """
        Replace the original keys of the data map with the passed keys.

        :param map: hash map with the original and new keys -> {original_key: new_key}
        :param apply: if True, also replace self.data with the result
        :return: the map with the new keys
        :raises NoMapSetError: if *map* is None
        """
        if map is None:
            raise NoMapSetError("Need a map with the key to replace")
        temp_map = {map[key]: self.get_data()[key] for key in map}
        # apply change on the object if the caller wants
        if apply:
            self.data = temp_map
        return temp_map
class NoMapSetError(Exception):
    """Raised when key_replace is called without a replacement map."""
| {"/search_engine/db_manager.py": ["/utils/logger.py"], "/search_engine/__init__.py": ["/search_engine/reader.py", "/search_engine/db_manager.py"], "/image_transfer/transfer.py": ["/search_engine/__init__.py", "/utils/logger.py", "/image_transfer/data_interpreter.py"], "/__main__.py": ["/gui/__init__.py"], "/image_manager/autotagger.py": ["/image_manager/__api__.py"], "/image_transfer/__init__.py": ["/image_transfer/transfer.py", "/image_transfer/data_interpreter.py"], "/api/__init__.py": ["/search_engine/reader.py", "/image_transfer/transfer.py", "/image_manager/autotagger.py"], "/image_manager/__init__.py": ["/image_manager/__api__.py"]} |
66,543 | Marck-G/IMTRA | refs/heads/master | /image_transfer/db_manager.py | # MIT License
#
# Copyright (c) 2019 MARCK C. GUZMAN, UNAI DIAZ DE GARAYO
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import sqlite3 as db
from os import path
from utils.logger import Logger
class Manager:
    """
    SQLite-backed store for the image transfer history (singleton).
    """
    # TODO: set the proyect data estructure
    __instance__ = None
    # creation file name
    __db_file__ = 'imtr.cre.db.sql'
    __bd_name__ = 'transfer.db'
    # database deletion file name
    __db_file_delete__ = 'imtr.del.db.sql'
    # folders holding the SQL scripts and the log files
    __db_folder__ = "./"
    __log_folder__ = "./"
    # sqlite connection, opened once at class-definition time
    conn = db.connect(__bd_name__)

    # SINGLETON
    def __new__(cls, *args, **kwargs):
        if cls.__instance__ is None:
            cls.__instance__ = object.__new__(cls)
        return cls.__instance__

    def set_db_file_folder(self, base_dir):
        """Set the folder containing the SQL scripts (fluent)."""
        self.__db_folder__ = base_dir if str(base_dir).endswith('/') else base_dir + '/'
        return self

    def set_log_folder(self, base_dir):
        """Set the folder for the log files (fluent)."""
        self.__log_folder__ = base_dir if str(base_dir).endswith('/') else base_dir + '/'
        return self

    def create_db(self):
        """
        Create the database by executing the creation script.
        :return: self
        """
        # `with` closes the script file (the original leaked the handle)
        with open(self.__db_folder__ + self.__db_file__, 'r') as script:
            qry = script.read()
        c = self.conn.cursor()
        c.executescript(qry)
        c.close()
        self.conn.commit()
        self.__log__('Created The DataBase')
        return self

    def delete_db(self):
        """
        Delete the database by executing the deletion script.
        :return: self
        """
        with open(self.__db_folder__ + self.__db_file_delete__, 'r') as script:
            qry = script.read()
        c = self.conn.cursor()
        c.executescript(qry)
        c.close()
        self.conn.commit()
        self.__log__('delete the database')
        return self

    def add_item(self, data: dict):
        """
        Insert one transfer record.

        :param data: column -> value mapping, e.g.
            {origin: ..., dest: ..., date: ...}
        :return: self
        """
        # bind the values as parameters -- the original interpolated them
        # straight into the SQL text, which breaks on quoting and is
        # injection-prone
        insert = "INSERT INTO transfer({}) values({})".format(
            ",".join(data.keys()), ",".join("?" for _ in data))
        self.__log__(insert)
        self.conn.execute(insert, tuple(data.values()))
        # save all changes
        self.conn.commit()
        return self

    def get_dest(self) -> dict:
        """
        Find the destination dirs used most often (ties broken by recency).

        :return: dict mapping path -> use count (up to five entries)
        """
        rows = self.conn.execute(
            "SELECT path, count(path) c, "
            "( SELECT max(trf_date) d FROM transfer WHERE main.path = path ) dat "
            "FROM transfer main WHERE type = 'D' "
            "GROUP BY path ORDER BY c DESC, dat DESC").fetchmany(5)
        # fetchmany returns a list of row tuples; the original called
        # .keys() on it, which raised AttributeError
        return {path: count for path, count, _date in rows}

    def get_src(self) -> dict:
        """
        Find the origin dirs used most often (ties broken by recency).

        :return: dict mapping path -> use count (up to five entries)
        """
        rows = self.conn.execute(
            "SELECT path, count(path) c, "
            "( SELECT max(trf_date) d FROM transfer WHERE main.path = path ) dat "
            "FROM transfer main WHERE type = 'O' "
            "GROUP BY path ORDER BY c DESC, dat DESC").fetchmany(5)
        return {path: count for path, count, _date in rows}

    def __log__(self, text):
        """
        Append *text* (with a timestamp) to the module log.
        :param text: to include in the log file
        """
        Logger(prefix=' Image Transfer').log(text)

    def __get_db_tables__(self):
        """
        :return: the names of every table in the database, as a list
        """
        # (new helper: the original __exists__ called a method this class
        # never defined)
        cur = self.conn.execute("Select * from sqlite_master where type='table'")
        return [row[1] for row in cur]

    def __exists__(self):
        """
        Check if the db file exists and has its tables.
        :return: boolean
        """
        # the original read self.__db_name__, an attribute this class does
        # not define (AttributeError); the file name lives in __bd_name__
        if path.exists(self.__bd_name__):
            if len(self.__get_db_tables__()) != 0:
                return True
            self.__log__('No tables found')
        else:
            self.__log__('Not found db file')
        return False

    def __db_init__(self):
        # create the schema on first use (the original called the
        # nonexistent self.create_database(); the method here is create_db)
        if not self.__exists__():
            self.create_db()

    def get_transfer(self, *args, data):
        """
        Search transfers where column data['col'] contains data['value'].

        The value is bound as a parameter with '%' wildcards (the original
        used '*', which LIKE does not treat as a wildcard, and returned
        nothing).  The column name cannot be bound -- 'col' must come from
        trusted code.

        :return: list of matching rows
        """
        cur = self.conn.execute(
            'SELECT * FROM transfer WHERE {} LIKE ?'.format(data['col']),
            ('%{}%'.format(data['value']),))
        return cur.fetchall()
class ConditionsNotFoundError(Exception):
    """Raised when a query is attempted without its required conditions."""
| {"/search_engine/db_manager.py": ["/utils/logger.py"], "/search_engine/__init__.py": ["/search_engine/reader.py", "/search_engine/db_manager.py"], "/image_transfer/transfer.py": ["/search_engine/__init__.py", "/utils/logger.py", "/image_transfer/data_interpreter.py"], "/__main__.py": ["/gui/__init__.py"], "/image_manager/autotagger.py": ["/image_manager/__api__.py"], "/image_transfer/__init__.py": ["/image_transfer/transfer.py", "/image_transfer/data_interpreter.py"], "/api/__init__.py": ["/search_engine/reader.py", "/image_transfer/transfer.py", "/image_manager/autotagger.py"], "/image_manager/__init__.py": ["/image_manager/__api__.py"]} |
66,544 | Marck-G/IMTRA | refs/heads/master | /search_engine/db_manager.py | # MIT License
#
# Copyright (c) 2019 MARCK C. GUZMAN, UNAI DIAZ DE GARAYO
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import sqlite3
from os import path
from utils.logger import Logger
class DBManager:
    """
    SQLite-backed store for image EXIF metadata (singleton).
    """
    # TODO: set the proyect data estructure
    # singleton var
    __instance__ = None
    # dictionary translating EXIF tag names into db column names
    __dic__ = None
    # schema map of the database: {table: [column, ...]}
    __db__ = {}
    # creation file name
    __db_file__ = "se.cre.db.sql"
    __db_name__ = "search.db"
    # sqlite connection, opened once at class-definition time
    conn = sqlite3.connect(__db_name__)
    # db delete file
    __db_file_delete__ = "se.del.db.sql"
    # log file, contains all information about operations and errors
    __log_file__ = "../.log"
    # folders for db files and log files
    __db_folder__ = "./"
    __log_folder__ = "./"

    # SINGLETON
    def __new__(cls):
        if cls.__instance__ is None:
            cls.__instance__ = object.__new__(cls)
        return cls.__instance__

    def set_db_file_folder(self, base_dir):
        """Set the folder containing the SQL scripts (fluent)."""
        self.__db_folder__ = base_dir if str(base_dir).endswith('/') else base_dir + '/'
        return self

    def set_log_folder(self, base_dir):
        """Set the folder for the log files (fluent)."""
        self.__log_folder__ = base_dir if str(base_dir).endswith('/') else base_dir + '/'
        return self

    def create_database(self):
        """
        Create the database by executing the creation script.
        :return: self
        """
        # `with` closes the script file (the original leaked the handle)
        with open(self.__db_folder__ + self.__db_file__, 'r') as script:
            qry = script.read()
        c = self.conn.cursor()
        c.executescript(qry)
        c.close()
        self.conn.commit()
        self.__log__('Created the database')
        return self

    def delete_data_base(self):
        """
        Delete the database by executing the deletion script.
        :return: self
        """
        with open(self.__db_folder__ + self.__db_file_delete__, 'r') as script:
            qry = script.read()
        c = self.conn.cursor()
        c.executescript(qry)
        c.close()
        self.conn.commit()
        self.__log__('delete the database')
        return self

    def __get_db_columns__(self, table):
        """
        :param table: table to index
        :return: the table's column names, as a list
        """
        cur = self.conn.execute("select * from {}".format(table))
        # cursor metadata carries the column names
        return [col[0] for col in cur.description]

    def set_dic(self, dic):
        """
        Set the EXIF tag -> column dictionary (fluent).
        :param dic: mapping of tag names to column names
        """
        self.__dic__ = dic
        return self

    def __get_db_tables__(self):
        """
        :return: all db table names, as a list
        """
        cur = self.conn.execute("Select * from sqlite_master where type='table'")
        return [tab_name[1] for tab_name in cur]

    def __make_dic__(self):
        """
        Build the {table: columns} schema map of the database.
        """
        for table in self.__get_db_tables__():
            self.__db__[table] = self.__get_db_columns__(table)

    def add_item(self, dic: dict):
        """
        Insert EXIF data, routing each value to the table that owns its
        column.

        :param dic: {exif_tag: value} data to insert
        :return: self
        """
        # translate tag names to column names
        arr_cols = [self.__dic__[tag] for tag in dic.keys()]
        value_dic = {arr_cols[i]: dic[key] for i, key in enumerate(dic.keys())}
        # group columns by owning table: {table: {col: value}}
        per_table = {}
        for col in arr_cols:
            for table in self.__db__.keys():
                if col in self.__db__[table]:
                    # setdefault -- the original indexed a missing key,
                    # raising KeyError on the first column of each table
                    per_table.setdefault(table, {})[col] = value_dic[col]
        for table, cols in per_table.items():
            # the original formatted the statement from the table *name*
            # string (str has no .keys()); build it from the grouped
            # columns and bind the values as parameters
            insert = "INSERT INTO {}({}) values({})".format(
                table, ",".join(cols.keys()), ",".join("?" for _ in cols))
            self.__log__(insert)
            self.conn.execute(insert, tuple(cols.values()))
        # save all changes
        self.conn.commit()
        return self

    def __log__(self, text: str):
        """
        Append *text* (with a timestamp) to the module log.
        :param text: to include in the log file
        """
        Logger(prefix=' Search Engine').log(text)

    def __exists__(self):
        """
        Check if the db file exists and has its tables.
        :return: boolean
        """
        if path.exists(self.__db_name__):
            if len(self.__get_db_tables__()) != 0:
                return True
            self.__log__('No tables found')
        else:
            self.__log__('Not found db file')
        return False

    def __db_init__(self):
        if not self.__exists__():
            self.create_database()

    @staticmethod
    def __rows__(cur):
        """Materialize a cursor as a list of {column: value} dicts."""
        names = [d[0] for d in cur.description]
        return [dict(zip(names, row)) for row in cur.fetchall()]

    def __like_rows__(self, table, data):
        """
        SELECT rows of *table* where data['col'] contains data['value'].

        The value is bound as a parameter with '%' wildcards (the original
        used '*', which LIKE does not treat as a wildcard).  Table and
        column names cannot be bound -- 'col' must come from trusted code.
        """
        cur = self.conn.execute(
            'SELECT * FROM {} WHERE {} LIKE ?'.format(table, data['col']),
            ('%{}%'.format(data['value']),))
        return self.__rows__(cur)

    def get_gps(self, *args, latitude, longitude):
        """Return gps rows matching the exact coordinates."""
        # the original iterated the cursor incorrectly and returned nothing
        cur = self.conn.execute('SELECT * FROM gps WHERE lat=? AND log=?',
                                (latitude, longitude))
        return self.__rows__(cur)

    def get_lens(self, *args, data: dict):
        """Return lens rows matching the LIKE search in *data*."""
        return self.__like_rows__('lens', data)

    def get_place(self, *args, data: dict):
        """Return place rows matching the LIKE search in *data*."""
        return self.__like_rows__('place', data)

    def get_camera(self, *args, data):
        """Return camera rows matching the LIKE search in *data*."""
        return self.__like_rows__('camera', data)

    def get_img(self, *args, data: dict):
        """Return img rows matching the LIKE search in *data*."""
        return self.__like_rows__('img', data)

    def get_img_studio(self, *args, data: dict):
        """Return img_studio rows matching the LIKE search in *data*."""
        return self.__like_rows__('img_studio', data)

    def exist_item(self, data: dict):
        """
        Return True when no img row exists with data['id'].

        NOTE(review): the original compared Cursor.rowcount (always -1
        for SELECT in sqlite3) to 0, and passed the id unbound; the
        "`== 0` means no row yet" reading is kept here -- confirm the
        intended semantics against callers.
        """
        cur = self.conn.cursor()
        # parameters must be a sequence (the original passed the bare id)
        row = cur.execute("SELECT 1 FROM img WHERE id=?", (data["id"],)).fetchone()
        cur.close()
        return row is None
class ConditionsNotFoundError(Exception):
    """Raised when a query is attempted without its required conditions."""
| {"/search_engine/db_manager.py": ["/utils/logger.py"], "/search_engine/__init__.py": ["/search_engine/reader.py", "/search_engine/db_manager.py"], "/image_transfer/transfer.py": ["/search_engine/__init__.py", "/utils/logger.py", "/image_transfer/data_interpreter.py"], "/__main__.py": ["/gui/__init__.py"], "/image_manager/autotagger.py": ["/image_manager/__api__.py"], "/image_transfer/__init__.py": ["/image_transfer/transfer.py", "/image_transfer/data_interpreter.py"], "/api/__init__.py": ["/search_engine/reader.py", "/image_transfer/transfer.py", "/image_manager/autotagger.py"], "/image_manager/__init__.py": ["/image_manager/__api__.py"]} |
66,545 | Marck-G/IMTRA | refs/heads/master | /conn.py |
class Test:
    """Scratch harness for building INSERT statements from EXIF-tag dicts."""

    # EXIF tag -> db column name
    dic_col_tag = {
        "EXIF CreationDate": "make_date",
        "Image Make": "camera",
        "Image CameraModel": "model",
        "EXIF IsoSpeedRatio": "iso"}
    # columns whose values must be quoted as SQL strings
    strings = ["make_date", "model", "camera"]

    def add_item(self, map, table="camera"):
        """
        Build an INSERT statement for *map* ({exif_tag: value}).

        :param map: EXIF tag -> value
        :param table: target table name (new, defaulted parameter -- the
            original template had three placeholders but only two format
            arguments, so .format() raised IndexError)
        :return: the formatted INSERT statement
        """
        insert_sql = "INSERT INTO {}({}) values({})"
        # translate the tag names into column names, preserving dict order
        ar_cols = [self.dic_col_tag[key] for key in map.keys()]
        cols = ",".join(ar_cols)
        # quote string-typed values, leave the rest bare
        values = [map[i] for i in map.keys()]
        values = ['"{}"'.format(item) if ar_cols[i] in self.strings else item
                  for i, item in enumerate(values)]
        vals = ",".join(values)
        return insert_sql.format(table, cols, vals)
# Ad-hoc smoke test: build and print an INSERT statement for sample data.
dic_datos = {
    "EXIF CreationDate": "20/06/2018",
    "Image Make": "Canon",
    "EXIF IsoSpeedRatio": "100"
}
db_manager = Test()
print(db_manager.add_item(dic_datos))
| {"/search_engine/db_manager.py": ["/utils/logger.py"], "/search_engine/__init__.py": ["/search_engine/reader.py", "/search_engine/db_manager.py"], "/image_transfer/transfer.py": ["/search_engine/__init__.py", "/utils/logger.py", "/image_transfer/data_interpreter.py"], "/__main__.py": ["/gui/__init__.py"], "/image_manager/autotagger.py": ["/image_manager/__api__.py"], "/image_transfer/__init__.py": ["/image_transfer/transfer.py", "/image_transfer/data_interpreter.py"], "/api/__init__.py": ["/search_engine/reader.py", "/image_transfer/transfer.py", "/image_manager/autotagger.py"], "/image_manager/__init__.py": ["/image_manager/__api__.py"]} |
66,546 | Marck-G/IMTRA | refs/heads/master | /image_manager/backup_tools.py | import tarfile as tar
import os
class ImageBackup:
    """Compress an image directory into a .tar.gz archive (singleton)."""

    __instance__ = None
    # source directory to back up
    _dir = None
    # path of the archive to create
    _out_file = None

    # SINGLETON
    def __new__(cls, *args, **kwargs):
        if cls.__instance__ is None:
            cls.__instance__ = object.__new__(cls)
        return cls.__instance__

    def set_dir(self, directory):
        """Set the source directory (fluent)."""
        self._dir = directory
        return self

    def set_out_file(self, name):
        """Set the output archive path (fluent)."""
        self._out_file = name
        return self

    def __create_index__(self):
        # drop a .index file recording where the backup came from
        with open(os.path.join(self._dir, '.index'), "w") as file:
            file.write("{\n\t'origin_dir': '%s'\n}" % self._dir)

    def compress(self):
        """
        Compress the source dir into the output file.

        :raises AssertionError: when the source dir or output file is unset
        """
        # guard against missing configuration -- the original conditions
        # were inverted (`assert self._dir is None`), so they fired exactly
        # when the backup was correctly configured
        assert self._dir is not None, "No origin dir set"
        assert self._out_file is not None, "No output file name set"
        # create the index
        self.__create_index__()
        # walk the tree and add every file to the gzip'd tar
        with tar.open(self._out_file, "w:gz") as c_file:
            for parent, directories, files in os.walk(str(self._dir)):
                for f in files:
                    c_file.add(os.path.join(parent, f))
| {"/search_engine/db_manager.py": ["/utils/logger.py"], "/search_engine/__init__.py": ["/search_engine/reader.py", "/search_engine/db_manager.py"], "/image_transfer/transfer.py": ["/search_engine/__init__.py", "/utils/logger.py", "/image_transfer/data_interpreter.py"], "/__main__.py": ["/gui/__init__.py"], "/image_manager/autotagger.py": ["/image_manager/__api__.py"], "/image_transfer/__init__.py": ["/image_transfer/transfer.py", "/image_transfer/data_interpreter.py"], "/api/__init__.py": ["/search_engine/reader.py", "/image_transfer/transfer.py", "/image_manager/autotagger.py"], "/image_manager/__init__.py": ["/image_manager/__api__.py"]} |
66,547 | Marck-G/IMTRA | refs/heads/master | /__init__.py | import eel as app
| {"/search_engine/db_manager.py": ["/utils/logger.py"], "/search_engine/__init__.py": ["/search_engine/reader.py", "/search_engine/db_manager.py"], "/image_transfer/transfer.py": ["/search_engine/__init__.py", "/utils/logger.py", "/image_transfer/data_interpreter.py"], "/__main__.py": ["/gui/__init__.py"], "/image_manager/autotagger.py": ["/image_manager/__api__.py"], "/image_transfer/__init__.py": ["/image_transfer/transfer.py", "/image_transfer/data_interpreter.py"], "/api/__init__.py": ["/search_engine/reader.py", "/image_transfer/transfer.py", "/image_manager/autotagger.py"], "/image_manager/__init__.py": ["/image_manager/__api__.py"]} |
66,548 | Marck-G/IMTRA | refs/heads/master | /search_engine/__init__.py | # MIT License
#
# Copyright (c) 2019 MARCK C. GUZMAN, UNAI DIAZ DE GARAYO
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# MANAGER
from .reader import Reader
from .db_manager import DBManager
class SearchEngine:
    """
    Facade tying the EXIF Reader to the DBManager: reads tags from an
    image, normalizes the keys and stores/queries them.
    """
    __reader__ = Reader("")
    __filter__ = None  # whitelist of tags to read
    __replace_map__ = None  # {exif_tag: db_key} replacement map
    __column_separator__ = ";"
    __value_separator__ = ":"
    __replaces_key__ = []

    def __init__(self):
        self.__db_manager__ = DBManager()
        # NOTE(review): the original called getReplaceMap()/getFilter() on
        # the undefined name `db_manager` (NameError), and DBManager does
        # not define either method -- guard with getattr until they exist.
        get_map = getattr(self.__db_manager__, 'getReplaceMap', lambda: None)
        replace_map = get_map()
        if replace_map is not None:
            self.set_replace_map(replace_map)
        get_filter = getattr(self.__db_manager__, 'getFilter', lambda: None)
        self.__filter__ = get_filter()

    def __read__(self, img):
        """
        Read the img EXIF tags (honouring the filter when one is set).
        :param img: image url
        :return: tag map
        :raises NoImageSetError: when *img* is None
        """
        if img is None:
            raise NoImageSetError("Image not set for read the EXIF tags. Search Engine Error!")
        # set the image to read
        self.__reader__.set_image(img)
        if self.__filter__ is not None:
            return self.__reader__.get_filter_tag(self.__filter__)
        return self.__reader__.get_data()

    def __db_store__(self):
        # persist whatever the reader currently holds
        self.__db_manager__.add_item(self.__reader__.get_data())

    def set_filter(self, fil: list):
        """Set the tag whitelist.

        :raises NoFilterSetError: when *fil* is None (the original raised
            NoImageSetError here, though the message and the existence of
            NoFilterSetError show the intent)
        """
        if fil is None:
            raise NoFilterSetError("No Filter found. Search Engine Error!")
        self.__filter__ = fil

    def process_image(self, img):
        """Read, normalize and store one image's tags."""
        # reading caches the data on the __reader__ object
        self.__read__(img)
        if self.__replace_map__ is not None:
            self.__reader__.key_replace(self.__replace_map__, True)
        self.__db_store__()

    def set_replace_map(self, map):
        """Set the tag replacement map.

        :raises NoReplaceMapSetError: when *map* is None
        """
        if map is None:
            raise NoReplaceMapSetError("Need a replace Map. Search Engine Error!")
        self.__replace_map__ = map
        for key in map:
            self.__replaces_key__.append(map[key])

    def search(self, data: dict):
        """
        Dispatch a search request to the matching DBManager query.

        :param data: search dict; data['table'] selects the query
        :return: the matching rows (None for an unknown table)
        """
        table = data['table']
        if table == 'lens':
            return self.__db_manager__.get_lens(data=data)
        if table == 'camera':
            return self.__db_manager__.get_camera(data=data)
        if table == 'studio':
            return self.__db_manager__.get_img_studio(data=data)
        if table == 'gps':
            return self.__db_manager__.get_gps(latitude=data['lat'], longitude=data['log'])
        if table == 'place':
            return self.__db_manager__.get_place(data=data)
class NoImageSetError(Exception):
    """Raised when an operation needs an image but none was supplied."""


class NoFilterSetError(Exception):
    """Raised when a tag filter is required but missing."""


class NoReplaceMapSetError(Exception):
    """Raised when a tag replacement map is required but missing."""
| {"/search_engine/db_manager.py": ["/utils/logger.py"], "/search_engine/__init__.py": ["/search_engine/reader.py", "/search_engine/db_manager.py"], "/image_transfer/transfer.py": ["/search_engine/__init__.py", "/utils/logger.py", "/image_transfer/data_interpreter.py"], "/__main__.py": ["/gui/__init__.py"], "/image_manager/autotagger.py": ["/image_manager/__api__.py"], "/image_transfer/__init__.py": ["/image_transfer/transfer.py", "/image_transfer/data_interpreter.py"], "/api/__init__.py": ["/search_engine/reader.py", "/image_transfer/transfer.py", "/image_manager/autotagger.py"], "/image_manager/__init__.py": ["/image_manager/__api__.py"]} |
66,549 | Marck-G/IMTRA | refs/heads/master | /image_transfer/transfer.py | import os
import datetime
from search_engine import reader
from utils.logger import Logger
from image_transfer.data_interpreter import dir as DIR
class Transfer:
    """
    Copies images from a source folder into date-derived destination
    folders, recording duplicates instead of overwriting them.
    """
    __name__ = 'Image Transfer Module'
    __base_dir__ = None      # source folder
    __dest_dir__ = None      # destination root folder
    __ext_file__ = 'f.ext'   # file listing the allowed extensions
    __extensions__ = None    # cached list of allowed extensions
    __imgs__ = None          # cached list of source images
    __dirs_format__ = None   # destination sub-folder format
    duplicated_images = []   # destination paths skipped as duplicates

    def __mkdir__(self, dir):
        # create *dir* (with parents) when it does not exist yet
        if not os.path.exists(dir):
            os.makedirs(dir)
        return self

    def set_base_dir(self, dir):
        """
        Set the source folder (fluent).
        :raises FileNotFoundError: when the folder does not exist
        """
        self.__base_dir__ = dir
        if not os.path.exists(dir):
            Logger(prefix=self.__name__).log('Dir {} not found'.format(str(dir)))
            raise FileNotFoundError('Dir {} not found'.format(str(dir)))
        self.__imgs__ = None  # reset the cached file list
        self.duplicated_images = []  # reset the duplicate images
        return self

    def set_transfer_format(self, format):
        """Set the destination folder naming format (fluent)."""
        self.__dirs_format__ = format
        return self

    def set_dest_dir(self, dir):
        """Set the destination root folder (fluent)."""
        self.__dest_dir__ = dir
        return self

    def read_extension(self):
        """
        Read (and cache) the allowed-extension list from the extension file.
        :return: list with all allowed extensions
        """
        if self.__extensions__ is not None:
            return self.__extensions__
        with open(self.__ext_file__, 'r') as ext:
            # cache the list: the original never stored it, so list_dir
            # re-read the file once per source file
            self.__extensions__ = [e.replace('\n', '') for e in ext.readlines()]
        return self.__extensions__

    def list_dir(self):
        """
        List (and cache) every allowed-extension file under the base dir.
        :return: list with files' paths
        """
        if self.__imgs__ is not None:
            return self.__imgs__
        allowed = self.read_extension()  # hoisted out of the loops
        found = []
        for root, _dirs, files in os.walk(self.__base_dir__):
            for name in files:
                for ext in allowed:
                    if name.lower().endswith(ext.lower()):
                        found.append(os.path.join(root, name))
        self.__imgs__ = found
        return found

    def __read_date__(self, img):
        """
        Return the image's date: EXIF metadata when available, the file
        creation time otherwise.
        :param img: image path
        :return: the image date as a string
        """
        rdr = reader.Reader(img)
        result = rdr.get_filter_tag(['Image DateTime', 'EXIF DateTimeOriginal'])
        if result is None or len(result.keys()) == 0:
            # no EXIF date tag: fall back to the filesystem creation time
            stamp = os.path.getctime(img)
            return datetime.datetime.fromtimestamp(stamp).__str__()
        # prefer the capture date; the original indexed 'Image DateTime'
        # whenever only one tag was present, which raised KeyError when
        # that one tag was 'EXIF DateTimeOriginal'
        if 'EXIF DateTimeOriginal' in result:
            return result['EXIF DateTimeOriginal']
        return result['Image DateTime']

    def __exists_img__(self, img):
        # does the destination file already exist?
        return os.path.exists(img)

    def has_duplicates(self):
        """Return True when the last run skipped duplicate images."""
        return self.duplicated_images is not None and len(self.duplicated_images) != 0

    def get_duplicates(self):
        """Return the destination paths that were skipped as duplicates."""
        return self.duplicated_images

    def get_size(self):
        """
        :return: the total number of files in the origin folder
        """
        return len(self.list_dir())

    def transfer(self, image):
        """
        Copy one image into its date-derived destination folder; record a
        duplicate instead of copying when the target already exists.

        :param image: the image to transfer
        :raises BaseErrorNotFoundError: when no origin dir is set
        :raises DestErrorNotFoundError: when no destination dir is set
        """
        if self.__base_dir__ is None:
            raise self.BaseErrorNotFoundError("The origin directory must set", Logger(prefix=self.__name__))
        if self.__dest_dir__ is None:
            raise self.DestErrorNotFoundError("The destination directory must set", Logger(prefix=self.__name__))
        date = self.__read_date__(image)
        dest_dir = self.__dest_dir__ + DIR(date)
        # destination path keeps the original file name
        name = image.split(os.path.sep)[-1]
        target = os.path.join(dest_dir, name)
        if self.__exists_img__(target):
            # already present at the destination: record, don't copy
            self.duplicated_images.append(target)
            return
        self.__mkdir__(dest_dir)
        # raw binary copy of the whole file
        with open(image, "rb") as _input:
            with open(target, "wb") as _output:
                _output.write(_input.read(os.path.getsize(image)))

    def transfer_all(self, callback):
        """
        Transfer every file in the origin folder, reporting progress (and
        any per-file error) through *callback*.
        """
        total = self.get_size()
        for i, image in enumerate(self.list_dir()):
            try:
                self.transfer(image)
                callback({"image": image,
                          "status": "ok",
                          "number": i + 1,
                          "total": total})
            except Exception as e:
                # report the failure but keep going with the next file
                callback({"image": image,
                          "status": "error",
                          "number": i + 1,
                          "total": total,
                          "error": e})

    # transfer the duplicate images; needs a callback function
    # (the trailing 'l' in the name is kept for backward compatibility)
    def transfer_duplicatesl(self, callback):
        for i, image in enumerate(self.duplicated_images):
            self.transfer(image)
            callback({"image": image, "status": "ok", "number": i + 1,
                      "total": len(self.duplicated_images)})

    def save_transfer(self):
        # TODO: persist the transfer in the history db; currently a stub
        if self.__dest_dir__ is None and self.__base_dir__ is None:
            return

    class BaseErrorNotFoundError(Exception):
        """Raised when transfer() runs without an origin directory set."""
        def __init__(self, msg, log):
            # the original called Exception.__init__(msg) without self,
            # which raised TypeError instead of this exception
            super().__init__(msg)
            log.log("{} {}".format(self.__class__.__name__, msg))

    class DestErrorNotFoundError(Exception):
        """Raised when transfer() runs without a destination directory set."""
        def __init__(self, msg, log):
            super().__init__(msg)
            log.log("{} {}".format(self.__class__.__name__, msg))
| {"/search_engine/db_manager.py": ["/utils/logger.py"], "/search_engine/__init__.py": ["/search_engine/reader.py", "/search_engine/db_manager.py"], "/image_transfer/transfer.py": ["/search_engine/__init__.py", "/utils/logger.py", "/image_transfer/data_interpreter.py"], "/__main__.py": ["/gui/__init__.py"], "/image_manager/autotagger.py": ["/image_manager/__api__.py"], "/image_transfer/__init__.py": ["/image_transfer/transfer.py", "/image_transfer/data_interpreter.py"], "/api/__init__.py": ["/search_engine/reader.py", "/image_transfer/transfer.py", "/image_manager/autotagger.py"], "/image_manager/__init__.py": ["/image_manager/__api__.py"]} |
66,550 | Marck-G/IMTRA | refs/heads/master | /__main__.py | import eel as app
from gui import show_dir_chouser as select_dir
# Point eel at the folder holding the HTML/JS frontend.
app.init('views')
@app.expose
def chouse_folder(targe, title='Selection', initialdir=False):
    """Open a native folder picker and write the chosen path into the
    frontend element with id *targe* (exposed to the eel frontend)."""
    # initialdir=False means "let the picker use its default start folder"
    directory = select_dir(title=title, initialdir= None if not initialdir else initialdir )
    set_by_id(targe, str(directory))
def set_by_id(id, content):
    """Push *content* into the frontend element with the given id.

    NOTE(review): `setById` must be a function the frontend exposes to
    eel -- confirm it exists in views/.
    """
    app.setById(id, content)
# Start the application with main.html as the entry page.
app.start('main.html')
| {"/search_engine/db_manager.py": ["/utils/logger.py"], "/search_engine/__init__.py": ["/search_engine/reader.py", "/search_engine/db_manager.py"], "/image_transfer/transfer.py": ["/search_engine/__init__.py", "/utils/logger.py", "/image_transfer/data_interpreter.py"], "/__main__.py": ["/gui/__init__.py"], "/image_manager/autotagger.py": ["/image_manager/__api__.py"], "/image_transfer/__init__.py": ["/image_transfer/transfer.py", "/image_transfer/data_interpreter.py"], "/api/__init__.py": ["/search_engine/reader.py", "/image_transfer/transfer.py", "/image_manager/autotagger.py"], "/image_manager/__init__.py": ["/image_manager/__api__.py"]} |
66,551 | Marck-G/IMTRA | refs/heads/master | /gui/__init__.py | # need Tkinter to install it sudo apt isntall python3-tk
from tkinter import Tk
from tkinter.filedialog import askdirectory
def show_dir_chouser(title, *args, initialdir = None):
    """
    Ask the user to select a folder through a native dialog.

    :param title: title of the dialog window
    :param initialdir: folder the dialog opens in (defaults to the user's home)
    :return: the selected directory path ('' when the dialog is cancelled)
    """
    from pathlib import Path
    if initialdir is None:
        # No start folder supplied -- fall back to the home directory.
        initialdir = Path.home()
    # Hide the empty Tk root window so only the folder dialog is visible.
    Tk().withdraw()
    return askdirectory(initialdir=str(initialdir), title=title)
| {"/search_engine/db_manager.py": ["/utils/logger.py"], "/search_engine/__init__.py": ["/search_engine/reader.py", "/search_engine/db_manager.py"], "/image_transfer/transfer.py": ["/search_engine/__init__.py", "/utils/logger.py", "/image_transfer/data_interpreter.py"], "/__main__.py": ["/gui/__init__.py"], "/image_manager/autotagger.py": ["/image_manager/__api__.py"], "/image_transfer/__init__.py": ["/image_transfer/transfer.py", "/image_transfer/data_interpreter.py"], "/api/__init__.py": ["/search_engine/reader.py", "/image_transfer/transfer.py", "/image_manager/autotagger.py"], "/image_manager/__init__.py": ["/image_manager/__api__.py"]} |
66,552 | Marck-G/IMTRA | refs/heads/master | /image_manager/__api__.py | from Crypto.Cipher import AES
def aes_key():
    """Return the hard-coded AES key used to decrypt the bundled API-key file.

    NOTE(review): a key embedded in source offers obfuscation, not secrecy.
    """
    return '6pú2BÑq9pfI.Ú'
def api_key():
    """Decrypt and return the API key stored in ``api_key.k``.

    NOTE(review): AES.new is called without an explicit mode/IV -- behaviour
    depends on the installed pycrypto defaults; confirm the key file was
    produced with the same settings.
    """
    cipher = AES.new(key=aes_key())
    with open('api_key.k', "rb") as key_file:
        encrypted = key_file.read()
    return cipher.decrypt(encrypted).decode('utf-8')
# Decrypted once at import time; importing this module fails if api_key.k is absent.
API_KEY = api_key()
| {"/search_engine/db_manager.py": ["/utils/logger.py"], "/search_engine/__init__.py": ["/search_engine/reader.py", "/search_engine/db_manager.py"], "/image_transfer/transfer.py": ["/search_engine/__init__.py", "/utils/logger.py", "/image_transfer/data_interpreter.py"], "/__main__.py": ["/gui/__init__.py"], "/image_manager/autotagger.py": ["/image_manager/__api__.py"], "/image_transfer/__init__.py": ["/image_transfer/transfer.py", "/image_transfer/data_interpreter.py"], "/api/__init__.py": ["/search_engine/reader.py", "/image_transfer/transfer.py", "/image_manager/autotagger.py"], "/image_manager/__init__.py": ["/image_manager/__api__.py"]} |
66,553 | Marck-G/IMTRA | refs/heads/master | /image_manager/autotagger.py | from clarifai.rest import ClarifaiApp
from image_manager.__api__ import API_KEY
async def tags_from(img):
    """
    Get tags for an image via the Clarifai REST API.

    :param img: path to the image file on disk
    :return: dict mapping tag name -> confidence value, or None when the
             API response does not have the expected structure
    """
    app = ClarifaiApp(api_key=API_KEY)
    model = app.public_models.general_model
    res = model.predict_by_filename(img)
    try:
        # Expected response schema: outputs[0].data.concepts ->
        # [{"name": ..., "value": ...}, ...]
        return {tag["name"]: tag["value"]
                for tag in res["outputs"][0]["data"]["concepts"]}
    except (KeyError, IndexError, TypeError):
        # Malformed or failed API response: keep the original best-effort
        # contract of returning None, but no longer swallow unrelated errors
        # with a blanket ``except Exception``.
        return None
| {"/search_engine/db_manager.py": ["/utils/logger.py"], "/search_engine/__init__.py": ["/search_engine/reader.py", "/search_engine/db_manager.py"], "/image_transfer/transfer.py": ["/search_engine/__init__.py", "/utils/logger.py", "/image_transfer/data_interpreter.py"], "/__main__.py": ["/gui/__init__.py"], "/image_manager/autotagger.py": ["/image_manager/__api__.py"], "/image_transfer/__init__.py": ["/image_transfer/transfer.py", "/image_transfer/data_interpreter.py"], "/api/__init__.py": ["/search_engine/reader.py", "/image_transfer/transfer.py", "/image_manager/autotagger.py"], "/image_manager/__init__.py": ["/image_manager/__api__.py"]} |
66,554 | Marck-G/IMTRA | refs/heads/master | /image_transfer/data_interpreter.py | import datetime
def dir(d, format = 'yyyy/MM/dd-MM-yy', sys_date=False):
    """
    Generate a path string from a date according to *format*.

    NOTE: shadows the ``dir`` builtin; name kept for API compatibility.

    :param d: date -- either an EXIF-style string "YYYY:MM:DD HH:MM:SS" or a
              datetime whose str() carries fractional seconds
    :param format: tree-dir format, parts separated by '/' (or ':')
    :param sys_date: unused, kept for backward compatibility
    :return: the tree-dir string, e.g. '/2020/05/03-05-20'
    """
    split_regex = '/'
    if ':' in format: split_regex = ':'
    split = format.split(split_regex)
    # Parse the date.
    # BUGFIX: the original tested ``str(d).index('.') == -1``, but str.index
    # raises ValueError when '.' is absent (it is str.find that returns -1),
    # so EXIF dates without fractional seconds always crashed.
    text = str(d)
    if '.' in text:
        date = datetime.datetime.strptime(text, "%Y-%m-%d %H:%M:%S.%f")
    else:
        date = datetime.datetime.strptime(text, "%Y:%m:%d %H:%M:%S")
    string = ''
    for el in split:
        if '-' in el:
            # Sub-format such as 'dd-MM-yy': join the converted pieces with '-'.
            # (The ``index < 2`` guard assumes three '-'-separated tokens, as
            # in the default format -- TODO confirm for other formats.)
            tmp_split = el.split('-')
            str_el = ''
            for index,i in enumerate(tmp_split):
                str_el = str_el + replace(i, date)
                if index < 2:
                    str_el = str_el + '-'
            string = string + '/' + str_el
        else:
            string = string + '/' + replace(el, date)
    return string
def replace(data, date):
    """Convert one format token ('yyyy', 'yy', 'MM', 'dd') into its value.

    Months and days are zero-padded to two digits; unknown tokens yield None.
    """
    if 'yyyy' in data:
        return str(date.year)
    if 'yy' in data:
        # Two-digit year: drop the century.
        return str(date.year)[2:]
    if 'MM' in data:
        return str(date.month).zfill(2)
    if 'dd' in data:
        return str(date.day).zfill(2)
| {"/search_engine/db_manager.py": ["/utils/logger.py"], "/search_engine/__init__.py": ["/search_engine/reader.py", "/search_engine/db_manager.py"], "/image_transfer/transfer.py": ["/search_engine/__init__.py", "/utils/logger.py", "/image_transfer/data_interpreter.py"], "/__main__.py": ["/gui/__init__.py"], "/image_manager/autotagger.py": ["/image_manager/__api__.py"], "/image_transfer/__init__.py": ["/image_transfer/transfer.py", "/image_transfer/data_interpreter.py"], "/api/__init__.py": ["/search_engine/reader.py", "/image_transfer/transfer.py", "/image_manager/autotagger.py"], "/image_manager/__init__.py": ["/image_manager/__api__.py"]} |
66,555 | Marck-G/IMTRA | refs/heads/master | /image_transfer/__init__.py | from .transfer import Transfer
from .data_interpreter import * | {"/search_engine/db_manager.py": ["/utils/logger.py"], "/search_engine/__init__.py": ["/search_engine/reader.py", "/search_engine/db_manager.py"], "/image_transfer/transfer.py": ["/search_engine/__init__.py", "/utils/logger.py", "/image_transfer/data_interpreter.py"], "/__main__.py": ["/gui/__init__.py"], "/image_manager/autotagger.py": ["/image_manager/__api__.py"], "/image_transfer/__init__.py": ["/image_transfer/transfer.py", "/image_transfer/data_interpreter.py"], "/api/__init__.py": ["/search_engine/reader.py", "/image_transfer/transfer.py", "/image_manager/autotagger.py"], "/image_manager/__init__.py": ["/image_manager/__api__.py"]} |
66,556 | Marck-G/IMTRA | refs/heads/master | /api/__init__.py | from search_engine.reader import Reader
from image_transfer.transfer import Transfer
from image_manager.autotagger import tags_from as tag
def search(data):
    """Search endpoint placeholder -- not implemented yet (TODO)."""
    pass
def read(petition: dict) -> dict:
    """
    Read metadata for the image referenced by *petition*.

    :param petition: dict with key 'image' (path) and optionally 'filter'
                     (tag names to restrict the output to)
    :return: {'image': <path>, 'result': {tag: value, ...}}
    :raises AssertionError: when petition is None or lacks 'image'
    """
    assert petition is not None, "Not data found"
    key_list = petition.keys()
    assert 'image' in key_list, "Image not found in data"
    image = petition["image"]
    tag_list = petition['filter'] if 'filter' in key_list else None
    rd = Reader(image)
    # Both branches of the original duplicated the same key-by-key copy loop;
    # pick the data source once and copy it in one place.
    _data = rd.get_filter_tag(tag_list) if tag_list is not None else rd.get_data()
    return {'image': image, 'result': dict(_data)}
def tagger(pettition: dict) -> dict:
    """Auto-tag the image referenced by *pettition* (key 'image').

    :raises AssertionError: when pettition is None or 'image' is falsy
    :raises KeyError: when 'image' is absent
    """
    assert pettition is not None, "No petition found"
    image = pettition['image']
    assert image, "Image not found"
    return tag(image)
def transfer(petition: dict):
    """
    Run a full image transfer described by *petition*.

    Expected keys: 'source' (directory to scan), 'target' (destination root),
    'callback' (per-file progress callable) and, optionally,
    'duplicate_callback' (invoked afterwards with the duplicate list).

    :raises AssertionError: when petition is None
    :raises KeyError: when a required key is missing
    """
    assert petition is not None, "No petition found"
    origin = petition["source"]
    target = petition["target"]
    callback = petition["callback"]
    # BUGFIX: the original indexed petition["duplicate_callback"] before
    # checking it, raising KeyError when the key was absent; .get() makes the
    # callback truly optional (falsy values still map to None, as before).
    duplicate_callback = petition.get("duplicate_callback") or None
    transfer = Transfer()
    transfer.set_base_dir(origin)
    transfer.set_dest_dir(target)
    transfer.transfer_all(callback)
    if duplicate_callback is not None and transfer.has_duplicates():
        duplicate_callback(transfer.get_duplicates())
66,557 | Marck-G/IMTRA | refs/heads/master | /utils/logger.py | from datetime import datetime
class Logger:
    """
    Append-only application log writer (process-wide singleton).

    The class attributes act as defaults: the first construction may override
    ``output_file`` and ``prefix``; later constructions return the same
    instance unchanged.
    """
    __instance__ = None
    output_file = '.log'
    prefix = None
    # Lazily create the single shared instance; keyword overrides are only
    # honoured on that first call.
    def __new__(cls, *args, output_file=False, prefix=False):
        if cls.__instance__ is None:
            instance = object.__new__(cls)
            if output_file:
                instance.output_file = output_file
            if prefix:
                instance.prefix = prefix
            cls.__instance__ = instance
        return cls.__instance__
    def log(self, msg):
        """Append one timestamped, prefixed line for *msg* to the log file."""
        stamp = datetime.now()
        entry = "{}\t {}: {}\n".format(stamp, self.prefix, msg)
        with open(self.output_file, "a+") as out:
            out.write(entry)
| {"/search_engine/db_manager.py": ["/utils/logger.py"], "/search_engine/__init__.py": ["/search_engine/reader.py", "/search_engine/db_manager.py"], "/image_transfer/transfer.py": ["/search_engine/__init__.py", "/utils/logger.py", "/image_transfer/data_interpreter.py"], "/__main__.py": ["/gui/__init__.py"], "/image_manager/autotagger.py": ["/image_manager/__api__.py"], "/image_transfer/__init__.py": ["/image_transfer/transfer.py", "/image_transfer/data_interpreter.py"], "/api/__init__.py": ["/search_engine/reader.py", "/image_transfer/transfer.py", "/image_manager/autotagger.py"], "/image_manager/__init__.py": ["/image_manager/__api__.py"]} |
66,558 | Marck-G/IMTRA | refs/heads/master | /image_manager/__init__.py | from .__api__ import API_KEY
| {"/search_engine/db_manager.py": ["/utils/logger.py"], "/search_engine/__init__.py": ["/search_engine/reader.py", "/search_engine/db_manager.py"], "/image_transfer/transfer.py": ["/search_engine/__init__.py", "/utils/logger.py", "/image_transfer/data_interpreter.py"], "/__main__.py": ["/gui/__init__.py"], "/image_manager/autotagger.py": ["/image_manager/__api__.py"], "/image_transfer/__init__.py": ["/image_transfer/transfer.py", "/image_transfer/data_interpreter.py"], "/api/__init__.py": ["/search_engine/reader.py", "/image_transfer/transfer.py", "/image_manager/autotagger.py"], "/image_manager/__init__.py": ["/image_manager/__api__.py"]} |
66,560 | GraceDurham/Welp | refs/heads/master | /resturant_recommender.py | import weather_helper
import yelp_helper
def recommend_based_on_weather(state, city):
    """Pick a Yelp search term and headline from the current temperature.

    :return: {'resturants': [...], 'temp': <int °F>, 'headline': <str>}
    """
    temp = weather_helper.fetch_weather(state, city)
    str_temp = str(temp)
    # Warm (>65°F): outdoors. Cold (<40°F): soup. Otherwise: indoor seating.
    if temp > 65:
        search_term = 'outdoor seating'
        headline = "It feels like " + str_temp + " outside. What a great day to go to an outdoor resturant. Pick one below."
    elif temp < 40:
        search_term = 'soup'
        headline = "It feels like " + str_temp + " outside. Better grub somewhere warm with soup. Pick one below."
    else:
        search_term = 'indoor seating'
        headline = "It feels like " + str_temp + " outside. How about eating somewhere inside. Pick one below."
    # Ask Yelp for matching restaurants in the area.
    resturants = yelp_helper.fetch_resturants(state, city, search_term)
    return {"resturants": resturants, "temp": temp, "headline": headline}
66,561 | GraceDurham/Welp | refs/heads/master | /yelp_helper.py | from urllib2 import urlopen
from config_secret import*
from urllib import pathname2url
from yelp.client import Client
from yelp.oauth1_authenticator import Oauth1Authenticator
# OAuth1 credentials come from config_secret (star-imported above).
auth = Oauth1Authenticator(
    consumer_key=YOUR_CONSUMER_KEY,
    consumer_secret=YOUR_CONSUMER_SECRET,
    token=YOUR_TOKEN,
    token_secret=YOUR_TOKEN_SECRET
)
# One module-level Yelp client, reused by every search.
client = Client(auth)
# Base search parameters; fetch_resturants adds/overwrites 'term' per call.
params = {
    'lang': 'en'
}
#search for and print businesses
def fetch_resturants(state, city, search_term):
    """Search Yelp for *search_term* around "state city"; return the businesses.

    Note: mutates the shared module-level ``params`` dict, so the last
    'term' persists between calls.
    """
    params['term'] = search_term
    location = state + " " + city
    response = client.search(location, **params)
    return response.businesses
| {"/resturant_recommender.py": ["/weather_helper.py", "/yelp_helper.py"], "/resturants.py": ["/resturant_recommender.py"]} |
66,562 | GraceDurham/Welp | refs/heads/master | /weather_helper.py |
from config_secret import*
from urllib import pathname2url
from urllib2 import urlopen
from json import load
#takes a city and state and returns the wether
def fetch_weather(state, city):
    """Return the Weather Underground "feels like" temperature as an int (°F).

    Falls back to 0 when the response carries no current_observation block.
    """
    url_base = 'http://api.wunderground.com/api/' + weather_underground_key
    api_url = "{}/conditions/q/{}/{}.json".format(
        url_base, pathname2url(state), pathname2url(city))
    payload = load(urlopen(api_url))
    observation = payload.get("current_observation")
    if observation:
        return int(float(observation["feelslike_f"]))
    return 0
| {"/resturant_recommender.py": ["/weather_helper.py", "/yelp_helper.py"], "/resturants.py": ["/resturant_recommender.py"]} |
66,563 | GraceDurham/Welp | refs/heads/master | /resturants.py | from flask import Flask
from flask import render_template
from flask import request, url_for, redirect, session, g, flash
import resturant_recommender
app = Flask(__name__, static_url_path='')
@app.route('/recomendations', methods=['GET'])
def list_recomendations():
    """Render weather-based restaurant recommendations for ?state=&city=."""
    state = request.args.get("state")
    city = request.args.get("city")
    info = resturant_recommender.recommend_based_on_weather(state, city)
    return render_template(
        'recomendations/list.html',
        headline=info["headline"],
        temp=info["temp"],
        resturants=info["resturants"],
        state=state,
        city=city,
    )
@app.route('/', methods=['GET'])
def default():
    """Landing page: render the city/state search form."""
    return render_template('recomendations/index.html')
if __name__ == '__main__':
# init_db()
app.run(debug=True) | {"/resturant_recommender.py": ["/weather_helper.py", "/yelp_helper.py"], "/resturants.py": ["/resturant_recommender.py"]} |
66,618 | ckoppula199/Webcam-Motion-Detector-Logger | refs/heads/master | /Visualisation.py | from bokeh.plotting import show, figure, output_file
from bokeh.models import HoverTool, ColumnDataSource
from MotionDetector import df
# NOTE(review): importing df from MotionDetector executes that whole script,
# i.e. the camera capture loop runs before this visualisation starts.
# String copies of the timestamps, shown when hovering over a bar.
df["Start_string"]=df["Start"].dt.strftime("%Y-%m-%d %H:%M:%S")
df["End_string"]=df["End"].dt.strftime("%Y-%m-%d %H:%M:%S")
# Bokeh data source backed by the motion-interval DataFrame.
cds = ColumnDataSource(df)
# Timeline figure: x is wall-clock time, y is a dummy 0..1 band.
p = figure(x_axis_type = 'datetime', height=100, width=500, sizing_mode='scale_width', title='Motion Graph')
p.yaxis.minor_tick_line_color = None
p.ygrid[0].ticker.desired_num_ticks=1
# Hover tool shows each interval's start/end strings.
hover = HoverTool(tooltips=[("Start", "@Start_string"), ("End", "@End_string")])
p.add_tools(hover)
# One quad (rectangle) per motion interval.
q = p.quad(left="Start", right="End", bottom=0, top=1, color='blue', source=cds)
# Save the graph as Graph.html and open it in the browser.
output_file("Graph.html")
show(p)
| {"/Visualisation.py": ["/MotionDetector.py"]} |
66,619 | ckoppula199/Webcam-Motion-Detector-Logger | refs/heads/master | /MotionDetector.py | import cv2, time, pandas, imutils
from imutils.video import VideoStream
from datetime import datetime
# Webcam motion detector: compares each frame against the first ("reference")
# frame, records the start/end time of every motion event, and writes the
# pairs to Times.csv on exit.
# The first frame seen by the camera acts as the reference that every
# following frame is compared against.
reference_frame = None
status_list = [None, None] # 2 seed items so [-2] indexing is always valid
times = []  # alternating motion start/end timestamps
df=pandas.DataFrame(columns=["Start", "End"])
# Start the camera; change src if multiple cameras are available.
#video=cv2.VideoCapture(0) # alternate way of accessing camera but I've found it not to be as good
video = VideoStream(src=0).start()
time.sleep(2.0) #gives camera time to adjust to environmental conditions
while True:
    # grab the next frame from the stream
    #check, frame = video.read() # if using cv2.VideoCapture(0) then uncomment this and comment the below statement
    frame = video.read()
    text = "No Movement Detected"
    status = 0  # becomes 1 when any motion is found in this frame
    # normalise size, then grayscale + blur so the pixel diff is noise-robust
    frame = imutils.resize(frame, width=500)
    gray=cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
    gray=cv2.GaussianBlur(gray, (21, 21,), 0) # smooths edges and reduces noise in calculations
    # first iteration: adopt this frame as the reference and skip detection
    if reference_frame is None:
        reference_frame = gray
        continue
    # absolute per-pixel difference between the reference and current frame
    delta_frame=cv2.absdiff(reference_frame, gray)
    # pixels differing by more than the threshold become white, others black
    thresh_frame=cv2.threshold(delta_frame, 25, 255, cv2.THRESH_BINARY)[1] #adjust second argument to change the difference required for a pixel to be classed as moving
    thresh_frame=cv2.dilate(thresh_frame, None,iterations=2)
    # find contours of distinct moving regions
    # NOTE(review): the 3-value unpack matches the OpenCV 3.x findContours
    # signature; OpenCV 4 returns only (contours, hierarchy) -- confirm version.
    (_,cnts,_)=cv2.findContours(thresh_frame.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
    # highlight objects whose area exceeds 500 px
    for contour in cnts:
        if cv2.contourArea(contour) < 500: # change value based on size of object trying to detect
            continue
        status = 1
        (x, y, w, h) = cv2.boundingRect(contour)
        cv2.rectangle(frame, (x, y), (x + w, y + h), (0, 0, 255), 2) #draw rectangle
        text = "Motion Detected"
    # keep only the last 2 statuses; a 0->1 edge marks a motion start and a
    # 1->0 edge marks a motion end
    status_list.append(status)
    status_list = status_list[-2:]
    if status_list[-1] == 1 and status_list[-2] == 0: #object has been detected
        times.append(datetime.now())
    if status_list[-1] == 0 and status_list[-2] == 1: #object is no longer being detected
        times.append(datetime.now())
    # overlay the current status and wall-clock time
    cv2.putText(frame, "Status: {}".format(text), (10, 20), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2)
    cv2.putText(frame, datetime.now().strftime("%A %d %B %Y %I:%M:%S%p"), (10, frame.shape[0] - 10), cv2.FONT_HERSHEY_SIMPLEX, 0.35, (0, 0 , 255), 1)
    # debug views of each pipeline stage
    cv2.imshow("Gray", gray)
    cv2.imshow("Threshold", thresh_frame)
    cv2.imshow("Delta", delta_frame)
    cv2.imshow("frame", frame)
    # 'q' quits; close any open motion interval first so times stays paired
    key = cv2.waitKey(1)
    if key == ord('q'):
        if status == 1:
            times.append(datetime.now())
        break
# times holds start/end pairs; persist them for Visualisation.py.
# NOTE(review): DataFrame.append was removed in pandas >= 2.0 -- confirm the
# pinned pandas version or switch to pd.concat.
for i in range(0, len(times), 2):
    df = df.append({"Start": times[i], "End": times[i+1]}, ignore_index=True)
df.to_csv("Times.csv")
#video.release()
video.stop()
cv2.destroyAllWindows()
| {"/Visualisation.py": ["/MotionDetector.py"]} |
66,626 | JMasekar/Triangle567 | refs/heads/master | /TestTriangle.py | # -*- coding: utf-8 -*-
"""
Updated Jan 21, 2018
The primary goal of this file is to demonstrate a simple unittest implementation
@author: jrr
@author: rk
"""
import unittest
from Triangle import classifyTriangle
# This code implements the unit test functionality
# https://docs.python.org/3/library/unittest.html has a nice description of the framework
class TestTriangles(unittest.TestCase):
    """Unit tests for classifyTriangle: one test* method per classification
    branch (right/equilateral/scalene/isosceles) plus invalid-input cases."""
    # Each test* method below is discovered and run by unittest.
    def testRightTriangle(self):
        """ testing right triangles """
        self.assertEqual(classifyTriangle(3, 4, 5), "Right Triangle")
        self.assertEqual(classifyTriangle(5, 3, 4), "Right Triangle")
    def testEquilateralTriangle(self):
        """ testing equilateral triangles """
        self.assertEqual(classifyTriangle(1, 1, 1), "Equilateral Triangle")
        self.assertEqual(classifyTriangle(2, 2, 2), "Equilateral Triangle")
        self.assertEqual(classifyTriangle(5, 5, 5), "Equilateral Triangle")
    def testScaleneTriangle(self):
        """ testing scalene triangles """
        self.assertEqual(classifyTriangle(3, 5, 7), "Scalene Triangle")
    def testIsoscelesTriangle(self):
        """ testing isosceles triangles (also re-checks neighbouring classes) """
        self.assertEqual(classifyTriangle(3, 3, 3), "Equilateral Triangle")
        self.assertEqual(classifyTriangle(3, 4, 5), "Right Triangle")
        self.assertEqual(classifyTriangle(3, 4, 3), "Isosceles Triangle")
    def testRandom1(self):
        """ testing invalid values (non-integer side, side > 500) """
        self.assertEqual(classifyTriangle(2.5, 5, 8), "Error")
        self.assertEqual(classifyTriangle(650, 800, 33), "Error")
    def testRandom2(self):
        """ testing invalid values (non-positive sides) """
        self.assertEqual(classifyTriangle(-5, 8, 12), "Error")
        self.assertEqual(classifyTriangle(0, 2, 6), "Error")
    def testRandom3(self):
        """ invalid output testing """
        self.assertNotEqual(classifyTriangle(3, 4, 3), "Equilateral Triangle")
        self.assertNotEqual(classifyTriangle(3, 3, 6), "Right Triangle")
    def testRandom4(self):
        """ invalid output testing: non-numeric side must raise TypeError """
        with self.assertRaises(TypeError):
            classifyTriangle("a", 3, 5)
if __name__ == '__main__':
    # verbosity=2 prints one line per test; exit=False keeps the interpreter
    # alive (useful when run from an IDE or interactive session).
    unittest.main(exit=False, verbosity=2)
| {"/TestTriangle.py": ["/Triangle.py"]} |
66,627 | JMasekar/Triangle567 | refs/heads/master | /Triangle.py | # -*- coding: utf-8 -*-
"""
Created on Thu Jan 14 13:44:00 2016
Updated Jan 21, 2018
The primary goal of this file is to demonstrate a simple python program to classify triangles
@author: jrr
@author: rk
"""
def classifyTriangle(a, b, c):
    """
    Classify a triangle given its three integer side lengths.

    Returns one of "Equilateral Triangle", "Right Triangle",
    "Scalene Triangle", "Isosceles Triangle", "It is not a Triangle",
    or "Error" for invalid input (non-int, non-positive, or side > 500).
    Comparing a non-numeric side against 500 raises TypeError, matching the
    existing test-suite contract.
    """
    sides = (a, b, c)
    # Input validation: range, positivity, then type.
    if any(s > 500 for s in sides):
        return "Error"
    if any(s <= 0 for s in sides):
        return "Error"
    if not all(isinstance(s, int) for s in sides):
        return "Error"
    # Triangle inequality: every side must be shorter than the sum of the others.
    if a >= b + c or b >= a + c or c >= a + b:
        return "It is not a Triangle"
    if a == b == c:
        return "Equilateral Triangle"
    # Pythagorean check in every hypotenuse position.
    if a * a + b * b == c * c or b * b + c * c == a * a or a * a + c * c == b * b:
        return "Right Triangle"
    # Three distinct lengths -> scalene; otherwise exactly one pair matches.
    if len({a, b, c}) == 3:
        return "Scalene Triangle"
    return "Isosceles Triangle"
| {"/TestTriangle.py": ["/Triangle.py"]} |
66,629 | AntoineCrb/Raspberry-Master-Device | refs/heads/master | /manual.py | from pynput import keyboard
from communication.control import Control
# Single shared motor controller used by both key handlers below.
control = Control()
def on_press(key):
    """Map a pressed key to a drive command; unmapped keys are ignored.

    Layout is AZERTY-style: z=forward, s=backward, q/d=spin, r-u=left
    intensities 1-4, f-j=right intensities 1-4.
    """
    # char -> (console label, drive command)
    bindings = {
        'z': ('forward', control.forward),
        's': ('backward', control.backward),
        'd': ('rightSpin', control.right_spin),
        'q': ('leftSpin', control.left_spin),
        'r': ('left1', control.left1),
        't': ('left2', control.left2),
        'y': ('left3', control.left3),
        'u': ('left4', control.left4),
        'f': ('right1', control.right1),
        'g': ('right2', control.right2),
        'h': ('right3', control.right3),
        'j': ('right4', control.right4),
    }
    binding = bindings.get(key.char)
    if binding is not None:
        label, command = binding
        print(label)
        command()
def on_release(key):
    # Any key release halts the motors (dead-man style control).
    control.stop()
# Block listening for keyboard events until the listener terminates; each
# press drives the robot, each release stops it.
with keyboard.Listener(
        on_press=on_press,
        on_release=on_release) as listener:
    listener.join()
| {"/manual.py": ["/communication/control.py"], "/test.py": ["/algo.py"], "/autonomous.py": ["/communication/control.py", "/algo.py"]} |
66,630 | AntoineCrb/Raspberry-Master-Device | refs/heads/master | /accelerometer.py | import matplotlib.pyplot as plt
import numpy as np
from datetime import datetime
from mpu6050 import mpu6050
from pynput import keyboard
import scipy.integrate as integrate
import time
plt.style.use('ggplot')
# Raw accelerometer samples per axis (units per the mpu6050 library -- confirm).
accel_X = []
accel_Y = []
accel_Z = []
# Per-axis calibration offsets, filled in by reset_offset_accel().
avg_x = 0
avg_y = 0
avg_z = 0
# time.time() stamp of each sample; made relative by update_time().
accel_time = []
# MPU-6050 IMU on I2C address 0x68.
mpu = mpu6050(0x68)
def init():
    """Configure the IMU for its most sensitive ranges (±2 g accel, ±250 °/s gyro)."""
    print("--- initialisation")
    mpu.set_accel_range(mpu.ACCEL_RANGE_2G)
    mpu.set_gyro_range(mpu.GYRO_RANGE_250DEG)
def get_accel():
    """Take one accelerometer sample: record its timestamp and x/y/z values."""
    sample = mpu.get_accel_data()
    accel_time.append(time.time())
    for series, axis in ((accel_X, 'x'), (accel_Y, 'y'), (accel_Z, 'z')):
        series.append(sample[axis])
def update_time():
    """Shift every timestamp except the first to be relative to the first sample.

    (Index 0 itself is left untouched; the plots only use samples from
    index 5 onward.)
    """
    origin = accel_time[0]
    for idx in range(1, len(accel_time)):
        accel_time[idx] -= origin
def reset_offset_accel():
    """Average the warm-up samples (skipping the first 5) into per-axis offsets,
    then rebind the sample buffers to fresh empty lists."""
    global avg_x, avg_y, avg_z, accel_X, accel_Y, accel_Z, accel_time
    avg_x, avg_y, avg_z = (np.mean(s[5:]) for s in (accel_X, accel_Y, accel_Z))
    accel_X, accel_Y, accel_Z, accel_time = [], [], [], []
def update_accel():
    """Subtract the calibration offset from every stored sample, in place."""
    for series, offset in ((accel_X, avg_x), (accel_Y, avg_y), (accel_Z, avg_z)):
        for idx in range(len(series)):
            series[idx] -= offset
def run():
    """Calibrate the accelerometer for ~5 s, sample for ~10 s, then plot
    acceleration and integrated speed for each axis."""
    print("running...")
    init()
    # Phase 1: collect ~5 s of samples (device assumed stationary -- confirm)
    # to estimate per-axis offsets.
    print("start getting offset...")
    start = time.time()
    while time.time() - start < 5:
        get_accel()
    reset_offset_accel()
    start = time.time()
    # Phase 2: the actual 10 s measurement run.
    print("start real measure...")
    while time.time() - start < 10:
        get_accel()
    print("printing graph...")
    update_time()
    update_accel()
    # 3x2 grid: left column raw acceleration, right column its cumulative
    # trapezoidal integral (speed); first 5 samples skipped as warm-up.
    fig, axs = plt.subplots(3, 2)
    axs[0, 0].plot(accel_time[5:], accel_X[5:])
    axs[0, 0].set_title('accel X')
    axs[0, 1].plot(accel_time[5:], integrate.cumtrapz(accel_X[5:], accel_time[5:], initial=0))
    axs[0, 1].set_title('speed X')
    axs[1, 0].plot(accel_time[5:], accel_Y[5:], 'tab:orange')
    axs[1, 0].set_title('accel Y')
    axs[1, 1].plot(accel_time[5:], integrate.cumtrapz(accel_Y[5:], accel_time[5:], initial=0), 'tab:orange')
    axs[1, 1].set_title('speed Y')
    axs[2, 0].plot(accel_time[5:], accel_Z[5:], 'tab:green')
    axs[2, 0].set_title('accel Z')
    axs[2, 1].plot(accel_time[5:], integrate.cumtrapz(accel_Z[5:], accel_time[5:], initial=0), 'tab:green')
    axs[2, 1].set_title('speed Z')
    plt.show()
run() | {"/manual.py": ["/communication/control.py"], "/test.py": ["/algo.py"], "/autonomous.py": ["/communication/control.py", "/algo.py"]} |
66,631 | AntoineCrb/Raspberry-Master-Device | refs/heads/master | /algo.py | import cv2
import numpy as np
import math
# get lines
def canny(image):
    """Return the Canny edge map of *image*."""
    gray = cv2.cvtColor(image, cv2.COLOR_RGB2GRAY)
    # Large kernel: a strong blur suppresses road-glare speckle before the
    # edge detector runs.
    kernel_size = 15
    blurred = cv2.GaussianBlur(gray, (kernel_size, kernel_size), 0)
    return cv2.Canny(blurred, 50, 150)
def make_points(image, line):
    """Convert a (slope, intercept) lane fit into pixel endpoints.

    The segment spans from the bottom of the frame up to 3/5 of its height.
    :return: [[x1, y1, x2, y2]] with y1 at the bottom edge
    """
    slope, intercept = line
    bottom_y = int(image.shape[0])      # bottom edge of the frame
    top_y = int(bottom_y * 3 / 5)       # slightly below the vertical middle
    bottom_x = int((bottom_y - intercept) / slope)
    top_x = int((top_y - intercept) / slope)
    return [[bottom_x, bottom_y, top_x, top_y]]
def average_slope_intercept(image, lines):
    """Average Hough segments into at most one left and one right lane line.

    :param lines: HoughLinesP output (iterable of [[x1, y1, x2, y2]]) or None
    :return: list of make_points() results (possibly empty), or None when
             *lines* is None
    """
    if lines is None:
        return None
    left_fit = []
    right_fit = []
    for line in lines:
        for x1, y1, x2, y2 in line:
            slope, intercept = np.polyfit((x1, x2), (y1, y2), 1)
            # y grows downward in image coordinates, so the left lane shows
            # a negative slope.
            (left_fit if slope < 0 else right_fit).append((slope, intercept))
    averaged = [np.average(fit, axis=0) for fit in (left_fit, right_fit) if fit]
    return [make_points(image, fit) for fit in averaged]
def display_lines(img, lines):
    """Draw *lines* as thick blue segments on a black canvas shaped like *img*."""
    canvas = np.zeros_like(img)
    if lines is None:
        return canvas
    for line in lines:
        for x1, y1, x2, y2 in line:
            cv2.line(canvas, (x1, y1), (x2, y2), (255, 0, 0), 20)
    return canvas
def region_of_interest(canny):
    """Mask the edge map down to the 200-pixel band at the bottom of the frame."""
    height = canny.shape[0]
    width = canny.shape[1]
    # Rectangle covering the bottom 200 rows, given as a filled polygon.
    band = np.array([[
        (0, height),
        (width, height),
        (width, height - 200),
        (0, height - 200),
    ]], np.int32)
    mask = np.zeros_like(canny)
    cv2.fillPoly(mask, band, 255)
    return cv2.bitwise_and(canny, mask)
def get_average_slopes(lines):
    """
    Average the steering offset over the detected lane segments.

    Each segment's angle atan(dy/dx) is normalised by 2*pi*len(lines) and
    nudged 0.15 toward zero in the direction of its sign (empirical steering
    calibration constant -- TODO confirm).

    :param lines: iterable of segment groups, each yielding (x1, y1, x2, y2),
                  or None
    :return: the accumulated offset (float/int), or None when *lines* is None
    """
    if lines is None:
        return None
    avg = 0
    for line in lines:
        for x1, y1, x2, y2 in line:
            a = math.atan((y2 - y1) / (x2 - x1)) / (2 * math.pi * len(lines))
            # BUGFIX: the original computed a / abs(a) as the sign, which
            # raises ZeroDivisionError for perfectly horizontal segments
            # (a == 0); treat flat segments as contributing no offset.
            if a > 0:
                avg += a - 0.15
            elif a < 0:
                avg += a + 0.15
    return avg
66,632 | AntoineCrb/Raspberry-Master-Device | refs/heads/master | /test.py | import cv2
import numpy as np
import algo
import math
# Manual smoke test of the lane-detection pipeline on a static image.
frame = cv2.imread('assets/testt.jpg')
frame = cv2.resize(frame, (640, 480))
# Edge map, cropped to the bottom region of interest.
frame_canny = algo.canny(frame)
cropped_canny = algo.region_of_interest(frame_canny)
# Probabilistic Hough transform, then collapse segments into lane lines.
lines = cv2.HoughLinesP(cropped_canny, 2, np.pi/180, 100, np.array([]), minLineLength=40, maxLineGap=5)
averaged_lines = algo.average_slope_intercept(cropped_canny, lines)
# Print the steering offset the autonomous loop would act on.
print(algo.get_average_slopes(averaged_lines))
line_image = algo.display_lines(cropped_canny, averaged_lines)
combo_image = cv2.addWeighted(cropped_canny, 0.8, line_image, 1, 1)
# Show each pipeline stage; any key closes the windows.
cv2.imshow("result", frame)
cv2.imshow("canny", frame_canny)
cv2.imshow("cropped canny", combo_image)
cv2.waitKey(0)
| {"/manual.py": ["/communication/control.py"], "/test.py": ["/algo.py"], "/autonomous.py": ["/communication/control.py", "/algo.py"]} |
66,633 | AntoineCrb/Raspberry-Master-Device | refs/heads/master | /autonomous.py | import cv2
import numpy as np
from imutils.video import VideoStream, FPS
import time
from communication.control import Control
import algo
# Shared motor controller plus the PiCamera stream (rotation=180 suggests the
# camera is mounted inverted -- confirm) and an FPS counter.
control = Control()
vs = VideoStream(usePiCamera=True, resolution=(640, 480), framerate=32, rotation=180)
fps = FPS()
def init():
    """Start the camera, let it warm up, then hand off to the drive loop.

    Guarantees the motors are stopped if the loop fails for any reason.
    """
    vs.start()
    time.sleep(2.0)  # let the camera sensor settle before reading frames
    fps.start()
    try:
        loop()
    except BaseException as exc:
        # BUGFIX: the original bare ``except:`` swallowed every failure --
        # including KeyboardInterrupt -- after printing only 'error'.
        # Still stop the motors on any failure, report what happened, and
        # re-raise non-Exception signals such as Ctrl-C.
        print('error:', repr(exc))
        control.stop()
        if not isinstance(exc, Exception):
            raise
def loop():
    """Main drive loop: detect lane lines on each frame and steer accordingly.

    Runs until 'q' is pressed in a preview window; prints FPS statistics
    on exit.
    """
    while True:
        frame = vs.read()
        canny = algo.canny(frame)
        cropped_canny = algo.region_of_interest(canny)
        # Probabilistic Hough transform over the masked edge map.
        lines = cv2.HoughLinesP(cropped_canny, 2, np.pi/180, 100, np.array([]), minLineLength=40, maxLineGap=5)
        averaged_lines = algo.average_slope_intercept(cropped_canny, lines)
        line_image = algo.display_lines(cropped_canny, averaged_lines)
        combo_image = cv2.addWeighted(cropped_canny, 0.8, line_image, 1, 1)
        # x is the averaged steering offset; None when no lines were found.
        x = algo.get_average_slopes(averaged_lines)
        # NOTE(review): x <= -0.15 falls through to left1() while x >= 0.15
        # hits the final stop() -- the bands are asymmetric; confirm intended.
        if x is None: control.stop()
        elif -0.15 < x < -0.08: control.left2()
        elif x < -0.05: control.left1()
        elif x < 0.05: control.forward()
        elif x < 0.08: control.right1()
        elif x < 0.15: control.right2()
        else: control.stop()
        if x is not None: print(x)
        cv2.imshow("Result", combo_image)
        cv2.imshow("Frame", frame)
        # 'q' stops the motors and exits the loop.
        key = cv2.waitKey(1) & 0xFF
        if key == ord("q"):
            control.stop()
            break
        fps.update()
    fps.stop()
    print("[INFO] elapsed time: {:.2f}".format(fps.elapsed()))
    print("[INFO] approx. FPS: {:.2f}".format(fps.fps()))
    cv2.destroyAllWindows()
    vs.stop()
init() | {"/manual.py": ["/communication/control.py"], "/test.py": ["/algo.py"], "/autonomous.py": ["/communication/control.py", "/algo.py"]} |
66,634 | AntoineCrb/Raspberry-Master-Device | refs/heads/master | /communication/control.py | import RPi.GPIO as GPIO
# rpi pins - GPIO BCM mode
a = 26
b = 19
c = 13
d = 6
class Control:
    """Drive-command encoder: each command is an integer 0-12 written as a
    4-bit binary word onto GPIO pins a/b/c/d (BCM numbering, weights
    d=8, c=4, b=2, a=1)."""
    current = 0  # last command number sent
    def __init__(self):
        GPIO.setwarnings(False)
        GPIO.setmode(GPIO.BCM)
        for pin in (a, b, c, d):
            GPIO.setup(pin, GPIO.OUT)
    def get_current_state(self):
        """Return the last command number written to the pins."""
        return self.current
    def set_number(self, n):
        """Encode decimal *n* onto the four output pins."""
        self.current = n
        remainder = n
        # Peel off the three high bits by successive subtraction, mirroring
        # the original decimal-to-binary conversion.
        for pin, weight in ((d, 8), (c, 4), (b, 2)):
            if remainder // weight == 1:
                remainder -= weight
                GPIO.output(pin, GPIO.HIGH)
            else:
                GPIO.output(pin, GPIO.LOW)
        GPIO.output(a, GPIO.HIGH if remainder == 1 else GPIO.LOW)
    # Named drive commands -> wire codes 0-12.
    def stop(self): self.set_number(0)
    def forward(self): self.set_number(1)
    def backward(self): self.set_number(2)
    def right_spin(self): self.set_number(3)
    def left_spin(self): self.set_number(4)
    def right1(self): self.set_number(5)
    def right2(self): self.set_number(6)
    def right3(self): self.set_number(7)
    def right4(self): self.set_number(8)
    def left1(self): self.set_number(9)
    def left2(self): self.set_number(10)
    def left3(self): self.set_number(11)
    def left4(self): self.set_number(12)
66,638 | CloudMind6-6/CloudMind | refs/heads/combine | /cloudmind/__init__.py | # -*- coding: utf-8 -*-
from cloudmind.blueprint import apiv1
from cloudmind.config.config import Config
from flask import Flask
from flask import redirect
from flask import render_template
from flask import request
from flask import session
from flask import url_for
from flask_mail import Mail
from flask_restful import Api
from flask_sqlalchemy import SQLAlchemy
app = Flask(__name__, static_url_path='')
# NOTE(review): hard-coded development secret key -- move to config for production.
app.secret_key = 'development'
app.config.from_object(Config)
db = SQLAlchemy(app)
mail = Mail(app)
# The REST API is mounted on the apiv1 blueprint.
api = Api(apiv1)
from cloudmind.restful.v1 import label
from cloudmind.restful.v1 import label_palette
from cloudmind.restful.v1 import leaf
from cloudmind.restful.v1 import node
from cloudmind.restful.v1 import profile
from cloudmind.restful.v1 import root
# Label routes
api.add_resource(label.LabelAdd, '/label/add')
api.add_resource(label.LabelRemove, '/label/remove')
# Label-palette routes
api.add_resource(label_palette.PaletteList, '/label_palette/list')
api.add_resource(label_palette.PaletteAdd, '/label_palette/add')
api.add_resource(label_palette.PaletteRemove, '/label_palette/remove')
api.add_resource(label_palette.PaletteUpdate, '/label_palette/update')
# Leaf routes (/leaf/upload and /leaf/add share a handler, distinct endpoints)
api.add_resource(leaf.LeafList, '/leaf/list')
api.add_resource(leaf.LeafUpload, '/leaf/upload', endpoint='leaf_upload')
api.add_resource(leaf.LeafUpload, '/leaf/add', endpoint='leaf_add')
api.add_resource(leaf.LeafRemove, '/leaf/remove')
api.add_resource(leaf.LeafUpdate, '/leaf/update')
# Node routes
api.add_resource(node.NodeList, '/node/list')
api.add_resource(node.NodeAdd, '/node/add')
api.add_resource(node.NodeRemove, '/node/remove')
api.add_resource(node.NodeUpdate, '/node/update')
# Profile routes
api.add_resource(profile.Profile, '/profile')
api.add_resource(profile.ProfileUpload, '/profile/upload')
api.add_resource(profile.ProfileSearch, '/profile/search')
# Root routes
api.add_resource(root.RootList, '/root/list')
api.add_resource(root.RootInvite, '/root/invite')
# Register the blueprint so the API routes above go live.
app.register_blueprint(apiv1)
from cloudmind.oauth import google
@app.route('/')
def index():
    """Serve the app shell for authenticated sessions, the login page otherwise.

    When a Google token is present, the matching local User (by oauth id)
    is looked up and its id cached in the session.
    """
    if 'google_token' not in session:
        return render_template('login.html')
    oauth_id = google.get('userinfo').data.get('id', None)
    if oauth_id is not None:
        from cloudmind.model.user import User
        account = db.session.query(User).filter(User.oauth_id == oauth_id).first()
        if account is not None:
            session['user_id'] = account.id
    return render_template('app.html')
@app.route('/login')
def login():
    # Start the Google OAuth flow; Google redirects back to /oauth2callback.
    return google.authorize(callback=url_for('authorized', _external=True))
@app.route('/logout')
def logout():
    """Drop the cached identity and OAuth token, then return home."""
    for stale_key in ('user_id', 'google_token'):
        session.pop(stale_key, None)
    return redirect(url_for('index'))
@app.route('/oauth2callback', defaults={'participant_id': None})
def authorized(participant_id):
    """Google OAuth2 callback: store the token, upsert the local User,
    then continue to the app (or invite acceptance).

    NOTE(review): *participant_id* comes from the route defaults; the invite
    flow passes it to url_for('authorized', ...), which places it in the
    query string — verify the value actually reaches this view.
    """
    resp = google.authorized_response()
    if resp is None:
        # Google denied the grant; echo its reason back to the user.
        return 'Access denied: reason=%s error=%s' % (
            request.args['error_reason'],
            request.args['error_description']
        )
    session['google_token'] = (resp['access_token'], '')
    userinfo = google.get('userinfo').data
    from cloudmind.model.user import User
    user = db.session.query(User).filter(User.oauth_id == userinfo['id']).first()
    """DB에 없다면 추가하기"""
    # (above: "create the user if not yet in the DB")
    if user is None:
        user = User(
            login_method='google',
            oauth_id=userinfo['id'],
            name=(userinfo['given_name'] + ' ' + userinfo['family_name']),
            email=userinfo['email'],
            picture=userinfo['picture']
        )
        db.session.add(user)
    else:
        # Existing account: refresh name/picture from Google's profile.
        user.name = (userinfo['given_name'] + ' ' + userinfo['family_name'])
        user.picture = userinfo['picture']
    db.session.commit()
    session['user_id'] = user.id
    if participant_id is None:
        return redirect(url_for('index'))
    else:
        return redirect(url_for('invite_ok', participant_id=participant_id))
@app.route('/invite/<int:participant_id>')
def invite_ok(participant_id):
    """Accept a project invitation; requires login and that the invite
    is addressed to the current user."""
    if 'user_id' not in session:
        # Not logged in: run the OAuth flow first, carrying the invite id.
        return google.authorize(callback=url_for('authorized', participant_id=participant_id, _external=True))
    from cloudmind.model.participant import Participant
    participant = db.session.query(Participant).filter(Participant.id == participant_id).first()
    if participant is None or participant.user_id != session['user_id']:
        # Unknown invite or not addressed to this user: silently go home.
        return redirect(url_for('index'))
    participant.is_accepted = True
    db.session.commit()
    return redirect(url_for('index'))
@google.tokengetter
def get_google_oauth_token():
    # OAuth callback: supplies the stored token when signing API requests.
    return session.get('google_token')
"""
@web.route('/')
def index():
if 'user_idx' in session:
return render_template('app.html')
else:
return render_template('intro.html')
"""
| {"/cloudmind/__init__.py": ["/cloudmind/blueprint.py", "/cloudmind/model/user.py", "/cloudmind/model/participant.py"], "/cloudmind/model/label.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/root.py": ["/cloudmind/__init__.py", "/cloudmind/mail.py", "/cloudmind/model/node.py", "/cloudmind/model/participant.py", "/cloudmind/model/user.py"], "/cloudmind/model/contact.py": ["/cloudmind/__init__.py"], "/cloudmind/model/participant.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/label_palette.py": ["/cloudmind/__init__.py", "/cloudmind/model/label_palette.py", "/cloudmind/model/node.py"], "/cloudmind/model/label_palette.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/profile.py": ["/cloudmind/blueprint.py", "/cloudmind/__init__.py", "/cloudmind/model/user.py"], "/cloudmind/mail.py": ["/cloudmind/__init__.py"], "/cloudmind/model/node.py": ["/cloudmind/__init__.py", "/cloudmind/model/label.py", "/cloudmind/model/label_palette.py", "/cloudmind/model/leaf.py", "/cloudmind/model/participant.py", "/cloudmind/model/user.py"], "/cloudmind/restful/v1/node.py": ["/cloudmind/__init__.py", "/cloudmind/model/label_palette.py", "/cloudmind/model/node.py", "/cloudmind/model/participant.py", "/cloudmind/model/user.py"], "/cloudmind/model/user.py": ["/cloudmind/__init__.py"], "/cloudmind/model/leaf.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/label.py": ["/cloudmind/__init__.py", "/cloudmind/model/label.py", "/cloudmind/model/node.py"], "/cloudmind/restful/v1/leaf.py": ["/cloudmind/blueprint.py", "/cloudmind/__init__.py", "/cloudmind/model/leaf.py", "/cloudmind/model/node.py", "/cloudmind/model/user.py"]} |
66,639 | CloudMind6-6/CloudMind | refs/heads/combine | /cloudmind/blueprint.py | from flask import Blueprint
apiv1 = Blueprint('apiV1', __name__, url_prefix='/api/v1')  # versioned REST blueprint; resources attached in cloudmind/__init__.py
| {"/cloudmind/__init__.py": ["/cloudmind/blueprint.py", "/cloudmind/model/user.py", "/cloudmind/model/participant.py"], "/cloudmind/model/label.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/root.py": ["/cloudmind/__init__.py", "/cloudmind/mail.py", "/cloudmind/model/node.py", "/cloudmind/model/participant.py", "/cloudmind/model/user.py"], "/cloudmind/model/contact.py": ["/cloudmind/__init__.py"], "/cloudmind/model/participant.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/label_palette.py": ["/cloudmind/__init__.py", "/cloudmind/model/label_palette.py", "/cloudmind/model/node.py"], "/cloudmind/model/label_palette.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/profile.py": ["/cloudmind/blueprint.py", "/cloudmind/__init__.py", "/cloudmind/model/user.py"], "/cloudmind/mail.py": ["/cloudmind/__init__.py"], "/cloudmind/model/node.py": ["/cloudmind/__init__.py", "/cloudmind/model/label.py", "/cloudmind/model/label_palette.py", "/cloudmind/model/leaf.py", "/cloudmind/model/participant.py", "/cloudmind/model/user.py"], "/cloudmind/restful/v1/node.py": ["/cloudmind/__init__.py", "/cloudmind/model/label_palette.py", "/cloudmind/model/node.py", "/cloudmind/model/participant.py", "/cloudmind/model/user.py"], "/cloudmind/model/user.py": ["/cloudmind/__init__.py"], "/cloudmind/model/leaf.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/label.py": ["/cloudmind/__init__.py", "/cloudmind/model/label.py", "/cloudmind/model/node.py"], "/cloudmind/restful/v1/leaf.py": ["/cloudmind/blueprint.py", "/cloudmind/__init__.py", "/cloudmind/model/leaf.py", "/cloudmind/model/node.py", "/cloudmind/model/user.py"]} |
66,640 | CloudMind6-6/CloudMind | refs/heads/combine | /cloudmind/model/label.py | from cloudmind import db
class Label(db.Model):
    """A label attached to a node; its name/color come from a palette entry."""
    __tablename__ = 'label'
    id = db.Column(db.Integer, primary_key=True)
    own_node_id = db.Column(db.Integer, db.ForeignKey('node.id'))  # node the label is attached to
    palette_id = db.Column(db.Integer, db.ForeignKey('label_palette.id'))  # palette entry defining appearance
    # relationship
    own_node = db.relationship('Node', backref="labels")
    palette = db.relationship('LabelPalette')
    def __repr__(self):
        return '<Label %r>' % self.id
    @property
    def serialize(self):
        # Client-facing form: a label is represented solely by its palette id.
        return self.palette_id
| {"/cloudmind/__init__.py": ["/cloudmind/blueprint.py", "/cloudmind/model/user.py", "/cloudmind/model/participant.py"], "/cloudmind/model/label.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/root.py": ["/cloudmind/__init__.py", "/cloudmind/mail.py", "/cloudmind/model/node.py", "/cloudmind/model/participant.py", "/cloudmind/model/user.py"], "/cloudmind/model/contact.py": ["/cloudmind/__init__.py"], "/cloudmind/model/participant.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/label_palette.py": ["/cloudmind/__init__.py", "/cloudmind/model/label_palette.py", "/cloudmind/model/node.py"], "/cloudmind/model/label_palette.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/profile.py": ["/cloudmind/blueprint.py", "/cloudmind/__init__.py", "/cloudmind/model/user.py"], "/cloudmind/mail.py": ["/cloudmind/__init__.py"], "/cloudmind/model/node.py": ["/cloudmind/__init__.py", "/cloudmind/model/label.py", "/cloudmind/model/label_palette.py", "/cloudmind/model/leaf.py", "/cloudmind/model/participant.py", "/cloudmind/model/user.py"], "/cloudmind/restful/v1/node.py": ["/cloudmind/__init__.py", "/cloudmind/model/label_palette.py", "/cloudmind/model/node.py", "/cloudmind/model/participant.py", "/cloudmind/model/user.py"], "/cloudmind/model/user.py": ["/cloudmind/__init__.py"], "/cloudmind/model/leaf.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/label.py": ["/cloudmind/__init__.py", "/cloudmind/model/label.py", "/cloudmind/model/node.py"], "/cloudmind/restful/v1/leaf.py": ["/cloudmind/blueprint.py", "/cloudmind/__init__.py", "/cloudmind/model/leaf.py", "/cloudmind/model/node.py", "/cloudmind/model/user.py"]} |
66,641 | CloudMind6-6/CloudMind | refs/heads/combine | /cloudmind/restful/v1/root.py | from cloudmind import db
from cloudmind.mail import mail_send_invite
from cloudmind.model.node import Node
from cloudmind.model.participant import Participant
from cloudmind.model.user import User
from flask import request
from flask import session
from flask_restful import abort
from flask_restful import Resource
import json
from sqlalchemy.sql.expression import true
class RootList(Resource):
    """List the root nodes (projects) the logged-in user participates in."""
    def get(self):
        if 'user_id' not in session:
            abort(403, message="already logged out")
        # Only accepted participations count toward the project list.
        participants = db.session.query(Participant).filter(Participant.user_id == session['user_id'])\
            .filter(Participant.is_accepted == true())\
            .all()
        node_list = []
        for item in participants:
            # if is root node
            # (roots are the participations whose node has no parent)
            if item.own_node.parent_node_id is None:
                node_list.append(item.own_node.serialize_root)
        return {
            'success': True,
            "node_list": node_list
        }
class RootInvite(Resource):
    """Invite another registered user (by email) to a root node, then email
    them an acceptance link."""
    def post(self):
        args = json.loads(request.data.decode('utf-8'))
        root_id = args['root_idx']
        email = args['email']
        if 'user_id' not in session:
            abort(403, message="already logged out")
        root_node = db.session.query(Node).filter(Node.id == root_id).first()
        if root_node is None:
            abort(404, message="Not found {}".format("Node"))
        # Only existing members may invite others.
        if root_node.check_member(session['user_id']) is False:
            abort(404, message="노드멤버 아님")
        user = User.query.filter(User.email == email).first()
        from_user = User.query.filter(User.id == session['user_id']).first()
        if user is None:
            abort(404, message="Not found {}".format("User"))
        # Reject duplicates: a pending or accepted participation already exists.
        participant_check = db.session.query(Participant)\
            .filter(Participant.own_node_id == root_id)\
            .filter(Participant.user_id == user.id).first()
        if participant_check is not None:
            abort(403, message="이미 초대중이거나 멤버에 속해 있습니다.")
        # Create the pending participation and mail the acceptance link.
        participant = Participant()
        participant.is_accepted = False
        participant.own_node = root_node
        participant.user = user
        participant.from_user = from_user
        db.session.add(participant)
        db.session.commit()
        mail_send_invite(participant)
        return {
            'success': True
        }
| {"/cloudmind/__init__.py": ["/cloudmind/blueprint.py", "/cloudmind/model/user.py", "/cloudmind/model/participant.py"], "/cloudmind/model/label.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/root.py": ["/cloudmind/__init__.py", "/cloudmind/mail.py", "/cloudmind/model/node.py", "/cloudmind/model/participant.py", "/cloudmind/model/user.py"], "/cloudmind/model/contact.py": ["/cloudmind/__init__.py"], "/cloudmind/model/participant.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/label_palette.py": ["/cloudmind/__init__.py", "/cloudmind/model/label_palette.py", "/cloudmind/model/node.py"], "/cloudmind/model/label_palette.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/profile.py": ["/cloudmind/blueprint.py", "/cloudmind/__init__.py", "/cloudmind/model/user.py"], "/cloudmind/mail.py": ["/cloudmind/__init__.py"], "/cloudmind/model/node.py": ["/cloudmind/__init__.py", "/cloudmind/model/label.py", "/cloudmind/model/label_palette.py", "/cloudmind/model/leaf.py", "/cloudmind/model/participant.py", "/cloudmind/model/user.py"], "/cloudmind/restful/v1/node.py": ["/cloudmind/__init__.py", "/cloudmind/model/label_palette.py", "/cloudmind/model/node.py", "/cloudmind/model/participant.py", "/cloudmind/model/user.py"], "/cloudmind/model/user.py": ["/cloudmind/__init__.py"], "/cloudmind/model/leaf.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/label.py": ["/cloudmind/__init__.py", "/cloudmind/model/label.py", "/cloudmind/model/node.py"], "/cloudmind/restful/v1/leaf.py": ["/cloudmind/blueprint.py", "/cloudmind/__init__.py", "/cloudmind/model/leaf.py", "/cloudmind/model/node.py", "/cloudmind/model/user.py"]} |
66,642 | CloudMind6-6/CloudMind | refs/heads/combine | /cloudmind/model/contact.py | from cloudmind import db
class Contact(db.Model):
    """A directed contact link between two users."""
    __tablename__ = 'contact'
    id = db.Column(db.Integer, primary_key=True)
    from_user_id = db.Column(db.Integer, db.ForeignKey('user.id'))  # owner of the contact entry
    to_user_id = db.Column(db.Integer, db.ForeignKey('user.id'))  # user being listed as a contact
    # relationship
    from_user = db.relationship('User', primaryjoin='Contact.from_user_id == User.id')
    to_user = db.relationship('User', primaryjoin='Contact.to_user_id == User.id')
    def __repr__(self):
        return '<Contact %r>' % self.id
| {"/cloudmind/__init__.py": ["/cloudmind/blueprint.py", "/cloudmind/model/user.py", "/cloudmind/model/participant.py"], "/cloudmind/model/label.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/root.py": ["/cloudmind/__init__.py", "/cloudmind/mail.py", "/cloudmind/model/node.py", "/cloudmind/model/participant.py", "/cloudmind/model/user.py"], "/cloudmind/model/contact.py": ["/cloudmind/__init__.py"], "/cloudmind/model/participant.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/label_palette.py": ["/cloudmind/__init__.py", "/cloudmind/model/label_palette.py", "/cloudmind/model/node.py"], "/cloudmind/model/label_palette.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/profile.py": ["/cloudmind/blueprint.py", "/cloudmind/__init__.py", "/cloudmind/model/user.py"], "/cloudmind/mail.py": ["/cloudmind/__init__.py"], "/cloudmind/model/node.py": ["/cloudmind/__init__.py", "/cloudmind/model/label.py", "/cloudmind/model/label_palette.py", "/cloudmind/model/leaf.py", "/cloudmind/model/participant.py", "/cloudmind/model/user.py"], "/cloudmind/restful/v1/node.py": ["/cloudmind/__init__.py", "/cloudmind/model/label_palette.py", "/cloudmind/model/node.py", "/cloudmind/model/participant.py", "/cloudmind/model/user.py"], "/cloudmind/model/user.py": ["/cloudmind/__init__.py"], "/cloudmind/model/leaf.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/label.py": ["/cloudmind/__init__.py", "/cloudmind/model/label.py", "/cloudmind/model/node.py"], "/cloudmind/restful/v1/leaf.py": ["/cloudmind/blueprint.py", "/cloudmind/__init__.py", "/cloudmind/model/leaf.py", "/cloudmind/model/node.py", "/cloudmind/model/user.py"]} |
66,643 | CloudMind6-6/CloudMind | refs/heads/combine | /cloudmind/model/participant.py | from cloudmind import db
class Participant(db.Model):
    """Membership (or pending invitation) of a user in a node/project."""
    __tablename__ = 'participant'
    id = db.Column(db.Integer, primary_key=True)
    is_accepted = db.Column(db.Boolean, default=False)  # False while the invite is pending
    own_node_id = db.Column(db.Integer, db.ForeignKey('node.id'))
    user_id = db.Column(db.Integer, db.ForeignKey('user.id'))  # invited/participating user
    from_user_id = db.Column(db.Integer, db.ForeignKey('user.id'))  # user who sent the invite
    # relationship
    own_node = db.relationship('Node')
    user = db.relationship('User', primaryjoin='Participant.user_id == User.id')
    from_user = db.relationship('User', primaryjoin='Participant.from_user_id == User.id')
    def __repr__(self):
        return '<Participant %r>' % self.id
| {"/cloudmind/__init__.py": ["/cloudmind/blueprint.py", "/cloudmind/model/user.py", "/cloudmind/model/participant.py"], "/cloudmind/model/label.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/root.py": ["/cloudmind/__init__.py", "/cloudmind/mail.py", "/cloudmind/model/node.py", "/cloudmind/model/participant.py", "/cloudmind/model/user.py"], "/cloudmind/model/contact.py": ["/cloudmind/__init__.py"], "/cloudmind/model/participant.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/label_palette.py": ["/cloudmind/__init__.py", "/cloudmind/model/label_palette.py", "/cloudmind/model/node.py"], "/cloudmind/model/label_palette.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/profile.py": ["/cloudmind/blueprint.py", "/cloudmind/__init__.py", "/cloudmind/model/user.py"], "/cloudmind/mail.py": ["/cloudmind/__init__.py"], "/cloudmind/model/node.py": ["/cloudmind/__init__.py", "/cloudmind/model/label.py", "/cloudmind/model/label_palette.py", "/cloudmind/model/leaf.py", "/cloudmind/model/participant.py", "/cloudmind/model/user.py"], "/cloudmind/restful/v1/node.py": ["/cloudmind/__init__.py", "/cloudmind/model/label_palette.py", "/cloudmind/model/node.py", "/cloudmind/model/participant.py", "/cloudmind/model/user.py"], "/cloudmind/model/user.py": ["/cloudmind/__init__.py"], "/cloudmind/model/leaf.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/label.py": ["/cloudmind/__init__.py", "/cloudmind/model/label.py", "/cloudmind/model/node.py"], "/cloudmind/restful/v1/leaf.py": ["/cloudmind/blueprint.py", "/cloudmind/__init__.py", "/cloudmind/model/leaf.py", "/cloudmind/model/node.py", "/cloudmind/model/user.py"]} |
66,644 | CloudMind6-6/CloudMind | refs/heads/combine | /cloudmind/restful/v1/label_palette.py | from cloudmind import db
from cloudmind.model.label_palette import LabelPalette
from cloudmind.model.node import Node
from flask import request
from flask import session
from flask_restful import abort
from flask_restful import Resource
import json
class PaletteList(Resource):
    """List the label palettes of a root node (members only)."""
    def get(self):
        root_id = request.args.get('root_idx')
        if 'user_id' not in session:
            abort(403, message="already logged out")
        root_node = db.session.query(Node).filter(Node.id == root_id).first()
        if root_node is None:
            abort(404, message="Not found {}".format("Node"))
        if root_node.check_member(session['user_id']) is False:
            abort(404, message="노드멤버 아님")
        palettes = db.session.query(LabelPalette).filter(LabelPalette.root_node_id == root_id).all()
        return {
            'success': True,
            'label_palette_list': [i.serialize for i in palettes]
        }
class PaletteAdd(Resource):
    """Create a palette entry on a root node (members only, max 8 per root)."""
    def post(self):
        args = json.loads(request.data.decode('utf-8'))
        root_id = args['root_idx']
        name = args['name']
        color = args['color']
        if 'user_id' not in session:
            abort(403, message="already logged out")
        root_node = db.session.query(Node).filter(Node.id == root_id).first()
        if root_node is None:
            abort(404, message="Not found {}".format("Node"))
        if root_node.check_member(session['user_id']) is False:
            abort(404, message="노드멤버 아님")
        palettes = db.session.query(LabelPalette).filter(LabelPalette.root_node_id == root_id).all()
        # Hard cap of 8 palettes per root.
        if len(palettes) >= 8:
            abort(400, message="필레트 8개보다 많음")
        palette = LabelPalette(root_node_id=root_id, name=name, color=color)
        db.session.add(palette)
        db.session.commit()
        return {
            "success": True,
            "palette": palette.serialize
        }
class PaletteRemove(Resource):
    """Delete a palette entry and return the refreshed palette/node lists."""
    def post(self):
        args = json.loads(request.data.decode('utf-8'))
        palette_id = args['palette_idx']
        if 'user_id' not in session:
            abort(403, message="already logged out")
        palette = db.session.query(LabelPalette).filter(LabelPalette.id == palette_id).first()
        if palette is None:
            abort(404, message="Not found {}".format("Palette"))
        root_node = db.session.query(Node).filter(Node.id == palette.root_node_id).first()
        if root_node is None:
            abort(404, message="Not found {}".format("Node"))
        if root_node.check_member(session['user_id']) is False:
            abort(404, message="노드멤버 아님")
        db.session.delete(palette)
        db.session.commit()
        # Return both lists so the client can refresh labels that referenced
        # the removed palette.
        palettes = db.session.query(LabelPalette).filter(LabelPalette.root_node_id == root_node.id).all()
        nodes = db.session.query(Node).filter(Node.root_node_id == root_node.id).all()
        return {
            "success": True,
            "palette_list": [i.serialize for i in palettes],
            "node_list": [i.serialize for i in nodes]
        }
class PaletteUpdate(Resource):
    """Rename/recolor a palette entry (members of its root node only)."""
    def post(self):
        args = json.loads(request.data.decode('utf-8'))
        palette_id = args['palette_idx']
        name = args['name']
        color = args['color']
        if 'user_id' not in session:
            abort(403, message="already logged out")
        palette = db.session.query(LabelPalette).filter(LabelPalette.id == palette_id).first()
        if palette is None:
            abort(404, message="Not found {}".format("Palette"))
        root_node = db.session.query(Node).filter(Node.id == palette.root_node_id).first()
        if root_node is None:
            abort(404, message="Not found {}".format("Node"))
        if root_node.check_member(session['user_id']) is False:
            abort(404, message="노드멤버 아님")
        palette.name = name
        palette.color = color
        db.session.commit()
        return {"success": True, 'palette': palette.serialize}
| {"/cloudmind/__init__.py": ["/cloudmind/blueprint.py", "/cloudmind/model/user.py", "/cloudmind/model/participant.py"], "/cloudmind/model/label.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/root.py": ["/cloudmind/__init__.py", "/cloudmind/mail.py", "/cloudmind/model/node.py", "/cloudmind/model/participant.py", "/cloudmind/model/user.py"], "/cloudmind/model/contact.py": ["/cloudmind/__init__.py"], "/cloudmind/model/participant.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/label_palette.py": ["/cloudmind/__init__.py", "/cloudmind/model/label_palette.py", "/cloudmind/model/node.py"], "/cloudmind/model/label_palette.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/profile.py": ["/cloudmind/blueprint.py", "/cloudmind/__init__.py", "/cloudmind/model/user.py"], "/cloudmind/mail.py": ["/cloudmind/__init__.py"], "/cloudmind/model/node.py": ["/cloudmind/__init__.py", "/cloudmind/model/label.py", "/cloudmind/model/label_palette.py", "/cloudmind/model/leaf.py", "/cloudmind/model/participant.py", "/cloudmind/model/user.py"], "/cloudmind/restful/v1/node.py": ["/cloudmind/__init__.py", "/cloudmind/model/label_palette.py", "/cloudmind/model/node.py", "/cloudmind/model/participant.py", "/cloudmind/model/user.py"], "/cloudmind/model/user.py": ["/cloudmind/__init__.py"], "/cloudmind/model/leaf.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/label.py": ["/cloudmind/__init__.py", "/cloudmind/model/label.py", "/cloudmind/model/node.py"], "/cloudmind/restful/v1/leaf.py": ["/cloudmind/blueprint.py", "/cloudmind/__init__.py", "/cloudmind/model/leaf.py", "/cloudmind/model/node.py", "/cloudmind/model/user.py"]} |
66,645 | CloudMind6-6/CloudMind | refs/heads/combine | /cloudmind/model/label_palette.py | from cloudmind import db
class LabelPalette(db.Model):
    """A reusable label definition (name + color) scoped to a root node."""
    __tablename__ = 'label_palette'
    id = db.Column(db.Integer, primary_key=True)
    color = db.Column(db.Integer)  # color encoded as an integer
    name = db.Column(db.String(20), default='')
    root_node_id = db.Column(db.Integer, db.ForeignKey('node.id'))
    # relationship
    root_node = db.relationship('Node')
    def __repr__(self):
        return '<LabelPalette %r>' % self.id
    @property
    def serialize(self):
        # JSON-ready form used by the REST layer.
        return {
            'palette_idx': self.id,
            'color': self.color,
            'name': self.name,
            'root_idx': self.root_node_id
        }
| {"/cloudmind/__init__.py": ["/cloudmind/blueprint.py", "/cloudmind/model/user.py", "/cloudmind/model/participant.py"], "/cloudmind/model/label.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/root.py": ["/cloudmind/__init__.py", "/cloudmind/mail.py", "/cloudmind/model/node.py", "/cloudmind/model/participant.py", "/cloudmind/model/user.py"], "/cloudmind/model/contact.py": ["/cloudmind/__init__.py"], "/cloudmind/model/participant.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/label_palette.py": ["/cloudmind/__init__.py", "/cloudmind/model/label_palette.py", "/cloudmind/model/node.py"], "/cloudmind/model/label_palette.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/profile.py": ["/cloudmind/blueprint.py", "/cloudmind/__init__.py", "/cloudmind/model/user.py"], "/cloudmind/mail.py": ["/cloudmind/__init__.py"], "/cloudmind/model/node.py": ["/cloudmind/__init__.py", "/cloudmind/model/label.py", "/cloudmind/model/label_palette.py", "/cloudmind/model/leaf.py", "/cloudmind/model/participant.py", "/cloudmind/model/user.py"], "/cloudmind/restful/v1/node.py": ["/cloudmind/__init__.py", "/cloudmind/model/label_palette.py", "/cloudmind/model/node.py", "/cloudmind/model/participant.py", "/cloudmind/model/user.py"], "/cloudmind/model/user.py": ["/cloudmind/__init__.py"], "/cloudmind/model/leaf.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/label.py": ["/cloudmind/__init__.py", "/cloudmind/model/label.py", "/cloudmind/model/node.py"], "/cloudmind/restful/v1/leaf.py": ["/cloudmind/blueprint.py", "/cloudmind/__init__.py", "/cloudmind/model/leaf.py", "/cloudmind/model/node.py", "/cloudmind/model/user.py"]} |
66,646 | CloudMind6-6/CloudMind | refs/heads/combine | /cloudmind/restful/v1/profile.py | from cloudmind.blueprint import apiv1
from cloudmind import db
from cloudmind.model.user import User
from flask import current_app
from flask import redirect
from flask import request
from flask import send_file
from flask import session
from flask_restful import abort
from flask_restful import Resource
import json
import magic
import os
import time
from urllib.parse import quote
import uuid
# Bug fix: secure_filename is no longer re-exported from the werkzeug root
# (removed in Werkzeug 1.0); import it from its real home.
from werkzeug.utils import secure_filename
@apiv1.route('/profile/img/<int:user_id>')
def profile_download(user_id):
    """Serve a user's profile picture: redirect when it is an external
    http(s) URL, otherwise stream the locally stored file."""
    user = db.session.query(User).filter(User.id == user_id).first()
    if user is None:
        return "Not found {}".format("User")
    if user.picture[0:4] == 'http':
        return redirect(user.picture)
    else:
        return send_file(
            user.picture,
            as_attachment=True,
            attachment_filename=quote(user.name),
            # NOTE(review): assumes magic.from_file returns bytes; modern
            # python-magic returns str, where .decode would fail — verify.
            mimetype=magic.from_file(user.picture, mime=True).decode('utf-8')
        )
class Profile(Resource):
    """Fetch (GET) or rename (POST) the logged-in user's profile."""
    def get(self):
        if 'user_id' not in session:
            abort(403, message="already logged out")
        user = db.session.query(User).filter(User.id == session['user_id']).first()
        return {
            'success': True,
            'profile': user.serialize
        }
    def post(self):
        # Only the display name can be changed through this endpoint.
        args = json.loads(request.data.decode('utf-8'))
        name = args['name']
        if 'user_id' not in session:
            abort(403, message="already logged out")
        user = db.session.query(User).filter(User.id == session['user_id']).first()
        user.name = name
        db.session.commit()
        return {
            'success': True,
            'profile': user.serialize
        }
class ProfileUpload(Resource):
    """Replace the logged-in user's profile picture with an uploaded image."""
    def post(self):
        if 'user_id' not in session:
            abort(403, message="already logged out")
        user_picture = request.files['user_picture']
        # Unique, unguessable filename: millisecond timestamp + random hex.
        filename = secure_filename(str(int(time.time() * 1000))+'_'+uuid.uuid4().hex)
        filepath = os.path.join(current_app.config['UPLOAD_DIR'], filename)
        user_picture.save(filepath)
        # Bug fix: python-magic returns str in current releases, so the old
        # `mimetype[0:5] != b'image'` comparison was always True and every
        # upload was rejected. Normalize bytes/str before the check.
        mimetype = magic.from_file(filepath, mime=True)
        if isinstance(mimetype, bytes):
            mimetype = mimetype.decode('utf-8', 'replace')
        if not mimetype.startswith('image'):
            # Bug fix: the rejected file used to be left orphaned on disk.
            try:
                os.remove(filepath)
            except OSError:
                pass
            abort(403, message="이미지파일이 아닙니다. : {0}".format(mimetype))
        user = db.session.query(User).filter(User.id == session['user_id']).first()
        user.picture = filepath
        db.session.commit()
        return {
            'success': True,
            'profile': user.serialize
        }
class ProfileSearch(Resource):
    """Search users by email prefix and/or name substring (OR-combined)."""
    def get(self):
        email = request.args.get('email', '')
        name = request.args.get('name', '')
        users_query = User.query
        not_null_filters = []
        if email != '':
            # Email matches by prefix.
            not_null_filters.append(User.email.like('{0}%'.format(email)))
        if name != '':
            # Name matches anywhere in the string.
            not_null_filters.append(User.name.like('%{0}%'.format(name)))
        if len(not_null_filters) > 0:
            users_query = users_query.filter(db.or_(*not_null_filters))
            users_serialize = [i.serialize for i in users_query.all()]
        else:
            # No criteria at all: return nothing rather than every user.
            users_serialize = []
        return {
            'success': True,
            'profile': users_serialize
        }
| {"/cloudmind/__init__.py": ["/cloudmind/blueprint.py", "/cloudmind/model/user.py", "/cloudmind/model/participant.py"], "/cloudmind/model/label.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/root.py": ["/cloudmind/__init__.py", "/cloudmind/mail.py", "/cloudmind/model/node.py", "/cloudmind/model/participant.py", "/cloudmind/model/user.py"], "/cloudmind/model/contact.py": ["/cloudmind/__init__.py"], "/cloudmind/model/participant.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/label_palette.py": ["/cloudmind/__init__.py", "/cloudmind/model/label_palette.py", "/cloudmind/model/node.py"], "/cloudmind/model/label_palette.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/profile.py": ["/cloudmind/blueprint.py", "/cloudmind/__init__.py", "/cloudmind/model/user.py"], "/cloudmind/mail.py": ["/cloudmind/__init__.py"], "/cloudmind/model/node.py": ["/cloudmind/__init__.py", "/cloudmind/model/label.py", "/cloudmind/model/label_palette.py", "/cloudmind/model/leaf.py", "/cloudmind/model/participant.py", "/cloudmind/model/user.py"], "/cloudmind/restful/v1/node.py": ["/cloudmind/__init__.py", "/cloudmind/model/label_palette.py", "/cloudmind/model/node.py", "/cloudmind/model/participant.py", "/cloudmind/model/user.py"], "/cloudmind/model/user.py": ["/cloudmind/__init__.py"], "/cloudmind/model/leaf.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/label.py": ["/cloudmind/__init__.py", "/cloudmind/model/label.py", "/cloudmind/model/node.py"], "/cloudmind/restful/v1/leaf.py": ["/cloudmind/blueprint.py", "/cloudmind/__init__.py", "/cloudmind/model/leaf.py", "/cloudmind/model/node.py", "/cloudmind/model/user.py"]} |
66,647 | CloudMind6-6/CloudMind | refs/heads/combine | /cloudmind/mail.py | from cloudmind import mail
# Bug fix: the flask.ext.* namespace was removed in Flask 1.0; import the
# extension directly (consistent with `from flask_mail import Mail` in
# cloudmind/__init__.py).
from flask_mail import Message
from flask import url_for
def mail_send_invite(participant):
    """Email *participant.user* an invitation (Korean HTML body) with an
    absolute link to the /invite/<id> acceptance route."""
    from_user = participant.from_user
    to_user = participant.user
    msg = Message(
        'Hello',
        sender='cloudmindswmaestro@gmail.com',
        recipients=[to_user.email])
    msg.html = \
        """
        {from_user.name}({from_user.email})님이 {to_user.name}({to_user.email})님을 '{project_name}'에 초대하였습니다.<br/>
        <a href="{invite_url}" >수락하기</a>
        """.format(
            from_user=from_user,
            to_user=to_user,
            project_name=participant.own_node.name,
            invite_url=url_for('invite_ok', _external=True, participant_id=participant.id)
        )
    mail.send(msg)
| {"/cloudmind/__init__.py": ["/cloudmind/blueprint.py", "/cloudmind/model/user.py", "/cloudmind/model/participant.py"], "/cloudmind/model/label.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/root.py": ["/cloudmind/__init__.py", "/cloudmind/mail.py", "/cloudmind/model/node.py", "/cloudmind/model/participant.py", "/cloudmind/model/user.py"], "/cloudmind/model/contact.py": ["/cloudmind/__init__.py"], "/cloudmind/model/participant.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/label_palette.py": ["/cloudmind/__init__.py", "/cloudmind/model/label_palette.py", "/cloudmind/model/node.py"], "/cloudmind/model/label_palette.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/profile.py": ["/cloudmind/blueprint.py", "/cloudmind/__init__.py", "/cloudmind/model/user.py"], "/cloudmind/mail.py": ["/cloudmind/__init__.py"], "/cloudmind/model/node.py": ["/cloudmind/__init__.py", "/cloudmind/model/label.py", "/cloudmind/model/label_palette.py", "/cloudmind/model/leaf.py", "/cloudmind/model/participant.py", "/cloudmind/model/user.py"], "/cloudmind/restful/v1/node.py": ["/cloudmind/__init__.py", "/cloudmind/model/label_palette.py", "/cloudmind/model/node.py", "/cloudmind/model/participant.py", "/cloudmind/model/user.py"], "/cloudmind/model/user.py": ["/cloudmind/__init__.py"], "/cloudmind/model/leaf.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/label.py": ["/cloudmind/__init__.py", "/cloudmind/model/label.py", "/cloudmind/model/node.py"], "/cloudmind/restful/v1/leaf.py": ["/cloudmind/blueprint.py", "/cloudmind/__init__.py", "/cloudmind/model/leaf.py", "/cloudmind/model/node.py", "/cloudmind/model/user.py"]} |
66,648 | CloudMind6-6/CloudMind | refs/heads/combine | /cloudmind/model/__init__.py | __all__ = ['contact', 'label', 'label_palette', 'leaf', 'node', 'participant', 'user']
from cloudmind.model import contact
from cloudmind.model import label
from cloudmind.model import label_palette
from cloudmind.model import leaf
from cloudmind.model import node
from cloudmind.model import participant
from cloudmind.model import user
| {"/cloudmind/__init__.py": ["/cloudmind/blueprint.py", "/cloudmind/model/user.py", "/cloudmind/model/participant.py"], "/cloudmind/model/label.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/root.py": ["/cloudmind/__init__.py", "/cloudmind/mail.py", "/cloudmind/model/node.py", "/cloudmind/model/participant.py", "/cloudmind/model/user.py"], "/cloudmind/model/contact.py": ["/cloudmind/__init__.py"], "/cloudmind/model/participant.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/label_palette.py": ["/cloudmind/__init__.py", "/cloudmind/model/label_palette.py", "/cloudmind/model/node.py"], "/cloudmind/model/label_palette.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/profile.py": ["/cloudmind/blueprint.py", "/cloudmind/__init__.py", "/cloudmind/model/user.py"], "/cloudmind/mail.py": ["/cloudmind/__init__.py"], "/cloudmind/model/node.py": ["/cloudmind/__init__.py", "/cloudmind/model/label.py", "/cloudmind/model/label_palette.py", "/cloudmind/model/leaf.py", "/cloudmind/model/participant.py", "/cloudmind/model/user.py"], "/cloudmind/restful/v1/node.py": ["/cloudmind/__init__.py", "/cloudmind/model/label_palette.py", "/cloudmind/model/node.py", "/cloudmind/model/participant.py", "/cloudmind/model/user.py"], "/cloudmind/model/user.py": ["/cloudmind/__init__.py"], "/cloudmind/model/leaf.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/label.py": ["/cloudmind/__init__.py", "/cloudmind/model/label.py", "/cloudmind/model/node.py"], "/cloudmind/restful/v1/leaf.py": ["/cloudmind/blueprint.py", "/cloudmind/__init__.py", "/cloudmind/model/leaf.py", "/cloudmind/model/node.py", "/cloudmind/model/user.py"]} |
66,649 | CloudMind6-6/CloudMind | refs/heads/combine | /cloudmind/model/node.py | from cloudmind import db
from cloudmind.model.label import Label
from cloudmind.model.label_palette import LabelPalette
from cloudmind.model.leaf import Leaf
from cloudmind.model.participant import Participant
from cloudmind.model.user import User
import datetime
from sqlalchemy.sql.expression import true
class Node(db.Model):
    """A mind-map node.

    A node whose ``root_node`` references itself is a root (a whole map /
    project); every other node stores both its immediate parent and its root.
    """
    __tablename__ = 'node'
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(100))
    # BUG FIX: pass the callable itself, not its result. The original
    # ``default=datetime.datetime.utcnow()`` was evaluated once at import
    # time, stamping every row with the server start-up time.
    creation_date = db.Column(db.DateTime, default=datetime.datetime.utcnow)
    due_date = db.Column(db.DateTime, default=datetime.datetime.utcnow)
    description = db.Column(db.Text)
    creator_id = db.Column(db.Integer, db.ForeignKey('user.id'))
    root_node_id = db.Column(db.Integer, db.ForeignKey('node.id'))
    parent_node_id = db.Column(db.Integer, db.ForeignKey('node.id'))
    # relationship
    creator = db.relationship('User')
    root_node = db.relationship(
        'Node',
        foreign_keys='Node.root_node_id',
        remote_side=[id],
        post_update=True
    )
    parent_node = db.relationship(
        'Node',
        backref=db.backref('child_nodes', order_by=id),
        foreign_keys='Node.parent_node_id',
        remote_side=[id],
        post_update=True
    )
    # child_nodes = db.relationship('Node', backref="parent_node", foreign_keys='Node.parent_node_id')
    # leafs = db.relationship('Leaf', order_by="Leaf.id", backref="node")
    # members = db.relationship('User', secondary=Participant)

    def __repr__(self):
        return '<Node %r>' % self.name

    def check_member(self, user_id):
        """Return True iff *user_id* is an accepted participant of this node.

        BUG FIX: the original truth-tested the Query object itself, which is
        always truthy regardless of the database contents, so membership
        checks always passed; ``.first()`` is required to actually run the
        query.
        """
        participant = db.session.query(Participant).\
            filter(Participant.own_node_id == self.id).\
            filter(Participant.user_id == user_id).\
            filter(Participant.is_accepted == true()).\
            first()
        return participant is not None

    @property
    def serialize(self):
        """JSON-safe dict for API responses, including nested leafs/labels."""
        return {
            'node_idx': self.id,
            'name': self.name,
            'creation_date': self.creation_date.isoformat(),
            'due_date': self.due_date.isoformat() if self.due_date is not None else None,
            'description': self.description,
            'creator_id': self.creator_id,
            'root_idx': self.root_node_id,
            'parent_idx': self.parent_node_id,
            'leafs': self.serialize_leafs,
            'assigned_users': self.serialize_member,
            'labels': self.serialize_labels
        }

    @property
    def serialize_labels(self):
        """Serialized labels attached to this node."""
        return [item.serialize for item in self.labels]

    @property
    def serialize_leafs(self):
        """Serialized leafs (file attachments) of this node."""
        return [item.serialize for item in self.leafs]

    @property
    def serialize_member(self):
        """IDs of accepted participants of this node."""
        members = db.session.query(Participant).\
            filter(Participant.own_node_id == self.id).\
            filter(Participant.is_accepted == true()).\
            all()
        return [item.user_id for item in members]

    @property
    def serialize_member_detail(self):
        """Full serialized User records of accepted participants."""
        members = db.session.query(Participant).\
            filter(Participant.own_node_id == self.id).\
            filter(Participant.is_accepted == true()).\
            all()
        return [db.session.query(User).filter(User.id == item.user_id).first().serialize for item in members]

    @property
    def serialize_root(self):
        """Root-node payload: the node itself plus its member details."""
        return {
            'node': self.serialize,
            'user': self.serialize_member_detail
        }

    def remove_all(self):
        """Recursively delete this node, its children, and all attached
        labels, palettes, participants, and leafs, then commit."""
        for item in self.child_nodes:
            item.remove_all()
        db.session.query(Label).filter(Label.own_node_id == self.id).delete()
        db.session.query(LabelPalette).filter(LabelPalette.root_node_id == self.id).delete()
        db.session.query(Participant).filter(Participant.own_node_id == self.id).delete()
        db.session.query(Leaf).filter(Leaf.parent_node_id == self.id).delete()
        db.session.delete(self)
        db.session.commit()
| {"/cloudmind/__init__.py": ["/cloudmind/blueprint.py", "/cloudmind/model/user.py", "/cloudmind/model/participant.py"], "/cloudmind/model/label.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/root.py": ["/cloudmind/__init__.py", "/cloudmind/mail.py", "/cloudmind/model/node.py", "/cloudmind/model/participant.py", "/cloudmind/model/user.py"], "/cloudmind/model/contact.py": ["/cloudmind/__init__.py"], "/cloudmind/model/participant.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/label_palette.py": ["/cloudmind/__init__.py", "/cloudmind/model/label_palette.py", "/cloudmind/model/node.py"], "/cloudmind/model/label_palette.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/profile.py": ["/cloudmind/blueprint.py", "/cloudmind/__init__.py", "/cloudmind/model/user.py"], "/cloudmind/mail.py": ["/cloudmind/__init__.py"], "/cloudmind/model/node.py": ["/cloudmind/__init__.py", "/cloudmind/model/label.py", "/cloudmind/model/label_palette.py", "/cloudmind/model/leaf.py", "/cloudmind/model/participant.py", "/cloudmind/model/user.py"], "/cloudmind/restful/v1/node.py": ["/cloudmind/__init__.py", "/cloudmind/model/label_palette.py", "/cloudmind/model/node.py", "/cloudmind/model/participant.py", "/cloudmind/model/user.py"], "/cloudmind/model/user.py": ["/cloudmind/__init__.py"], "/cloudmind/model/leaf.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/label.py": ["/cloudmind/__init__.py", "/cloudmind/model/label.py", "/cloudmind/model/node.py"], "/cloudmind/restful/v1/leaf.py": ["/cloudmind/blueprint.py", "/cloudmind/__init__.py", "/cloudmind/model/leaf.py", "/cloudmind/model/node.py", "/cloudmind/model/user.py"]} |
66,650 | CloudMind6-6/CloudMind | refs/heads/combine | /cloudmind/restful/v1/node.py | from cloudmind import db
from cloudmind.model.label_palette import LabelPalette
from cloudmind.model.node import Node
from cloudmind.model.participant import Participant
from cloudmind.model.user import User
from dateutil import parser as date_parser
from flask import request
from flask import session
from flask_restful import abort
from flask_restful import Resource
import json
from sqlalchemy.sql.expression import true
class NodeList(Resource):
    """GET: list every node belonging to a given root (project)."""

    def get(self):
        """Return serialized nodes under ``root_idx``; aborts on failure."""
        requested_root = request.args.get('root_idx')
        if 'user_id' not in session:
            abort(403, message="already logged out")
        root = db.session.query(Node).filter(Node.id == requested_root).first()
        if root is None:
            abort(404, message="Not found {}".format("Node"))
        if not root.check_member(session['user_id']):
            abort(404, message="노드멤버 아님")
        children = db.session.query(Node).filter(Node.root_node_id == requested_root).all()
        return {
            'success': True,
            'node_list': [child.serialize for child in children]
        }
class NodeAdd(Resource):
    """POST: create a node; without ``root_idx`` the new node becomes a root."""

    def post(self):
        """Create a node from the JSON body and return the refreshed node list.

        Body keys: ``root_idx`` (None for a new root), ``parent_node_idx``,
        ``node_name``, ``description``.
        """
        args = json.loads(request.data.decode('utf-8'))
        root_id = args.get('root_idx', None)
        parent_node_id = args.get('parent_node_idx', None)
        node_name = args.get('node_name', None)
        description = args.get('description', None)
        if 'user_id' not in session:
            abort(403, message="already logged out")
        if parent_node_id is not None:
            parent_node = db.session.query(Node).filter(Node.id == parent_node_id).first()
            if parent_node is None:
                abort(404, message="Not found {}".format("parent_node"))
            if parent_node.check_member(session['user_id']) is False:
                abort(404, message="노드멤버 아님")
        if root_id is not None:
            # A non-root node must hang off an existing parent.
            if parent_node_id is None:
                abort(404, message="일반 노드에서 parent_node_id는 필수입니다.")
            root_node = db.session.query(Node).filter(Node.id == root_id).first()
            if root_node is None:
                abort(404, message="Not found {}".format("root_node"))
            if root_node.check_member(session['user_id']) is False:
                abort(404, message="노드멤버 아님")
        creator = User.query.filter(User.id == session['user_id']).first()
        node = Node(name=node_name, description=description)
        if root_id is not None:
            node.root_node = root_node
        else:
            # Root nodes reference themselves.
            node.root_node = node
        if parent_node_id is not None:
            node.parent_node = parent_node
        node.creator = creator
        db.session.add(node)
        db.session.commit()
        # Only for a brand-new root: register the creator as an accepted
        # member and seed the default label palette. This happens after the
        # first commit because the palette rows need ``node.id``.
        if root_id is None:
            # FIX: dropped the redundant ``participant.is_accepted = True``
            # re-assignment — the constructor already sets it.
            participant = Participant(is_accepted=True)
            participant.own_node = node
            participant.user = creator
            participant.from_user = creator
            db.session.add(participant)
            db.session.commit()
            # Default palette colors (Trello-like set).
            colors = [
                0x61bd4f,
                0xf2d600,
                0xffab4a,
                0xeb5a46,
                0xc377e0,
                0x0079bf,
                0xff80ce,
                0x4d4d4d
            ]
            for color in colors:
                palette = LabelPalette(root_node_id=node.id, color=color)
                db.session.add(palette)
            db.session.commit()
        nodes = db.session.query(Node).filter(Node.root_node_id == root_id).all()
        return {
            "success": True,
            "node": node.serialize,
            "user": node.serialize_member_detail,
            'node_list': [i.serialize for i in nodes]
        }
class NodeRemove(Resource):
    """POST: recursively delete a node and everything attached to it."""

    def post(self):
        """Delete ``node_idx`` from the JSON body; return remaining nodes."""
        payload = json.loads(request.data.decode('utf-8'))
        target_id = payload['node_idx']
        if 'user_id' not in session:
            abort(403, message="already logged out")
        target = db.session.query(Node).filter(Node.id == target_id).first()
        if target is None:
            abort(404, message="Not found {}".format("Node"))
        root = target.root_node
        if root is None:
            abort(404, message="Not found {}".format("root_node"))
        if not root.check_member(session['user_id']):
            abort(404, message="노드멤버 아님")
        target.remove_all()
        remaining = db.session.query(Node).filter(Node.root_node_id == root.id).all()
        return {
            'success': True,
            'node_list': [item.serialize for item in remaining]
        }
class NodeUpdate(Resource):
    """POST: update a node's fields and replace its assigned member set."""

    def post(self):
        """Update name/description/due_date/parent and re-assign members.

        Body keys: ``node_idx``, ``node_name``, ``description``, ``due_date``
        (any format ``dateutil`` can parse), ``assigned_users`` (list of user
        IDs), optional ``parent_node_idx``.
        """
        args = json.loads(request.data.decode('utf-8'))
        node_id = args['node_idx']
        node_name = args['node_name']
        description = args['description']
        due_date = date_parser.parse(args['due_date'])
        users = args['assigned_users']
        parent_node_id = args.get('parent_node_idx', None)
        if 'user_id' not in session:
            abort(403, message="already logged out")
        node = db.session.query(Node).filter(Node.id == node_id).first()
        if node is None:
            abort(404, message="Not found {}".format("Node"))
        root_node = node.root_node
        if root_node is None:
            abort(404, message="Not found {}".format("Node"))
        if root_node.check_member(session['user_id']) is False:
            abort(404, message="노드멤버 아님")
        if parent_node_id is not None:
            parent_node = db.session.query(Node).filter(Node.id == parent_node_id).first()
            if parent_node is None:
                abort(404, message="Not found {}".format("parent_node"))
            if parent_node.check_member(session['user_id']) is False:
                abort(404, message="노드멤버 아님")
        node.name = node_name
        node.description = description
        node.due_date = due_date
        if parent_node_id is not None:
            node.parent_node_id = parent_node_id
        # Replace the accepted member set wholesale: delete current accepted
        # participants, then re-add the requested users.
        db.session.query(Participant)\
            .filter(Participant.own_node_id == node_id)\
            .filter(Participant.is_accepted == true())\
            .delete()
        # FIX: removed a stray dead ``pass`` statement from this loop.
        for user_id in users:
            participant = Participant(
                is_accepted=True,
                own_node_id=node_id,
                user_id=user_id,
                from_user_id=session['user_id']
            )
            db.session.add(participant)
        db.session.commit()
        nodes = db.session.query(Node).filter(Node.root_node_id == root_node.id).all()
        return {
            "success": True,
            'node_list': [i.serialize for i in nodes]
        }
| {"/cloudmind/__init__.py": ["/cloudmind/blueprint.py", "/cloudmind/model/user.py", "/cloudmind/model/participant.py"], "/cloudmind/model/label.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/root.py": ["/cloudmind/__init__.py", "/cloudmind/mail.py", "/cloudmind/model/node.py", "/cloudmind/model/participant.py", "/cloudmind/model/user.py"], "/cloudmind/model/contact.py": ["/cloudmind/__init__.py"], "/cloudmind/model/participant.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/label_palette.py": ["/cloudmind/__init__.py", "/cloudmind/model/label_palette.py", "/cloudmind/model/node.py"], "/cloudmind/model/label_palette.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/profile.py": ["/cloudmind/blueprint.py", "/cloudmind/__init__.py", "/cloudmind/model/user.py"], "/cloudmind/mail.py": ["/cloudmind/__init__.py"], "/cloudmind/model/node.py": ["/cloudmind/__init__.py", "/cloudmind/model/label.py", "/cloudmind/model/label_palette.py", "/cloudmind/model/leaf.py", "/cloudmind/model/participant.py", "/cloudmind/model/user.py"], "/cloudmind/restful/v1/node.py": ["/cloudmind/__init__.py", "/cloudmind/model/label_palette.py", "/cloudmind/model/node.py", "/cloudmind/model/participant.py", "/cloudmind/model/user.py"], "/cloudmind/model/user.py": ["/cloudmind/__init__.py"], "/cloudmind/model/leaf.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/label.py": ["/cloudmind/__init__.py", "/cloudmind/model/label.py", "/cloudmind/model/node.py"], "/cloudmind/restful/v1/leaf.py": ["/cloudmind/blueprint.py", "/cloudmind/__init__.py", "/cloudmind/model/leaf.py", "/cloudmind/model/node.py", "/cloudmind/model/user.py"]} |
66,651 | CloudMind6-6/CloudMind | refs/heads/combine | /cloudmind/model/user.py | from cloudmind import db
import datetime
from flask import url_for
class User(db.Model):
    """Account record; users are created through OAuth (Google only so far)."""
    __tablename__ = 'user'
    # Value stored in ``login_method`` for Google OAuth accounts.
    METHOD_GOOGLE = 0
    id = db.Column(db.Integer, primary_key=True)
    login_method = db.Column(db.Integer)
    # Provider-side unique ID.
    oauth_id = db.Column(db.Integer)
    name = db.Column(db.String(10))
    email = db.Column(db.String(70), unique=True)
    picture = db.Column(db.String(150))
    # BUG FIX: pass the callable so the date is computed per insert; the
    # original ``default=datetime.date.today()`` froze the module-load date.
    creation_date = db.Column(db.Date, default=datetime.date.today)

    def __init__(self, login_method, oauth_id, name, email, picture):
        self.login_method = login_method
        self.oauth_id = oauth_id
        self.name = name
        self.email = email
        self.picture = picture

    def __repr__(self):
        return '<User %r>' % self.email

    @property
    def serialize(self):
        """JSON-safe dict for API responses; profile_url points at the
        profile-picture download endpoint."""
        return {
            'account_id': self.id,
            'name': self.name,
            'email': self.email,
            'profile_url': url_for('.profile_download', user_id=self.id)
        }
| {"/cloudmind/__init__.py": ["/cloudmind/blueprint.py", "/cloudmind/model/user.py", "/cloudmind/model/participant.py"], "/cloudmind/model/label.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/root.py": ["/cloudmind/__init__.py", "/cloudmind/mail.py", "/cloudmind/model/node.py", "/cloudmind/model/participant.py", "/cloudmind/model/user.py"], "/cloudmind/model/contact.py": ["/cloudmind/__init__.py"], "/cloudmind/model/participant.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/label_palette.py": ["/cloudmind/__init__.py", "/cloudmind/model/label_palette.py", "/cloudmind/model/node.py"], "/cloudmind/model/label_palette.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/profile.py": ["/cloudmind/blueprint.py", "/cloudmind/__init__.py", "/cloudmind/model/user.py"], "/cloudmind/mail.py": ["/cloudmind/__init__.py"], "/cloudmind/model/node.py": ["/cloudmind/__init__.py", "/cloudmind/model/label.py", "/cloudmind/model/label_palette.py", "/cloudmind/model/leaf.py", "/cloudmind/model/participant.py", "/cloudmind/model/user.py"], "/cloudmind/restful/v1/node.py": ["/cloudmind/__init__.py", "/cloudmind/model/label_palette.py", "/cloudmind/model/node.py", "/cloudmind/model/participant.py", "/cloudmind/model/user.py"], "/cloudmind/model/user.py": ["/cloudmind/__init__.py"], "/cloudmind/model/leaf.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/label.py": ["/cloudmind/__init__.py", "/cloudmind/model/label.py", "/cloudmind/model/node.py"], "/cloudmind/restful/v1/leaf.py": ["/cloudmind/blueprint.py", "/cloudmind/__init__.py", "/cloudmind/model/leaf.py", "/cloudmind/model/node.py", "/cloudmind/model/user.py"]} |
66,652 | CloudMind6-6/CloudMind | refs/heads/combine | /cloudmind/model/leaf.py | from cloudmind import db
import datetime
import magic
import PIL
from PIL import Image
class Leaf(db.Model):
    """A file attached to a node; image leafs get a 100px JPEG thumbnail."""
    __tablename__ = 'leaf'
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(100))
    # BUG FIX: pass the callable so each row gets its own insert timestamp
    # (the original evaluated utcnow() once at import time).
    creation_date = db.Column(db.DateTime, default=datetime.datetime.utcnow)
    file_path = db.Column(db.Text)
    file_type = db.Column(db.String(50))
    creator_id = db.Column(db.Integer, db.ForeignKey('user.id'))
    parent_node_id = db.Column(db.Integer, db.ForeignKey('node.id'))
    root_node_id = db.Column(db.Integer, db.ForeignKey('node.id'))
    # relationship
    creator = db.relationship('User')
    root_node = db.relationship('Node', primaryjoin='Leaf.root_node_id == Node.id')
    parent_node = db.relationship(
        'Node', primaryjoin='Leaf.parent_node_id == Node.id', backref=db.backref('leafs', order_by=id)
    )

    def __init__(self, name, file_path):
        """Record the file's MIME type and, for images, write a thumbnail.

        The thumbnail is saved next to the file as ``<path>.thumbnail`` with
        the shorter side scaled to 100px.
        """
        self.name = name
        self.file_path = file_path
        self.file_type = magic.from_file(file_path, mime=True).decode('utf-8')
        if self.file_type[0:5] == 'image':
            im = Image.open(file_path)
            w, h = im.size
            if w >= h:
                size = (int(100 * w / h), 100)
            else:
                size = (100, int(100 * h / w))
            im = im.resize(size)
            # BUG FIX: JPEG cannot store RGBA/P images (e.g. transparent
            # PNGs); convert before saving to avoid an OSError.
            if im.mode != 'RGB':
                im = im.convert('RGB')
            im.save(file_path + ".thumbnail", "JPEG")

    def __repr__(self):
        return '<Leaf %r>' % self.name

    @property
    def serialize(self):
        """JSON-safe dict for API responses."""
        return {
            'id': self.id,
            'name': self.name,
            'creation_date': self.creation_date.isoformat(),
            'file_type': self.file_type,
            'creator_id': self.creator_id,
            'parent_node_id': self.parent_node_id
        }
| {"/cloudmind/__init__.py": ["/cloudmind/blueprint.py", "/cloudmind/model/user.py", "/cloudmind/model/participant.py"], "/cloudmind/model/label.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/root.py": ["/cloudmind/__init__.py", "/cloudmind/mail.py", "/cloudmind/model/node.py", "/cloudmind/model/participant.py", "/cloudmind/model/user.py"], "/cloudmind/model/contact.py": ["/cloudmind/__init__.py"], "/cloudmind/model/participant.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/label_palette.py": ["/cloudmind/__init__.py", "/cloudmind/model/label_palette.py", "/cloudmind/model/node.py"], "/cloudmind/model/label_palette.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/profile.py": ["/cloudmind/blueprint.py", "/cloudmind/__init__.py", "/cloudmind/model/user.py"], "/cloudmind/mail.py": ["/cloudmind/__init__.py"], "/cloudmind/model/node.py": ["/cloudmind/__init__.py", "/cloudmind/model/label.py", "/cloudmind/model/label_palette.py", "/cloudmind/model/leaf.py", "/cloudmind/model/participant.py", "/cloudmind/model/user.py"], "/cloudmind/restful/v1/node.py": ["/cloudmind/__init__.py", "/cloudmind/model/label_palette.py", "/cloudmind/model/node.py", "/cloudmind/model/participant.py", "/cloudmind/model/user.py"], "/cloudmind/model/user.py": ["/cloudmind/__init__.py"], "/cloudmind/model/leaf.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/label.py": ["/cloudmind/__init__.py", "/cloudmind/model/label.py", "/cloudmind/model/node.py"], "/cloudmind/restful/v1/leaf.py": ["/cloudmind/blueprint.py", "/cloudmind/__init__.py", "/cloudmind/model/leaf.py", "/cloudmind/model/node.py", "/cloudmind/model/user.py"]} |
66,653 | CloudMind6-6/CloudMind | refs/heads/combine | /cloudmind/config/default_config.py | # -*- coding: utf-8 -*-
import os.path
import tempfile
class Config(object):
    """Default application settings; override per deployment.

    Secrets (OAuth keys, mail password) are intentionally blank placeholders
    here and must be supplied by the deployment's own config.
    """
    # Server
    SERVER_NAME = 'yourdomain.com'
    # Database
    # Defaults to a throwaway SQLite file in the system temp directory.
    SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(tempfile.gettempdir(), 'test.db')
    # Path
    # Absolute directory where uploaded leaf files are stored.
    UPLOAD_DIR = os.path.abspath(os.path.dirname(__file__) + '/../..') + '/cloudmind/files'
    # OAuth
    # Google OAuth client credentials — fill in per deployment.
    OAUTH_GOOGLE_CLIENTID = ''
    OAUTH_GOOGLE_SECRETKEY = ''
    # EMAIL
    MAIL_SERVER = 'smtp.gmail.com'
    MAIL_PORT = 465
    MAIL_USE_SSL = True
    MAIL_USERNAME = 'you@google.com'
    MAIL_PASSWORD = 'GooglePasswordHere'
| {"/cloudmind/__init__.py": ["/cloudmind/blueprint.py", "/cloudmind/model/user.py", "/cloudmind/model/participant.py"], "/cloudmind/model/label.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/root.py": ["/cloudmind/__init__.py", "/cloudmind/mail.py", "/cloudmind/model/node.py", "/cloudmind/model/participant.py", "/cloudmind/model/user.py"], "/cloudmind/model/contact.py": ["/cloudmind/__init__.py"], "/cloudmind/model/participant.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/label_palette.py": ["/cloudmind/__init__.py", "/cloudmind/model/label_palette.py", "/cloudmind/model/node.py"], "/cloudmind/model/label_palette.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/profile.py": ["/cloudmind/blueprint.py", "/cloudmind/__init__.py", "/cloudmind/model/user.py"], "/cloudmind/mail.py": ["/cloudmind/__init__.py"], "/cloudmind/model/node.py": ["/cloudmind/__init__.py", "/cloudmind/model/label.py", "/cloudmind/model/label_palette.py", "/cloudmind/model/leaf.py", "/cloudmind/model/participant.py", "/cloudmind/model/user.py"], "/cloudmind/restful/v1/node.py": ["/cloudmind/__init__.py", "/cloudmind/model/label_palette.py", "/cloudmind/model/node.py", "/cloudmind/model/participant.py", "/cloudmind/model/user.py"], "/cloudmind/model/user.py": ["/cloudmind/__init__.py"], "/cloudmind/model/leaf.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/label.py": ["/cloudmind/__init__.py", "/cloudmind/model/label.py", "/cloudmind/model/node.py"], "/cloudmind/restful/v1/leaf.py": ["/cloudmind/blueprint.py", "/cloudmind/__init__.py", "/cloudmind/model/leaf.py", "/cloudmind/model/node.py", "/cloudmind/model/user.py"]} |
66,654 | CloudMind6-6/CloudMind | refs/heads/combine | /cloudmind/restful/v1/label.py | from cloudmind import db
from cloudmind.model.label import Label
from cloudmind.model.node import Node
from flask import request
from flask import session
from flask_restful import abort
from flask_restful import Resource
import json
class LabelAdd(Resource):
    """POST: attach a palette label to a node."""

    def post(self):
        """Create a Label linking ``node_idx`` to ``palette_idx``."""
        payload = json.loads(request.data.decode('utf-8'))
        target_node_id = payload['node_idx']
        target_palette_id = payload['palette_idx']
        if 'user_id' not in session:
            abort(403, message="already logged out")
        target = db.session.query(Node).filter(Node.id == target_node_id).first()
        if target is None:
            abort(404, message="Not found {}".format("Node"))
        if not target.check_member(session['user_id']):
            abort(404, message="노드멤버 아님")
        db.session.add(Label(own_node_id=target_node_id, palette_id=target_palette_id))
        db.session.commit()
        siblings = db.session.query(Node).filter(Node.root_node_id == target.root_node_id).all()
        return {
            "success": True,
            'node_list': [n.serialize for n in siblings]
        }
class LabelRemove(Resource):
    """POST: detach a palette label from a node."""

    def post(self):
        """Delete the Label rows linking ``node_idx`` to ``palette_idx``."""
        payload = json.loads(request.data.decode('utf-8'))
        target_node_id = payload['node_idx']
        target_palette_id = payload['palette_idx']
        if 'user_id' not in session:
            abort(403, message="already logged out")
        target = db.session.query(Node).filter(Node.id == target_node_id).first()
        if target is None:
            abort(404, message="Not found {}".format("Node"))
        if not target.check_member(session['user_id']):
            abort(404, message="노드멤버 아님")
        db.session.query(Label)\
            .filter(Label.own_node_id == target_node_id)\
            .filter(Label.palette_id == target_palette_id)\
            .delete()
        db.session.commit()
        siblings = db.session.query(Node).filter(Node.root_node_id == target.root_node_id).all()
        return {
            "success": True,
            'node_list': [n.serialize for n in siblings]
        }
| {"/cloudmind/__init__.py": ["/cloudmind/blueprint.py", "/cloudmind/model/user.py", "/cloudmind/model/participant.py"], "/cloudmind/model/label.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/root.py": ["/cloudmind/__init__.py", "/cloudmind/mail.py", "/cloudmind/model/node.py", "/cloudmind/model/participant.py", "/cloudmind/model/user.py"], "/cloudmind/model/contact.py": ["/cloudmind/__init__.py"], "/cloudmind/model/participant.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/label_palette.py": ["/cloudmind/__init__.py", "/cloudmind/model/label_palette.py", "/cloudmind/model/node.py"], "/cloudmind/model/label_palette.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/profile.py": ["/cloudmind/blueprint.py", "/cloudmind/__init__.py", "/cloudmind/model/user.py"], "/cloudmind/mail.py": ["/cloudmind/__init__.py"], "/cloudmind/model/node.py": ["/cloudmind/__init__.py", "/cloudmind/model/label.py", "/cloudmind/model/label_palette.py", "/cloudmind/model/leaf.py", "/cloudmind/model/participant.py", "/cloudmind/model/user.py"], "/cloudmind/restful/v1/node.py": ["/cloudmind/__init__.py", "/cloudmind/model/label_palette.py", "/cloudmind/model/node.py", "/cloudmind/model/participant.py", "/cloudmind/model/user.py"], "/cloudmind/model/user.py": ["/cloudmind/__init__.py"], "/cloudmind/model/leaf.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/label.py": ["/cloudmind/__init__.py", "/cloudmind/model/label.py", "/cloudmind/model/node.py"], "/cloudmind/restful/v1/leaf.py": ["/cloudmind/blueprint.py", "/cloudmind/__init__.py", "/cloudmind/model/leaf.py", "/cloudmind/model/node.py", "/cloudmind/model/user.py"]} |
66,655 | CloudMind6-6/CloudMind | refs/heads/combine | /cloudmind/restful/v1/leaf.py | from cloudmind.blueprint import apiv1
from cloudmind import db
from cloudmind.model.leaf import Leaf
from cloudmind.model.node import Node
from cloudmind.model.user import User
from flask import current_app
from flask import request
from flask import send_file
from flask import session
from flask_restful import abort
from flask_restful import Resource
import json
import os
import time
from urllib.parse import quote
import uuid
from werkzeug import secure_filename
@apiv1.route('/leaf/<int:leaf_id>')
def leaf_download(leaf_id):
    """Stream a leaf's file to the client; ``?t=true`` serves its thumbnail.

    Unlike the Resource classes, failures here return plain error strings
    with HTTP 200 rather than aborting with an error status.
    """
    is_thumbnail = request.args.get('t', None)
    if 'user_id' not in session:
        return "already logged out"
    leaf = db.session.query(Leaf).filter(Leaf.id == leaf_id).first()
    if leaf is None:
        return "Not found {}".format("Leaf")
    parent_node = leaf.parent_node
    if parent_node is None:
        return "Not found {}".format("Node")
    if parent_node.check_member(session['user_id']) is False:
        # User-facing Korean message: "not a node member".
        return "노드멤버 아님"
    # Thumbnails are written by Leaf.__init__ for image files only, always
    # as JPEG at <file_path>.thumbnail.
    if is_thumbnail == 'true' and leaf.file_type[:5] == 'image':
        return send_file(
            leaf.file_path + ".thumbnail",
            as_attachment=True,
            attachment_filename='thumbnail.jpg',
            mimetype='image/jpeg'
        )
    # quote() percent-encodes the stored (possibly non-ASCII) filename for
    # the Content-Disposition header.
    return send_file(
        leaf.file_path,
        as_attachment=True,
        attachment_filename=quote(leaf.name),
        mimetype=leaf.file_type
    )
class LeafList(Resource):
    """GET: all leafs (file attachments) stored under a root node."""

    def get(self):
        """Return serialized leafs for ``root_idx``; aborts on failure."""
        requested_root = request.args.get('root_idx')
        if 'user_id' not in session:
            abort(403, message="already logged out")
        root = db.session.query(Node).filter(Node.id == requested_root).first()
        if root is None:
            abort(404, message="Not found {}".format("Node"))
        if not root.check_member(session['user_id']):
            abort(404, message="노드멤버 아님")
        matching = db.session.query(Leaf).filter(Leaf.root_node_id == requested_root).all()
        return {
            'success': True,
            'leaf_list': [leaf.serialize for leaf in matching]
        }
class LeafUpload(Resource):
    """POST (multipart): attach an uploaded file to a node as a Leaf."""
    def post(self):
        """Save the upload under UPLOAD_DIR with a unique name and create the row.

        Form fields: ``userfile`` (the file) and ``node_parent_idx``.
        """
        userfile = request.files['userfile']
        parent_node_id = request.form.get('node_parent_idx')
        if 'user_id' not in session:
            abort(403, message="already logged out")
        parent_node = db.session.query(Node).filter(Node.id == parent_node_id).first()
        if parent_node is None:
            abort(404, message="Not found {}".format("Node"))
        if parent_node.check_member(session['user_id']) is False:
            abort(404, message="노드멤버 아님")
        name = userfile.filename
        # Millisecond timestamp + random hex makes the stored name unique
        # and filesystem-safe; the original filename lives only in the DB.
        filename = secure_filename(str(int(time.time() * 1000))+'_'+uuid.uuid4().hex)
        filepath = os.path.join(current_app.config['UPLOAD_DIR'], filename)
        userfile.save(filepath)
        creator = User.query.filter(User.id == session['user_id']).first()
        # Leaf.__init__ sniffs the MIME type and builds a thumbnail for images,
        # so the file must already exist on disk at this point.
        leaf = Leaf(name=name, file_path=filepath)
        leaf.creator = creator
        leaf.parent_node = parent_node
        leaf.root_node = parent_node.root_node
        db.session.add(leaf)
        db.session.commit()
        nodes = db.session.query(Node).filter(Node.root_node_id == parent_node.root_node_id).all()
        return {
            "success": True,
            "leaf": leaf.serialize,
            'node_list': [i.serialize for i in nodes]
        }
class LeafRemove(Resource):
    """POST: delete a leaf row and its stored file(s) on disk."""

    def post(self):
        """Delete ``leaf_idx`` from the JSON body; return remaining leafs."""
        args = json.loads(request.data.decode('utf-8'))
        leaf_id = args['leaf_idx']
        if 'user_id' not in session:
            abort(403, message="already logged out")
        leaf = db.session.query(Leaf).filter(Leaf.id == leaf_id).first()
        if leaf is None:
            abort(404, message="Not found {}".format("Leaf"))
        root_node = leaf.root_node
        if root_node is None:
            abort(404, message="Not found {}".format("root_node"))
        if root_node.check_member(session['user_id']) is False:
            abort(404, message="노드멤버 아님")
        # BUG FIX: also remove the stored file and its thumbnail — the
        # original deleted only the DB row, leaking disk space in the
        # upload directory for every removed leaf.
        for path in (leaf.file_path, leaf.file_path + ".thumbnail"):
            try:
                os.remove(path)
            except OSError:
                # A missing file (e.g. no thumbnail for non-images) is fine.
                pass
        db.session.delete(leaf)
        db.session.commit()
        leafs = db.session.query(Leaf).filter(Leaf.root_node_id == root_node.id).all()
        return {
            'success': True,
            'leaf_list': [i.serialize for i in leafs]
        }
class LeafUpdate(Resource):
    """POST: move a leaf to a different parent node within the same root."""

    def post(self):
        """Re-parent ``leaf_idx`` under ``node_parent_idx``; return updated list."""
        payload = json.loads(request.data.decode('utf-8'))
        leaf_idx = payload['leaf_idx']
        new_parent_idx = payload['node_parent_idx']
        if 'user_id' not in session:
            abort(403, message="already logged out")
        leaf = db.session.query(Leaf).filter(Leaf.id == leaf_idx).first()
        if leaf is None:
            abort(404, message="Not found {}".format("Leaf"))
        root = leaf.root_node
        if root is None:
            abort(404, message="Not found {}".format("root_node"))
        if not root.check_member(session['user_id']):
            abort(404, message="노드멤버 아님")
        leaf.parent_node_id = new_parent_idx
        db.session.commit()
        leafs = db.session.query(Leaf).filter(Leaf.root_node_id == root.id).all()
        return {
            'success': True,
            "leaf": leaf.serialize,
            'leaf_list': [item.serialize for item in leafs]
        }
| {"/cloudmind/__init__.py": ["/cloudmind/blueprint.py", "/cloudmind/model/user.py", "/cloudmind/model/participant.py"], "/cloudmind/model/label.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/root.py": ["/cloudmind/__init__.py", "/cloudmind/mail.py", "/cloudmind/model/node.py", "/cloudmind/model/participant.py", "/cloudmind/model/user.py"], "/cloudmind/model/contact.py": ["/cloudmind/__init__.py"], "/cloudmind/model/participant.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/label_palette.py": ["/cloudmind/__init__.py", "/cloudmind/model/label_palette.py", "/cloudmind/model/node.py"], "/cloudmind/model/label_palette.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/profile.py": ["/cloudmind/blueprint.py", "/cloudmind/__init__.py", "/cloudmind/model/user.py"], "/cloudmind/mail.py": ["/cloudmind/__init__.py"], "/cloudmind/model/node.py": ["/cloudmind/__init__.py", "/cloudmind/model/label.py", "/cloudmind/model/label_palette.py", "/cloudmind/model/leaf.py", "/cloudmind/model/participant.py", "/cloudmind/model/user.py"], "/cloudmind/restful/v1/node.py": ["/cloudmind/__init__.py", "/cloudmind/model/label_palette.py", "/cloudmind/model/node.py", "/cloudmind/model/participant.py", "/cloudmind/model/user.py"], "/cloudmind/model/user.py": ["/cloudmind/__init__.py"], "/cloudmind/model/leaf.py": ["/cloudmind/__init__.py"], "/cloudmind/restful/v1/label.py": ["/cloudmind/__init__.py", "/cloudmind/model/label.py", "/cloudmind/model/node.py"], "/cloudmind/restful/v1/leaf.py": ["/cloudmind/blueprint.py", "/cloudmind/__init__.py", "/cloudmind/model/leaf.py", "/cloudmind/model/node.py", "/cloudmind/model/user.py"]} |
66,656 | dnuwa/level-up | refs/heads/master | /test_signup.py | import unittest
from signup import Signup
class TestSignup(unittest.TestCase):
    """Unit tests for signup.Signup: construction, name joining, submission,
    and email validation."""
    def setUp(self):
        # Fresh fixture objects for every test method; each Signup instance
        # owns its own (initially empty) database list.
        self.user1 = Signup('daniel', 'nuwa', 'daniel@gmail.com')
        self.user2 = Signup('jose','bugingo', 'jose@gmail.com')
    def test_class_created(self):
        """Constructor produces a Signup instance."""
        self.assertIsInstance(self.user1, Signup)
    def test_combine_names(self):
        """combined_name() returns a one-element set {'first last'}."""
        result = self.user1.combined_name()
        self.assertEqual(result, {'daniel nuwa'})
    def test_submit(self):
        """submit() appends a record dict and returns the database list."""
        result = self.user1.submit()
        self.assertEqual(result, [{'first_name':'daniel','last_name':'nuwa', 'email':'daniel@gmail.com'}])
        self.assertIsInstance(result, list)
    def test_validate_email(self):
        """A well-formed address is returned unchanged (truthy)."""
        result = self.user1.validate_email('daniel@gmail.com')
        self.assertTrue(result)
    def test_validate_email_returns_false(self):
        """An address without a dot in its domain part is rejected."""
        result = self.user1.validate_email('daniel@gmailcom')
        self.assertFalse(result)
| {"/test_signup.py": ["/signup.py"]} |
66,657 | dnuwa/level-up | refs/heads/master | /signup.py | import re
class Signup():
    """In-memory sign-up form: validates the email, joins names, and stores
    submitted records in a per-instance list."""

    def __init__(self, first_name, last_name, email_address):
        self.first_name = str(first_name)
        self.last_name = str(last_name)
        # validate_email returns the address when valid and False otherwise,
        # so an invalid address is stored as the literal False.
        self.email_address = Signup.validate_email(email_address)
        self.database = []

    def combined_name(self):
        """Return {'first last'} as a one-element set, or an error dict when
        either name is blank."""
        if self.first_name and self.last_name:
            return {'{} {}'.format(self.first_name, self.last_name)}
        return {'response': 'first name and last name fields can not be empty'}

    def submit(self):
        """Append this user's record to the database list and return it, or
        an error dict when either name is blank."""
        if not (self.first_name and self.last_name):
            return {'response': 'first name and last name fields can not be empty'}
        self.users = {'first_name': self.first_name,
                      'last_name': self.last_name,
                      'email': self.email_address}
        self.database.append(self.users)
        return self.database

    @staticmethod
    def validate_email(email):
        """Return *email* if it looks like addr@host.tld, else False."""
        return email if re.match(r"[^@]+@[^@]+\.[^@]+", email) else False
# Demo/manual-test driver. FIX: guarded so that importing this module
# (e.g. from test_signup.py) no longer executes it as an import side effect.
if __name__ == '__main__':
    new_user = Signup("", "", "bulega@gmail.com")
    new_user.combined_name()
    new_user.submit()
| {"/test_signup.py": ["/signup.py"]} |
66,740 | Prabithapallat01/Bank_app | refs/heads/master | /bank/migrations/0002_auto_20210512_2009.py | # Generated by Django 3.1.7 on 2021-05-12 14:39
from django.db import migrations, models
class Migration(migrations.Migration):
    """Drop Branch.branch_code; add account_number and balance columns.

    Auto-generated by Django; applied migrations should not be edited.
    """
    dependencies = [
        ('bank', '0001_initial'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='branch',
            name='branch_code',
        ),
        migrations.AddField(
            model_name='branch',
            name='account_number',
            # NOTE(review): default=True on a CharField is almost certainly a
            # placeholder (True is not a string) — confirm the intended default.
            field=models.CharField(default=True, max_length=10),
        ),
        migrations.AddField(
            model_name='branch',
            name='balance',
            field=models.IntegerField(default=0),
        ),
    ]
| {"/bank/serializers.py": ["/bank/models.py"], "/bank/views.py": ["/bank/serializers.py", "/bank/models.py"]} |
66,741 | Prabithapallat01/Bank_app | refs/heads/master | /bank/models.py | from django.db import models
# Create your models here
class Branch(models.Model):
    """A customer account held at a bank branch."""

    branch_name = models.CharField(max_length=250)
    # NOTE(review): IntegerField(default=True) coerces the default to 1; an
    # explicit integer (or generated unique account number) is likely intended.
    account_number = models.IntegerField(default=True)
    bank_name = models.CharField(max_length=250)
    customer_name = models.CharField(max_length=250)
    address = models.CharField(max_length=250)
    balance = models.IntegerField(default=0)

    def __str__(self):
        # Shown in the Django admin and shell.
        return self.branch_name
# class BankStatement(models.Model):
#
# acc_no=models.ForeignKey(Branch,on_delete=models.CASCADE)
# receiver_accno=models.CharField(max_length=10,default=True)
# receiver=models.CharField(max_length=120)
# amnt=models.IntegerField(default=0)
| {"/bank/serializers.py": ["/bank/models.py"], "/bank/views.py": ["/bank/serializers.py", "/bank/models.py"]} |
66,742 | Prabithapallat01/Bank_app | refs/heads/master | /bank/migrations/0001_initial.py | # Generated by Django 3.1.7 on 2021-05-12 05:41
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated initial migration: creates the Branch table."""

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Branch',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('branch_name', models.CharField(max_length=250)),
                ('branch_code', models.CharField(max_length=10)),
                ('bank_name', models.CharField(max_length=250)),
                ('customer_name', models.CharField(max_length=250)),
                ('address', models.CharField(max_length=250)),
            ],
        ),
    ]
| {"/bank/serializers.py": ["/bank/models.py"], "/bank/views.py": ["/bank/serializers.py", "/bank/models.py"]} |
66,743 | Prabithapallat01/Bank_app | refs/heads/master | /bank/migrations/0004_bankstatement.py | # Generated by Django 3.1.7 on 2021-05-13 07:06
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated: adds the BankStatement model with a Branch foreign key."""

    dependencies = [
        ('bank', '0003_auto_20210512_2241'),
    ]

    operations = [
        migrations.CreateModel(
            name='BankStatement',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('sender', models.CharField(max_length=120)),
                ('receiver', models.CharField(max_length=120)),
                ('amnt', models.IntegerField(default=0)),
                ('acc_no', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='bank.branch')),
            ],
        ),
    ]
| {"/bank/serializers.py": ["/bank/models.py"], "/bank/views.py": ["/bank/serializers.py", "/bank/models.py"]} |
66,744 | Prabithapallat01/Bank_app | refs/heads/master | /bank/serializers.py | from rest_framework import serializers
from rest_framework.serializers import ModelSerializer
from django.contrib.auth.models import User
from .models import Branch
class BranchSerializer(ModelSerializer):
    """Serialises every field of a Branch account record."""

    class Meta:
        model = Branch
        fields = '__all__'


class UserRegisterSerializer(ModelSerializer):
    """Registration payload for Django's built-in User."""

    class Meta:
        model = User
        fields = ['first_name', 'email', 'username', 'password']

    def reguser(self):
        # NOTE(review): stores the raw password (no set_password hashing), and
        # this helper appears unused — views call serializer.save() instead.
        rgstr = User(
            first_name=self.validated_data['first_name'],
            email=self.validated_data['email'],
            username=self.validated_data['username'],
            password=self.validated_data['password']
        )
        rgstr.save()


class LoginSerializer(serializers.Serializer):
    """Plain username/password credentials consumed by LoginUser."""

    username = serializers.CharField()
    password = serializers.CharField()


class WithdrawSerailizer(serializers.Serializer):
    """Amount (whole units) to withdraw from an account."""

    amount = serializers.IntegerField()


class DepositeSerailizer(serializers.Serializer):
    """Amount (whole units) to deposit into an account."""

    amount = serializers.IntegerField()


# class TransactionTransfer(ModelSerializer):
#
#     class Meta:
#         model=BankStatement
#         fields='__all__'
| {"/bank/serializers.py": ["/bank/models.py"], "/bank/views.py": ["/bank/serializers.py", "/bank/models.py"]} |
66,745 | Prabithapallat01/Bank_app | refs/heads/master | /bank/migrations/0003_auto_20210512_2241.py | # Generated by Django 3.1.7 on 2021-05-12 17:11
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: converts Branch.account_number from CharField to IntegerField."""

    dependencies = [
        ('bank', '0002_auto_20210512_2009'),
    ]

    operations = [
        migrations.AlterField(
            model_name='branch',
            name='account_number',
            # NOTE(review): default=True coerces to integer 1 — confirm intent.
            field=models.IntegerField(default=True),
        ),
    ]
| {"/bank/serializers.py": ["/bank/models.py"], "/bank/views.py": ["/bank/serializers.py", "/bank/models.py"]} |
66,746 | Prabithapallat01/Bank_app | refs/heads/master | /bank/migrations/0005_auto_20210514_1953.py | # Generated by Django 3.1.7 on 2021-05-14 14:23
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: drops BankStatement.sender and adds receiver_accno."""

    dependencies = [
        ('bank', '0004_bankstatement'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='bankstatement',
            name='sender',
        ),
        migrations.AddField(
            model_name='bankstatement',
            name='receiver_accno',
            field=models.CharField(default=True, max_length=10),
        ),
    ]
| {"/bank/serializers.py": ["/bank/models.py"], "/bank/views.py": ["/bank/serializers.py", "/bank/models.py"]} |
66,747 | Prabithapallat01/Bank_app | refs/heads/master | /bank/views.py | from django.shortcuts import render
from rest_framework.utils.representation import serializer_repr
from .serializers import UserRegisterSerializer,LoginSerializer,BranchSerializer,WithdrawSerailizer,DepositeSerailizer
from .models import Branch
from rest_framework.views import APIView
from rest_framework.authtoken.models import Token
from rest_framework.response import Response
from django.contrib.auth import login,logout,authenticate
from rest_framework import status
from django.contrib.auth.models import User
# Create your views here.
class AccountCreate(APIView):
    """GET lists every Branch account; POST opens a new one."""

    def get(self, request):
        account = Branch.objects.all()
        serializer = BranchSerializer(account, many=True)
        return Response(serializer.data)

    def post(self, request):
        serializer = BranchSerializer(data=request.data)
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)


class BalanceApiView(APIView):
    """GET the full record (including balance) for one account number."""

    def get(self, request, account_number):
        # NOTE(review): rebinding the parameter to the Branch instance shadows
        # it; an unknown account raises Branch.DoesNotExist (unhandled -> 500).
        account_number = Branch.objects.get(account_number=account_number)
        serializer = BranchSerializer(account_number)
        return Response(serializer.data)


class Register(APIView):
    """GET lists users; POST registers a new Django auth user."""

    def get(self, request):
        account = User.objects.all()
        serializer = UserRegisterSerializer(account, many=True)
        return Response(serializer.data)

    def post(self, request):
        serializer = UserRegisterSerializer(data=request.data)
        data = {}
        if serializer.is_valid():
            # NOTE(review): ModelSerializer.save() stores the password as plain
            # text (no set_password hashing), and it is echoed back below.
            account = serializer.save()
            data['response'] = "registered"
            data['first_name'] = account.first_name
            data['username'] = account.username
            data['password'] = account.password
            data['email'] = account.email
        else:
            data = serializer.errors
        return Response(data)


class LoginUser(APIView):
    """POST username/password; on success returns (creating if needed) a DRF token."""

    def post(self, request):
        serializer = LoginSerializer(data=request.data)
        if serializer.is_valid():
            username = serializer.validated_data.get("username")
            password = serializer.validated_data.get("password")
            # user = authenticate(request, username=username, password=password)
            # NOTE(review): fetch-and-compare only works because Register stores
            # raw passwords; authenticate() (commented above) is the standard,
            # hash-aware check. Unknown usernames raise User.DoesNotExist
            # (unhandled), and '&' is a bitwise AND of the two booleans.
            user = User.objects.get(username=username)
            if (user.username == username) & (user.password == password):
                login(request, user)
                token, created = Token.objects.get_or_create(user=user)
                return Response({"token": token.key}, status=status.HTTP_201_CREATED)
            else:
                return Response({"message": "loginfailed"})
        # NOTE(review): an invalid serializer falls through and returns None,
        # which DRF rejects ("did not return an HttpResponse").


class LogoutUser(APIView):
    """POST logs the session out and revokes the caller's auth token."""

    def post(self, request):
        logout(request)
        # NOTE(review): no Response is returned from this view, which DRF
        # treats as an error even when logout succeeds.
        request.user.auth_token.delete()
class WithdrawView(APIView):
    """POST {amount} to debit the account identified by *account_number*."""

    def post(self, request, account_number):
        serializer = WithdrawSerailizer(data=request.data)
        # NOTE(review): unknown accounts raise Branch.DoesNotExist (unhandled).
        accno = Branch.objects.get(account_number=account_number)
        if serializer.is_valid():
            amount = serializer.validated_data.get("amount")
            # NOTE(review): '<' forbids withdrawing the exact balance;
            # '<=' may be intended — confirm with product requirements.
            if amount < accno.balance:
                accno.balance -= amount
                accno.save()
                return Response({"balance is debited now & Your Current Balance is ": accno.balance})
            return Response({"message": "insuffiecient Balance:"})
        # Previously this path fell through and returned None, making DRF raise
        # "did not return an HttpResponse"; report validation errors instead.
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class DepositeApiView(APIView):
    """POST {amount} to credit the account identified by *account_number*."""

    def post(self, request, account_number):
        serializer = DepositeSerailizer(data=request.data)
        # NOTE(review): unknown accounts raise Branch.DoesNotExist (unhandled).
        accno = Branch.objects.get(account_number=account_number)
        if serializer.is_valid():
            amount = serializer.validated_data.get("amount")
            accno.balance += amount
            accno.save()
            return Response({"Your Account is Credited now and Your Current Balance is": accno.balance})
        # Previously this path fell through and returned None, making DRF raise
        # "did not return an HttpResponse"; report validation errors instead.
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
| {"/bank/serializers.py": ["/bank/models.py"], "/bank/views.py": ["/bank/serializers.py", "/bank/models.py"]} |
66,749 | immortalhope/work_for_you | refs/heads/master | /work/validation.py | import re
def get_number(text):
    """Prompt with *text* until the user enters a positive integer of at most three digits."""
    while True:
        number_str = input(text)
        # \d matches digits; {1,3} allows one to three of them.
        # fullmatch (not match) is required: re.match only anchors at the start,
        # so inputs like "1234" or "12abc" would otherwise slip through —
        # the first exceeding three digits, the second crashing int().
        if re.fullmatch(r"\d{1,3}", number_str):
            number = int(number_str)
            if number > 0:
                return number
66,750 | immortalhope/work_for_you | refs/heads/master | /task1.py | print('Толстуха Надія, КМ-82\nВаріант 23. Завдання 1.\nСформувати в програмі масив з цілих чисел від 2 до n. Підрахувати суму квадpатов парних і суму квадратів непарних чисел.')
correct = False
while not correct:
paired = []
unpaired = []
while True:
try:
n = int(input("Введіть число n. \nn="))
break
except ValueError:
print("Введіть коректні дані.")
num = 2
def divide(num, n, paired, unpaired):
    """Recursively split integers num..n into even/odd lists; returns (paired, unpaired)."""
    if num > n:
        # Base case: all numbers classified.
        return paired, unpaired
    if num % 2 == 0:
        paired.append(num)
    else:
        unpaired.append(num)
    return divide(num + 1, n, paired, unpaired)
print (divide(num, n, paired, unpaired))
squares_paired = []
for elm in paired:
number = elm**2
squares_paired.append(number)
squares_unpaired = []
for elm in unpaired:
number = elm**2
squares_unpaired.append(number)
sum = 0
for elm in squares_paired:
sum += elm
print ("Сума квадратів парних чисел:", sum)
sum = 0
for elm in squares_unpaired:
sum += elm
print ("Сума квадратів непарних чисел:", sum)
choice = input("Якщо хочете завершити програму - натисніть 1, а якщо повторити - будь-що.\n")
if choice == "1":
correct = True
else:
correct = False
| {"/task2.py": ["/work/validation.py", "/work/recurcion.py"]} |
66,751 | immortalhope/work_for_you | refs/heads/master | /task2.py | print ("""Толстуха Надія. КМ-82
Варіант 23. Завдання 2.""")
import numpy as np
import random
# імпортуємо файл з валідацією
import work.validation
# імпортуємо файл з рекурсивними функціями
import work.recurcion
# початок циклу для виходу із програми
correct = False
while not correct:
# Виклик функції для валідації вводу даних
n = work.validation.get_number("Введіть кількість рядків:")
# Створення рандомної матриці
matrix = [[random.randrange(0,10) for y in range(n)] for x in range(n)]
M = np.array(matrix)
print(M)
# Функція, що рекурсивно виводить рядки матриці, з елементами, що знаходяться над головною діагоняллю
# def get_lines(matrix, current_index = 0):
# if matrix.size == 0:#Так як матриця знаходиться в модулі np, умова if matrix == [].... буде працювати лише зі звичайним списком, але не з ndarray.
# return None
# current_row = matrix[0]
# data = current_row[current_index +1:]
# print(data)
# return get_lines(matrix[1:], current_index+1)
# print(get_lines(M))
# Recursively sums the matrix elements that lie above the main diagonal.
# Built from the commented get_lines above; delegates row summation to
# sum_list from work/recurcion.py.
def sum_lines(matrix, current_index=0, current_sum=0):
    if matrix.size == 0:
        return current_sum
    current_row = matrix[0]  # first remaining row
    data = current_row[current_index + 1:]  # elements right of the diagonal
    # NOTE: the local name shadows the built-in sum(); harmless here.
    sum = work.recurcion.sum_list(list(data))
    # Slice off the processed row and recurse over the rest of the matrix.
    return sum_lines(matrix[1:], current_index + 1, current_sum + sum)
# Надання значення функції sum_lines()
x = sum_lines(M)
# Виклик функції sum_lines()
print("Сума елементів матриці, що знаходяться вище головної діагоналі: ", x)
# Функція, що рекурсивно виводить списки матриці з елементами вище побічної діагоналі
# def get_other_lines(matrix, end_of_list = n-1):
# if end_of_list == 0:
# return None
# current_row = matrix[0]
# data = current_row[:end_of_list]
# print(data)
# return get_other_lines(matrix[1:], end_of_list-1)
# print(get_other_lines(M))
def mult_lines(matrix, end_of_list=n - 1, current_mult=1):
    """Recursively multiplies the matrix elements above the anti-diagonal.

    NOTE(review): the default end_of_list=n-1 captures the loop's current n
    each time the def statement runs — confirm that is intended.
    """
    if end_of_list == 0:
        return current_mult
    current_row = matrix[0]
    data = current_row[:end_of_list]  # elements left of the anti-diagonal
    mult = work.recurcion.mult_list(list(data), end_of_list)
    return mult_lines(matrix[1:], end_of_list - 1, current_mult * mult)
y =mult_lines(M)
print("Добуток елементів матриці, що знаходяться вище побічної діагоналі: ", y)
choice = input("Якщо хочете завершити програму - натисніть 1, а якщо повторити - будь-що.\n")
if choice == "1":
correct = True
else:
correct = False
'''
Примітка:
Коли пишете рекурсивну ф-цію спочатку пишіть саму ф-цію,
а потім вже думайте над умовою зупинки
'''
| {"/task2.py": ["/work/validation.py", "/work/recurcion.py"]} |
66,752 | immortalhope/work_for_you | refs/heads/master | /work/recurcion.py |
def sum_list(data):
    """Recursively sum the numbers in *data*; an empty list sums to 0."""
    return 0 if data == [] else data[0] + sum_list(data[1:])
def mult_list(data, end_of_list):
    """Recursively multiply the first *end_of_list* elements of *data* (1 when none)."""
    return 1 if end_of_list == 0 else data[0] * mult_list(data[1:], end_of_list - 1)
| {"/task2.py": ["/work/validation.py", "/work/recurcion.py"]} |
66,759 | nguyenquyet27/AR | refs/heads/master | /reference_plane.py | import cv2
import os
from process_func import image_proc
import config
class ReferencePlane(object):
    """Marker image wrapper: loads, resizes, grayscales it and precomputes ORB features."""

    def __init__(self, plane_path):
        print('[INFO]Make reference surface ...')
        self.orb = cv2.ORB_create()
        self.ref_plane = cv2.imread(os.path.join(os.getcwd(), plane_path))
        # Set size of image in ./template/ if you want. Change here (230,300)
        self.image_ref = image_proc(cv2.resize(self.ref_plane, (230, 300)), 1)
        # image_proc returns a single-channel image, so shape is (height, width).
        self.height, self.width = self.image_ref.shape
        # ORB keypoints/descriptors of the grayscale reference marker.
        self.keypoints, self.descriptors = self.orb.detectAndCompute(
            self.image_ref, None)

    def get_keypoints(self):
        """Precomputed ORB keypoints of the marker."""
        return self.keypoints

    def get_descriptors(self):
        """Precomputed ORB descriptors of the marker."""
        return self.descriptors
| {"/reference_plane.py": ["/process_func.py", "/config.py"], "/config.py": ["/reference_plane.py"], "/ar_model.py": ["/config.py", "/process_func.py"], "/main.py": ["/config.py", "/process_func.py", "/ar_model.py"]} |
66,760 | nguyenquyet27/AR | refs/heads/master | /config.py | import os
import numpy as np
import cv2
from reference_plane import ReferencePlane
from objloader import OBJ
# Minimum good feature matches before a marker detection is trusted.
MIN_MATCHES = 80
image_plane_width = 720
image_plane_height = 480
# Estimate using ./cam-parameters/compute_calibration_matrix.py
# Pinhole intrinsics: focal length ~900px, principal point at the image centre.
camera_intrinsic = np.array(
    [[900, 0, image_plane_width / 2],
     [0, 900, image_plane_height / 2],
     [0, 0, 1]]
)

# Reference markers and the 3D model to project onto them (loaded at import).
marker = ReferencePlane('template/marker.jpg')
marker2 = ReferencePlane('template/marker2.jpg')
_3d_fox = OBJ('models/fox.obj', swapyz=True)

# ORB + FLANN configuration
FLANN_INDEX_LSH = 6
index_params = dict(algorithm=FLANN_INDEX_LSH,
                    table_number=6,  # 12
                    key_size=12,  # 20
                    multi_probe_level=1)  # 2
| {"/reference_plane.py": ["/process_func.py", "/config.py"], "/config.py": ["/reference_plane.py"], "/ar_model.py": ["/config.py", "/process_func.py"], "/main.py": ["/config.py", "/process_func.py", "/ar_model.py"]} |
66,761 | nguyenquyet27/AR | refs/heads/master | /ar_model.py | import cv2
import config
import numpy as np
import process_func as pf
class ARModel(object):
    """
    with each card (hopefully), we make into an object so that we can project 3d model onto it
    """

    def __init__(self, reference_plane, target_plane):
        self.homography = None
        # TODO: other handcrafts feature?
        self.orb = cv2.ORB_create()
        # TODO: other distance formula?
        # NOTE(review): the brute-force matcher is created but only FLANN is used.
        self.bf = cv2.BFMatcher(cv2.NORM_HAMMING, crossCheck=True)
        self.flann = cv2.FlannBasedMatcher(config.index_params)
        self.target = target_plane  # raw camera frame
        self.set_preprocess_target()
        # ORB features of the preprocessed (grayscale) frame.
        self.keypoints, self.descriptors = self.orb.detectAndCompute(
            self.target_after, None)

    def set_preprocess_target(self):
        """Cache the grayscale version of the captured frame."""
        self.target_after = pf.image_proc(self.target, 1)

    def get_preprocess_target(self):
        return self.target_after

    def get_keypoints(self):
        return self.keypoints

    def get_descriptors(self):
        return self.descriptors

    def set_matches(self, reference_plane):
        """KNN-match marker descriptors against this frame, keeping Lowe-ratio (0.75) hits."""
        self.matches = self.flann.knnMatch(
            reference_plane.get_descriptors(), self.descriptors, k=2)
        self.matches = [m[0] for m in self.matches if len(
            m) == 2 and m[0].distance < m[1].distance * 0.75]

    def get_matches(self):
        return self.matches

    def set_homography(self, reference_plane):
        """
        set homography for target surface object which transform [X,Y,0,1].tranpose to z[u,v,1].transpose
        """
        ref_kp = reference_plane.get_keypoints()
        src_points = np.float32(
            [ref_kp[m.queryIdx].pt for m in self.matches]).reshape(-1, 1, 2)
        dst_points = np.float32(
            [self.keypoints[m.trainIdx].pt for m in self.matches]).reshape(-1, 1, 2)
        # RANSAC with a 10px reprojection threshold rejects outlier matches.
        H, mask = cv2.findHomography(src_points, dst_points, cv2.RANSAC, 10.0)
        self.homography = H
        return dst_points

    def get_homography(self):
        try:
            return self.homography
        except:
            # NOTE(review): unreachable in practice — __init__ always sets
            # self.homography (to None), so no AttributeError can occur here.
            print("Maybe you hasn't calculate homomatrix?")
| {"/reference_plane.py": ["/process_func.py", "/config.py"], "/config.py": ["/reference_plane.py"], "/ar_model.py": ["/config.py", "/process_func.py"], "/main.py": ["/config.py", "/process_func.py", "/ar_model.py"]} |
66,762 | nguyenquyet27/AR | refs/heads/master | /main.py | import cv2
import time
import argparse
import numpy as np
import config
import process_func as pf
from ar_model import ARModel
# Rolling state shared across frames: kt flags whether a previous homography
# exists; new homographies are averaged with the running one to damp jitter.
kt = 0
projection = None
homograp = np.ones((3, 3))


def project_3d_model_to_target_plane(ref, target):
    """Draw the marker outline on *target*'s frame and render the 3D model onto it."""
    global kt, projection, homograp
    target.set_homography(ref)
    if kt == 0:
        homograp = target.get_homography()
    else:
        # Smooth: average the fresh homography with the running one.
        homograp = (target.get_homography() + homograp) / 2
    kt = 1
    # Corners of the reference marker, to be projected into the camera frame.
    points = np.float32(
        [[0, 0],
         [0, ref.height - 1],
         [ref.width - 1, ref.height - 1],
         [ref.width - 1, 0]]
    ).reshape(-1, 1, 2)
    dst = cv2.perspectiveTransform(points, homograp)
    frame = cv2.polylines(
        target.target, [np.int32(dst)], True, (255, 255, 255), 3, cv2.LINE_AA)
    if homograp is not None:
        try:
            # obtain 3D projection matrix from homography matrix and camera parameters
            projection = pf.projection_matrix(
                config.camera_intrinsic, homograp)
            # project cube or model
            frame = pf.render(frame, config._3d_fox,
                              projection, ref.image_ref, False)
        except:
            # NOTE(review): bare except hides rendering errors; the frame is
            # simply returned without the model when anything fails.
            pass
    return frame
if __name__ == "__main__":
    # Parse command line arguments
    ap = argparse.ArgumentParser()
    ap.add_argument('-vb', default=str(0),
                    help="lookup cv2.VideoCapture for video backend parameters")
    args = ap.parse_args()
    cap = cv2.VideoCapture(int(args.vb))
    cap.set(cv2.CAP_PROP_FRAME_WIDTH, config.image_plane_width)
    cap.set(cv2.CAP_PROP_FRAME_HEIGHT, config.image_plane_height)
    # Check if camera opened successfully
    if (cap.isOpened() == False):
        print("Error opening video stream.")
    while True:
        ret, frame_read = cap.read()
        # One ARModel per marker so each can be matched independently.
        target = ARModel(config.marker, frame_read)
        target2 = ARModel(config.marker2, frame_read)
        if target.get_descriptors() is None:
            # Nothing detectable in this frame; just show it (Esc quits).
            cv2.imshow('Frame', frame_read)
            if cv2.waitKey(50) == 27:
                break
            continue
        frame_read2 = frame_read
        target.set_matches(config.marker)
        target2.set_matches(config.marker2)
        if len(target.get_matches()) > config.MIN_MATCHES:
            frame_read = project_3d_model_to_target_plane(
                config.marker, target)
        if len(target2.get_matches()) > config.MIN_MATCHES:
            frame_read2 = project_3d_model_to_target_plane(
                config.marker2, target2)
        # NOTE(review): the blended frame from addWeighted is discarded — the
        # result was probably meant to be assigned to frame_read.
        cv2.addWeighted(frame_read, 0.5, frame_read2, 0.5, 0)
        cv2.imshow('Frame', frame_read)
        if cv2.waitKey(50) == 27:
            break
    cap.release()
    cv2.destroyAllWindows()
| {"/reference_plane.py": ["/process_func.py", "/config.py"], "/config.py": ["/reference_plane.py"], "/ar_model.py": ["/config.py", "/process_func.py"], "/main.py": ["/config.py", "/process_func.py", "/ar_model.py"]} |
66,763 | nguyenquyet27/AR | refs/heads/master | /process_func.py | import cv2
import math
import random
import numpy as np
def image_proc(img, scale_factor):
    """Convert a BGR image to grayscale.

    NOTE(review): *scale_factor* is currently unused — confirm whether
    resizing was intended here.
    """
    img_gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    return img_gray
def projection_matrix(camera_parameters, homography):
    """Derive the 3x4 camera projection matrix from intrinsics and a planar homography."""
    # Recover rotation (first two columns) and translation from K^-1 * (-H).
    flipped = homography * (-1)
    decomposed = np.dot(np.linalg.inv(camera_parameters), flipped)
    g1 = decomposed[:, 0]
    g2 = decomposed[:, 1]
    g3 = decomposed[:, 2]
    # Normalise by the geometric mean of the two rotation-column lengths.
    scale = math.sqrt(np.linalg.norm(g1, 2) * np.linalg.norm(g2, 2))
    rot_1 = g1 / scale
    rot_2 = g2 / scale
    translation = g3 / scale
    # Third rotation axis is orthogonal to the first two.
    rot_3 = np.cross(rot_1, rot_2)
    projection = np.stack((rot_1, rot_2, rot_3, translation)).T
    return np.dot(camera_parameters, projection)
def render(img, obj, projection, model, color):
    """
    Render a loaded obj model into the current video frame
    """
    vertices = obj.vertices
    scale_matrix = np.eye(3) * 3  # uniform 3x scale applied to the model
    h, w = model.shape
    c = 0  # per-face brightness ramp used when color is False
    for face in obj.faces:
        c = c + 0.5
        face_vertices = face[0]
        # OBJ indices are 1-based; convert to 0-based vertex lookups.
        points = np.array([vertices[vertex - 1] for vertex in face_vertices])
        points = np.dot(points, scale_matrix)
        # render model in the middle of the reference surface. To do so,
        # model points must be displaced
        points = np.array([[p[0] + w / 2, p[1] + h / 2, p[2]] for p in points])
        dst = cv2.perspectiveTransform(points.reshape(-1, 1, 3), projection)
        imgpts = np.int32(dst)
        if color is False:
            cv2.fillConvexPoly(img, imgpts, (255, c + 100, c))
        else:
            # Face colour comes from the OBJ material, stored as hex in face[-1].
            color = hex_to_rgb(face[-1])
            color = color[::-1]  # reverse RGB -> BGR for OpenCV
            cv2.fillConvexPoly(img, imgpts, color)
    return img
def hex_to_rgb(hex_color):
    """Convert a hex colour string (with or without '#') to an (R, G, B) tuple."""
    digits = hex_color.lstrip('#')
    step = len(digits) // 3  # width of each channel (2 for '#RRGGBB')
    return tuple(int(digits[start:start + step], 16)
                 for start in range(0, len(digits), step))
| {"/reference_plane.py": ["/process_func.py", "/config.py"], "/config.py": ["/reference_plane.py"], "/ar_model.py": ["/config.py", "/process_func.py"], "/main.py": ["/config.py", "/process_func.py", "/ar_model.py"]} |
66,764 | samueldsr99/tictactoe | refs/heads/master | /state.py | class State:
def __init__(self, matrix: [] = None):
if matrix is None:
self.matrix = [0 for _ in range(9)]
self.turn = 1
else:
self.matrix = matrix
# Check player turn from matrix info
self.turn = 1
for row in matrix:
for cell in matrix:
if cell != 0:
self.turn = 3 - self.turn
def move(self, pos: int):
if self.matrix[pos] != 0:
raise ValueError('Invalid position')
self.matrix[pos] = self.turn
self.turn = 3 - self.turn
def winner(self):
"""
0 | 1 | 2
---|---|---
3 | 4 | 5
---|---|---
6 | 7 | 8
winner id if there is a winner
-1 if not
0 if game has not ended
"""
m = self.matrix
# Horizontal
for i in [0, 3, 6]:
if self._equal(m[i], m[i + 1], m[i + 2]):
return m[i]
# Vertical
for i in [0, 1, 2]:
if self._equal(m[i], m[i + 3], m[i + 6]):
return m[i]
# Diagonals
if self._equal(m[0], m[4], m[8]):
return m[0]
if self._equal(m[2], m[4], m[6]):
return m[2]
if len(self.choices()) > 0:
return 0
return -1
def choices(self):
return [i for i, v in enumerate(self.matrix) if v == 0]
def clone(self):
s = State()
s.matrix = [self.matrix[i] for i in range(9)]
s.turn = self.turn
return s
def _equal(self, x: int, y: int, z: int):
return x != 0 and x == y and y == z and x == z
@staticmethod
def encode(row: int, col: int):
return row * 3 + col
@staticmethod
def decode(pos: int):
pos = pos - 1
return pos // 3, pos % 3 # row, col
def __str__(self):
m = self.matrix
dmap = lambda c : 'X' if c == 1 else 'O' if c == 2 else ' '
ret = ""
for i in [0, 3, 6]:
line = [dmap(m[i]), dmap(m[i + 1]), dmap(m[i + 2])]
ret += ' | '.join(line) + '\n'
if i < 6:
ret += '--+---+--\n'
return ret
| {"/main.py": ["/display.py", "/utils.py", "/state.py", "/agent.py"], "/display.py": ["/state.py", "/utils.py"], "/tests/test_state.py": ["/state.py"], "/agent.py": ["/state.py", "/utils.py"]} |
66,765 | samueldsr99/tictactoe | refs/heads/master | /main.py | from display import Screen
from utils import cprint, GREEN, RED, YELLOW
from state import State
from agent import Agent
from argparse import ArgumentParser
# Keyboard -> board index: the player types numpad-style digits (7-8-9 is the
# top row, as drawn by Screen), mapped onto State's 0-8 layout (0 = top-left).
map_move = {
    7: 0, 8: 1, 9: 2,
    4: 3, 5: 4, 6: 5,
    1: 6, 2: 7, 3: 8,
}
# Inverse mapping (board index -> numpad digit).
imap_move = {
    0: 7, 1: 8, 2: 9,
    3: 4, 4: 5, 5: 6,
    6: 1, 7: 2, 8: 3,
}
def start(player_turn=1):
    """Run one human-vs-AI game loop; *player_turn* (1 or 2) chooses who moves first."""
    state = State()
    screen = Screen()
    agent = Agent()
    agent_turn = 3 - player_turn
    # winner() is 0 while the game is still in progress.
    while not state.winner():
        screen.render(state)
        if state.turn == agent_turn:
            choice = agent.choice(state, agent_turn)
            try:
                state.move(choice)
            except ValueError:
                # The agent proposed an occupied cell — should never happen.
                cprint('Invalid position, AI is broken!', RED)
                input()
                continue
        elif state.turn == player_turn:
            inp = input()
            if inp == '':
                continue
            # NOTE(review): non-numeric input raises ValueError here (uncaught).
            inp = int(inp)
            if inp < 1 or inp > 9:
                cprint('Invalid position', RED)
                input()
                continue
            # Translate the numpad-style key to a board index.
            pos = map_move[inp]
            try:
                state.move(pos)
            except ValueError:
                cprint('Invalid position', RED)
                input()
                continue
    screen.render(state)
    winner = state.winner()
    if winner == -1:
        cprint(f'Tie!\n', YELLOW)
    else:
        cprint(f'Player {winner} wins!\n', GREEN)
if __name__ == '__main__':
    # CLI entry point: --turn 1 plays first (X), --turn 2 plays second (O).
    parser = ArgumentParser()
    parser.add_argument(
        '--turn', default='1', type=int, help='Your turn (1 or 2)')
    args = parser.parse_args()
    if args.turn not in [1, 2]:
        cprint('Invalid turn\n', RED)
    else:
        start(player_turn=int(args.turn))
| {"/main.py": ["/display.py", "/utils.py", "/state.py", "/agent.py"], "/display.py": ["/state.py", "/utils.py"], "/tests/test_state.py": ["/state.py"], "/agent.py": ["/state.py", "/utils.py"]} |
66,766 | samueldsr99/tictactoe | refs/heads/master | /display.py | from state import State
from utils import clear_screen, cprint
from utils import BLACK
class Screen:
    """Console renderer: draws the board plus a numpad-style move legend."""

    def render(self, state: State):
        # Redraw from a clean terminal each turn.
        clear_screen()
        m = state.matrix
        # NOTE(review): duplicates the cell formatting in State.__str__.
        dmap = lambda c : 'X' if c == 1 else 'O' if c == 2 else ' '
        for i in [0, 3, 6]:
            line = [dmap(m[i]), dmap(m[i + 1]), dmap(m[i + 2])]
            cprint(' | '.join(line) + '\n')
            if i < 6:
                cprint('--+---+--\n')
        print('\n\n')
        # Legend mirrors the numpad key mapping used for input in main.py.
        cprint('moves:\n\n', BLACK)
        cprint(' 7 | 8 | 9\n', BLACK)
        cprint('---|---|---\n', BLACK)
        cprint(' 4 | 5 | 6\n', BLACK)
        cprint('---|---|---\n', BLACK)
        cprint(' 1 | 2 | 3\n\n', BLACK)
| {"/main.py": ["/display.py", "/utils.py", "/state.py", "/agent.py"], "/display.py": ["/state.py", "/utils.py"], "/tests/test_state.py": ["/state.py"], "/agent.py": ["/state.py", "/utils.py"]} |
66,767 | samueldsr99/tictactoe | refs/heads/master | /tests/test_state.py | import unittest
from state import State
class TestStates(unittest.TestCase):
    """Unit tests for the State board model."""

    def test_initial_state(self):
        """A fresh board is all zeros."""
        s = State()
        self.assertEqual(s.matrix, [0 for _ in range(9)])

    def test_move(self):
        """Moves write alternating marks 1 and 2 into the cells played."""
        s = State()
        correct = [0 for _ in range(9)]
        turn = 1
        for i in [1, 0, 4, 5, 6, 8, 7, 3, 2]:
            s.move(i)
            correct[i] = turn
            turn = 3 - turn
        self.assertEqual(s.matrix, correct)

    def test_choices(self):
        """choices() lists exactly the empty cells."""
        s = State()
        correct = [i for i in range(9)]
        self.assertEqual(s.choices(), correct)
        s.move(3)
        correct.remove(3)
        self.assertEqual(s.choices(), correct)

    def test_winner(self):
        """
        0 | 1 | 2
        ---|---|---
        3 | 4 | 5
        ---|---|---
        6 | 7 | 8
        """
        # Every winning line, checked for both players.
        w = [
            [0, 4, 8],
            [2, 4, 6],
            [0, 1, 2],
            [3, 4, 5],
            [6, 7, 8],
            [0, 3, 6],
            [1, 4, 7],
            [2, 5, 8]
        ]
        for moves in w:
            s1, s2 = State(), State()
            s1.matrix, s2.matrix = [0 for _ in range(9)], [0 for _ in range(9)]
            for m in moves:
                s1.matrix[m] = 1
                s2.matrix[m] = 2
            self.assertEqual(s1.winner(), 1)
            self.assertEqual(s2.winner(), 2)


if __name__ == '__main__':
    unittest.main()
| {"/main.py": ["/display.py", "/utils.py", "/state.py", "/agent.py"], "/display.py": ["/state.py", "/utils.py"], "/tests/test_state.py": ["/state.py"], "/agent.py": ["/state.py", "/utils.py"]} |
66,768 | samueldsr99/tictactoe | refs/heads/master | /utils.py | from os import system, name
import sys
# colors
BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = range(8)
def has_color(stream):
    """Return True when *stream* is a colour-capable terminal."""
    # isatty must be *called* — the bare bound-method object is always truthy,
    # so the original check could never report "not a tty" for real streams.
    if not (hasattr(stream, "isatty") and stream.isatty()):
        return False
    try:
        import curses
        curses.setupterm()
        return curses.tigetnum("colors") > 2
    except Exception:
        # Narrowed from a bare except so SystemExit/KeyboardInterrupt escape.
        return False
has_color = has_color(sys.stdout)
def cprint(text, colour=WHITE):
    """Write *text* to stdout, wrapped in an ANSI bold-colour escape when supported."""
    prefix = "\x1b[1;%dm" % (30 + colour)
    out = prefix + text + "\x1b[0m" if has_color else text
    sys.stdout.write(out)
def clear_screen():
    """Clear the terminal: 'cls' on Windows ('nt'), 'clear' on mac/linux ('posix')."""
    _ = system('cls' if name == 'nt' else 'clear')
| {"/main.py": ["/display.py", "/utils.py", "/state.py", "/agent.py"], "/display.py": ["/state.py", "/utils.py"], "/tests/test_state.py": ["/state.py"], "/agent.py": ["/state.py", "/utils.py"]} |
66,769 | samueldsr99/tictactoe | refs/heads/master | /agent.py | from state import State
import random
from utils import cprint, clear_screen
from utils import (
BLACK,
RED,
GREEN,
YELLOW,
BLUE,
MAGENTA,
CYAN,
WHITE
)
class MiniMaxAgent:
    """
    TicTacToe agent that implements Minimax choice criteria
    """
    # NOTE: type annotations are quoted so importing this module never
    # depends on State being resolvable at class-creation time.

    def choice(self, state: "State", turn: int):
        """Return the board index with the best minimax value for *turn*."""
        # Minimax decision over every legal move; ties broken by shuffling.
        choices = state.choices()
        random.shuffle(choices)
        best_move, best_value = 0, -2
        percent = 0
        for i, choice in enumerate(choices):
            new_state = state.clone()
            new_state.move(choice)
            child_value = self.min_value(new_state, turn)
            percent += 100 / len(choices)
            cprint(f'Thinking... {percent}%\n', CYAN)
            if child_value > best_value:
                best_value = child_value
                best_move = choice
        if best_value > 1000:
            # leaf_value adds a BIAS of 1000 to guaranteed wins.
            cprint('mmmm... I\'m about to win :)\n', YELLOW)
            input()
        return best_move

    def connection_value(self, arr: list, val: int):
        """Count adjacent (8-neighbour) pairs of cells equal to *val* on flat board *arr*."""
        m = []
        for i in [0, 3, 6]:
            m.append([arr[i], arr[i + 1], arr[i + 2]])
        # 8 neighbour offsets: N/S/E/W plus the four diagonals.
        dr = [1, 0, -1, 0, 1, 1, -1, -1]
        dc = [0, 1, 0, -1, 1, -1, -1, 1]
        counter = 0
        for i in range(len(m)):
            for j in range(len(m[0])):
                if m[i][j] != val:
                    continue
                for d in range(8):
                    ni, nj = i + dr[d], j + dc[d]
                    if ni >= 0 and ni < 3 and nj >= 0 and nj < 3:
                        counter += (m[i][j] == m[ni][nj])
        return counter

    def leaf_value(self, state: "State", winner: int, turn: int, moves_amount: int):
        """Score a finished game: faster wins score higher, losses -1, ties by connectivity."""
        BIAS = 1000
        if winner == turn:  # win
            return BIAS + 1 / moves_amount
        if winner == 3 - turn:  # lose
            return -1
        return self.connection_value(state.matrix, turn)  # tie

    def min_value(self, state: "State", turn: int, depth: int = 1):
        """Opponent's ply: the minimum value over all replies."""
        winner = state.winner()
        if winner != 0:
            return self.leaf_value(state, winner, turn, depth)
        choices = state.choices()
        random.shuffle(choices)
        mn = 9999999
        # (Removed a dead accumulator list that collected (value, choice)
        # pairs but was never read.)
        for choice in choices:
            new_state = state.clone()
            new_state.move(choice)
            child_value = self.max_value(new_state, turn, depth + 1)
            mn = min(mn, child_value)
        return mn

    def max_value(self, state: "State", turn: int, depth: int = 1):
        """Our ply: the maximum value over all moves."""
        winner = state.winner()
        if winner != 0:
            return self.leaf_value(state, winner, turn, depth)
        choices = state.choices()
        random.shuffle(choices)
        mx = -9999999
        for choice in choices:
            new_state = state.clone()
            new_state.move(choice)
            child_value = self.min_value(new_state, turn, depth + 1)
            mx = max(mx, child_value)
        return mx
class RandomAgent:
    """Baseline agent: picks any empty cell uniformly at random."""

    def choice(self, state: State, turn: int):
        open_cells = state.choices()
        return random.choice(open_cells)
# Agent = RandomAgent
# Active agent implementation used by main.py.
Agent = MiniMaxAgent

# NOTE(review): leftover debug output — these run (and print) on every import.
a = MiniMaxAgent()
print(a.connection_value([2, 1, 1, 1, 2, 2, 2, 1, 1], 1))
print(a.connection_value([2, 1, 2, 1, 1, 2, 1, 2, 1], 1))
66,770 | krismaz/GW2Solver | refs/heads/main | /options.py | from utils import gold
# Tunables for the trading-post optimisation model.
budget = 10000 * gold  # Maximum gold spend, slightly increases runtime
simplicity = 25  # How many lines of output / distinct operations, keep this low~ish for speed, or just very large for omegaspeed
days = 7  # How many days of buy/sell data are being fetched
days_tag = "7d"  # API tag for the same window as *days* above
hours = 12 / 24  # Fraction of each day's buy/sell volume you can capture
sanity = 50000  # How much of one single thing we can do, effectively limits buys
safetyprice = 0.95  # Adjust sell prices slightly, this helps prevent silly 1-2% flips that might technically be optimal, but are rarely great in practice
min_move_per_day = 10 * gold  # Do not try to sell an item if we can't sell for more than x gold a day, higher means more speed but less flexibility
min_velocity = 1  # Do not try to buy/sell an item if we can get less than this amount of it in the alotted time, higher means more speed but less flexibility
click_weight = 10  # Up this if you want fewer lines but larger chunks
solveroptions = {"threads": 8, "fracGap": 0.1,
                 "maxSeconds": 240}  # Options for the cbc solver

# put your own api key here obvs
apikey = '-'
| {"/options.py": ["/utils.py"], "/solver.py": ["/utils.py", "/options.py"], "/main.py": ["/network.py", "/operations.py", "/solver.py", "/options.py"], "/operations.py": ["/options.py", "/utils.py"], "/network.py": ["/options.py", "/utils.py"]} |
66,771 | krismaz/GW2Solver | refs/heads/main | /utils.py | gold = 100 * 100
silver = 100
def chunks(lst, n):
    """Yield successive n-sized slices of *lst*; the final slice may be shorter."""
    for offset in range(0, len(lst), n):
        yield lst[offset:offset + n]
def coins(value):
    """Format a copper amount as e.g. '1g2s3c' (gold/silver parts shown only when due)."""
    pieces = []
    if value >= gold:
        pieces.append(f"{value//gold}g")
    if value >= silver:
        pieces.append(f"{(value%gold)//silver}s")
    pieces.append(f"{value%silver}c")
    return "".join(pieces)
66,772 | krismaz/GW2Solver | refs/heads/main | /solver.py | from pulp import LpVariable, LpProblem, value, LpMaximize, lpSum, LpStatus, COIN_CMD
from collections import defaultdict
from time import time
import utils
import options
def solve(operations, budget, simplicity):
    """Choose how much of each Operation to run, maximising profit via an ILP.

    operations: list of Operation objects; results are written back onto each
        object (.lpvariable, .indicator, .value) — nothing is returned.
    budget: maximum total copper spent across all costed operations.
    simplicity: cap on the number of distinct "limiter" operation chunks in the
        final plan (keeps the output short); <= 0 disables the limiter machinery.
    """
    start = time()
    # simplicity <= 0 switches off all per-operation limiter bookkeeping.
    if simplicity <= 0:
        for operation in operations:
            operation.limiter = False
    # Drop operations whose caps already zeroed them out.
    operations = [op for op in operations if op.limit > 0]
    # Collect every item id produced / consumed by any operation.
    outputs = set()
    inputs = set()
    for operation in operations:
        outputs = outputs.union(operation.outputs.keys())
        inputs = inputs.union(operation.inputs.keys())
    # Prune: operations needing inputs nothing can produce, and operations
    # that neither make money nor feed another operation.
    operations = [op for op in operations if not set(op.inputs.keys()).difference(outputs)]
    operations = [op for op in operations if op.profit > 0 or set(op.outputs.keys()).intersection(inputs)]
    print(len(operations), 'Operations after filtering')
    # lookup: item id -> operations touching that item (for flow constraints).
    lookup = defaultdict(list)
    for op in operations:
        # Integer amounts when the op produces items; fractional otherwise.
        op.lpvariable = LpVariable(
            op.description, 0, int(op.limit), cat=('Integer' if op.outputs else 'Continuous'))
        for id in op.inputs.keys():
            lookup[id].append(op)
        for id in op.outputs.keys():
            lookup[id].append(op)
        if op.limiter:
            # Counts how many "chunks" of this operation are used.
            op.indicator = LpVariable(
                op.description + '_indicator', 0, op.limit//op.chunk_size + 1, cat='Integer')
    prob = LpProblem("SCIENCE!", LpMaximize)
    # Objective: total profit minus total cost.
    prob += lpSum([(op.profit - op.cost) *
                   op.lpvariable for op in operations]), "PROFIT!"
    # Conservation constraint: for every item, production must cover consumption.
    for item, ops in lookup.items():
        prob += lpSum([(op.outputs.get(item, 0) - op.inputs.get(item, 0))
                       * op.lpvariable for op in ops]) >= 0
    for op in operations:
        if simplicity and op.limiter:
            # Tie the chosen amount to its chunk indicator.
            prob += op.lpvariable <= op.indicator * op.chunk_size
        # if options.min_move_per_day and not op.outputs:
        #     indicator = LpVariable(op.description + '_move_indicator', 0, 1, cat='Binary')
        #     prob += op.lpvariable <= indicator * options.sanity
        #     prob += op.lpvariable * op.profit >= indicator * options.min_move_per_day
    # Spend at most the configured budget.
    prob += lpSum(op.cost * op.lpvariable for op in operations if op.cost) <= budget
    if simplicity:
        # At most `simplicity` distinct limiter chunks in the plan.
        prob += lpSum(op.indicator for op in operations if op.limiter) <= simplicity
    print('Pulp setup took', time() - start, 'seconds')
    print('Starting actual solve now!')
    start = time()
    # NOTE(review): hard-coded Windows path to the CBC solver binary.
    solution = prob.solve(COIN_CMD('D:\\cbc\\bin\\cbc.exe', **options.solveroptions))
    print('Solution status:', LpStatus[solution],
          'in', time() - start, 'seconds')
    print(utils.coins(int(prob.objective.value())),' expected profit')
    # Copy the chosen amounts back onto the Operation objects.
    for operation in operations:
        operation.value = value(operation.lpvariable)
| {"/options.py": ["/utils.py"], "/solver.py": ["/utils.py", "/options.py"], "/main.py": ["/network.py", "/operations.py", "/solver.py", "/options.py"], "/operations.py": ["/options.py", "/utils.py"], "/network.py": ["/options.py", "/utils.py"]} |
66,773 | krismaz/GW2Solver | refs/heads/main | /main.py | import network
import operations
import solver
import options
from datetime import datetime
import json
# Timestamp for the results file name (':' and '.' are not filename-safe on Windows).
timestamp = datetime.now().isoformat().replace(':', '_').replace('.', '_')
print(f"Fetching Data...")
# Live tradingpost prices/volumes (never cached — they go stale quickly).
tp_items = network.tp_items()
# Slow bulk downloads are cached on disk under cache/.
special_recipes = network.cache(network.special_recipes)
recipes = network.cache(network.recipes)
items = network.cache(network.items)
account_recipes = network.account_recipes()
currentsells = network.currentsells()
dyes = network.cache(network.dyes)[1:] #Skip dye remover
# Scale volumes down to the playable fraction of the day; subtract what is
# already listed for sale from the sellable volume.
for item in tp_items:
    item['adjusted_buy'] = int(item['daily_buy_sold'] * options.hours)
    item['adjusted_sell'] = max(0, int((item['daily_sell_sold'] - currentsells[item['id']]) * options.hours))
print("Generating Operations...")
names = {item['id']: item['name'] for item in items}
lookup = {item['id']: item for item in items}
tplookup = {item['id']: item for item in tp_items}
for dye in dyes:
    dye['hue'] = dye['categories'][0]
# Every candidate money-making operation, across all strategies.
operations = operations.FlipBuy(tp_items) + \
    operations.EctoSalvage() + \
    operations.Gemstones(names) + \
    operations.Data(tplookup) + \
    operations.SpecialCrafting(special_recipes, names) + \
    operations.Crafting(recipes, names, account_recipes) + \
    operations.Fractal() + \
    operations.Dyes(dyes, lookup) + \
    operations.Salvaging(tp_items, tplookup, lookup) + \
    operations.FlipSell(tp_items)
print("Preparing Solver...")
# Solves in place: each Operation gets .value (the chosen amount).
solver.solve(operations, options.budget, options.simplicity)
# Human-readable plan.
with open(f'results/{timestamp}.txt', 'w+') as resultfile:
    for operation in operations:
        if operation.value:
            # print(f'{operation.value} x {operation.description}')
            print(f'{operation.value} x {operation.description}', file=resultfile)
# Machine-readable dump for companion tooling.
with open(f'operations.json', 'w+') as resultdatafile:
    json.dump([
        {
            "ID": op.output_hint,
            "Name": names.get(op.output_hint, "???"),
            "Description": op.description,
            "Quantity": int(op.value)
        }
        for op in operations if op.value], resultdatafile)
| {"/options.py": ["/utils.py"], "/solver.py": ["/utils.py", "/options.py"], "/main.py": ["/network.py", "/operations.py", "/solver.py", "/options.py"], "/operations.py": ["/options.py", "/utils.py"], "/network.py": ["/options.py", "/utils.py"]} |
66,774 | krismaz/GW2Solver | refs/heads/main | /operations.py | import options
import utils
import json
from collections import defaultdict
import glob
import csv
class Operation:
    """One tradable action (buy/sell/craft/salvage) for the LP model.

    inputs/outputs map item id -> quantity consumed/produced per unit of the
    operation; cost/profit are copper per unit.  The solver later attaches
    .lpvariable/.indicator and writes the chosen amount into .value.
    """

    def __init__(self, cost, profit, inputs, outputs, limit, description, limiter, chunk_size, output_hint):
        self.cost = cost
        self.profit = profit
        self.inputs = inputs
        self.outputs = outputs
        # Cap the limit at what the budget can afford; free operations fall
        # back to the global sanity cap.
        if cost:
            affordable = options.budget // cost
        else:
            affordable = options.sanity
        self.limit = min(limit, affordable)
        self.description = description
        self.limiter = limiter
        self.chunk_size = chunk_size
        self.output_hint = output_hint
        # Filled in later by the solver.
        self.lpvariable = None
        self.value = None
        self.indicator = None
def FlipBuy(items):
    """One buy-order Operation per viable tradingpost item."""
    ops = []
    for item in items:
        buy = item.get('buy_price', 0)
        # Sanity: skip unpriced items and anything cheaper than vendoring.
        if buy == 0 or ('vendor_value' in item and buy < item['vendor_value']):
            continue
        # Reduce variance a bit: ignore items that barely move.
        if item['adjusted_buy'] < options.min_velocity:
            continue
        ops.append(Operation(
            (item['buy_price'] + 1),
            0,
            {},
            {item['id']: 1},
            min(options.sanity, item['adjusted_buy']),
            f'Buy {item["name"]} ({item["id"]}) @ {utils.coins(item["buy_price"] + 1)}',
            True,
            250 * options.click_weight,
            item['id']
        ))
    return ops
def FlipSell(items):
    """One sell-listing Operation per viable tradingpost item."""
    results = []
    for item in items:
        # Sanity: skip items without a sell price.
        if 'sell_price' not in item:
            continue
        # Reduce variance a bit: ignore items that barely move.
        if item['adjusted_sell'] < options.min_velocity:
            continue
        # Save some computation speed: skip items moving too little gold.
        if item['adjusted_sell'] * item['sell_price'] < options.min_move_per_day:
            continue
        # 15% trading-post tax, plus the configured safety margin.
        expected_profit = int((item['sell_price'] - 1)*0.85*options.safetyprice)
        results.append(Operation(
            0,
            expected_profit,
            {item['id']: 1},
            {},
            min(options.sanity, item['adjusted_sell']),
            f'Sell {item["name"]} ({item["id"]}) @ {utils.coins(item["sell_price"] - 1)}',
            False,
            250 * options.click_weight,
            item['id']
        ))
    return results
def SpecialCrafting(recipes, names):
    """Operations for gw2profits 'special' recipes; item id -1 means gold.

    Recipes costing raw gold become plain vendor-purchase operations.
    """
    # NOTE(review): daily list holds string ids — confirm gw2profits ids are strings.
    daily = ['-260']
    ops = []
    for recipe in recipes:
        # Remove Amalgamated Spam
        if recipe['name'] == "Amalgamated Gemstone":
            continue
        ingredients = {i['item_id']: i['count'] for i in recipe['ingredients']}
        cap = 1 if recipe['id'] in daily else options.sanity
        op = Operation(
            0,
            0,
            ingredients,
            {recipe['output_item_id']: recipe['output_item_count']},
            cap,
            f'Craft {recipe["name"]} from {", ".join(names.get(i["item_id"], "???") for i in recipe["ingredients"])} ({recipe["id"]})',
            False,
            1000 * options.click_weight,
            recipe['output_item_id']
        )
        # Gold is handled as id -1: rewrite those into vendor purchases.
        if -1 in op.inputs:
            op.cost = op.inputs.pop(-1)
            op.description = f'Buy {recipe["name"]} from vendor ({recipe["id"]})'
            op.chunk_size = options.sanity
            op.limiter = False
        ops.append(op)
    return ops
def Crafting(recipes, names, account_recipes):
    """Operations for every official-API crafting recipe the account can use."""
    # Output items that can only be crafted once per day.
    daily = [66913, 79795, 66993, 66917, 66923, 67377, 79726,
             79817, 79790, 46744, 79763, 46742, 46740, 46745, 67015]
    known = set(account_recipes)
    results = []
    for recipe in recipes:
        # skip duplicate mithrilium recipe
        if recipe['id'] == 12053:
            continue
        # skip unlearned recipes
        if "LearnedFromItem" in recipe['flags'] and recipe['id'] not in known:
            continue
        ingredients = {i['item_id']: i['count'] for i in recipe['ingredients']}
        cap = 1 if recipe['output_item_id'] in daily else options.sanity
        results.append(Operation(
            0,
            0,
            ingredients,
            {recipe['output_item_id']: recipe['output_item_count']},
            cap,
            f'Craft {names.get(recipe["output_item_id"], "???")} from {", ".join(names.get(i["item_id"], "???") for i in recipe["ingredients"])} ({recipe["id"]})',
            recipe['type'] != 'Refinement',
            1000 * options.click_weight,
            recipe['output_item_id']
        ))
    return results
def EctoSalvage():
    """Single Operation: salvage an ecto (19721) into crystalline dust (24277)."""
    salvage = Operation(
        60,
        0,
        {19721: 1},
        {24277: 1.85},
        options.sanity,
        f'Salvage Ecto',
        False,
        options.sanity,
        19721
    )
    return [salvage]
def Gemstones(names):
    """Ecto + gem -> amalgamated gemstone recipes, plus two draconic lodestone mixes."""
    stones = [24773, 24502, 24884, 24516, 24508, 24522, 72504, 70957, 72315, 76179, 74988,
              24515, 75654, 24510, 24512, 76491, 24520, 42010, 72436, 24524, 24533, 24532, 24518, 24514]
    results = []
    for stone in stones:
        results.append(Operation(
            0,
            0,
            {19721: 5, stone: 75},
            {68063: 11.5},
            options.sanity,
            f'Make gemstones from ecto and {names[stone]}',
            False,
            5*options.click_weight,
            stone
        ))
    results.append(Operation(
        0,
        0,
        {24325: 10,
         24340: 10,
         24330: 10,
         70842: 10},
        {92687: 1},
        options.sanity,
        f'Make draconic lodestones from lodestones (Mordrem Lodestone)',
        False,
        25*options.click_weight,
        92687))
    results.append(Operation(
        0,
        0,
        {24325: 10,
         24340: 10,
         24330: 10,
         24335: 10},
        {92687: 1},
        options.sanity,
        f'Make draconic lodestones from lodestones (Pile of Putrid Essence)',
        False,
        25*options.click_weight,
        92687))
    return results
def Data(lookup):
    """Load hand-measured conversion data (Data/*.json) into Operations.

    Each JSON file describes consuming one input item for a distribution of
    output items plus a flat cost/profit; quantities are normalised per single
    input.  Also prints rough buy/sell margin percentages for eyeballing.

    lookup: tradingpost item lookup (id -> price record).
    """
    files = glob.glob("Data/*.json")
    results = []
    for datafile in files:
        with open(datafile, 'r') as jsonfile:
            data = json.load(jsonfile)
        divisor = data['Input']['Quantity']
        outputs = defaultdict(int)
        for o in data['Outputs']:
            outputs[o['ID']] += o['Quantity']
        item = lookup[data['Input']['ID']]  # (unused; kept for parity)
        # Debug Info: margin if outputs are instasold to buy orders vs listed.
        cost = (lookup[data['Input']['ID']]["buy_price"] +
                1 + data['Cost']) * data['Input']['Quantity']
        profit_buy = sum((lookup[k]["buy_price"] + 1) * v for k, v in outputs.items(
        ) if k in lookup) + data['Profit'] * data['Input']['Quantity']
        profit_sell = sum((lookup[k]["sell_price"] + 1)*0.85 * v for k, v in outputs.items(
        ) if k in lookup) + data['Profit'] * data['Input']['Quantity']
        print(lookup[data['Input']['ID']]["name"], 100 *
              (profit_buy-cost)/cost, 100*(profit_sell-cost)/cost)
        results.append(Operation(
            data['Cost'],
            data['Profit'],
            {data['Input']['ID']: 1},
            {k: v/divisor for k, v in outputs.items()},
            options.sanity,
            f'{data["Verb"]} {data["Input"]["Name"]}',
            False,
            options.sanity,
            data['Input']['ID']
        ))
    return results
def Fractal():
    """Open fractal encryptions: keys + matrices in; infusions, t7 bags and t5 mats out."""
    t5 = [24276, 24299, 24282, 24341, 24294, 24356, 24350, 24288]
    outputs = {
        49424: 2.25,  # infusion
        74268: 0.015,  # mew
        46735: 1,  # t7
        46731: 1,  # t7
        46733: 1  # t7
    }
    for material in t5:
        outputs[material] = 0.348
    crack = Operation(
        0,
        43*utils.silver,
        {
            75919: 1,  # encryption
            73248: 0.9  # matrix
        },
        outputs,
        options.sanity,
        "Crack fractal Encryptions",
        False,
        250 * options.click_weight,
        75919
    )
    return [crack]
def Dump():
    """One free one-shot 'use' Operation per row of inventory.csv."""
    ops = []
    with open("inventory.csv", 'r') as csvfile:
        for row in csv.DictReader(csvfile):
            ops.append(Operation(
                0,
                0,
                {},
                {int(row['ID']): int(row['Total'])},
                1,
                f'Use {row["Name"]} ({row["ID"]})',
                False,
                options.sanity,
                int(row['ID'])
            ))
    return ops
def Dyes(dyes, lookup):
    """One salvage Operation per eligible dye, yielding hue pigments.

    NOTE(review): per-hue output ids and rarity yields are assumed from
    measured salvage data — confirm rates before relying on them.
    """
    hues = {
        'Brown': [74982],
        'White': [75862],
        'Blue': [75694],
        'Black': [70426],
        'Gray': [75862, 70426],
        'Red': [71692],
        'Orange': [75270],
        'Purple': [77112],
        'Yellow': [71952],
        'Green': [76799]
    }
    rarities = {
        'Fine': 3,
        'Masterwork': 6.5,
        'Rare': 10.4
    }
    results = []
    for dye in dyes:
        if 'item' not in dye:
            continue
        if dye['hue'] not in hues or dye['item'] not in lookup:
            continue
        item = lookup[dye['item']]
        total_yield = rarities.get(item['rarity'])
        if total_yield is None:
            continue
        pigment_ids = hues[dye['hue']]
        per_pigment = total_yield / len(pigment_ids)
        results.append(
            Operation(
                3,
                0,
                {
                    item['id']: 0.01
                },
                {pigment: per_pigment for pigment in pigment_ids},
                options.sanity,
                f'Salvage {item["name"]}',
                False,
                options.sanity,
                item['id']
            )
        )
    return results
def Salvaging(items, tplookup, lookup):
    """Salvage rare/exotic level-68+ equipment for ectos (and upgrades).

    Rares yield ~0.875 ecto; exotics ~1.2 ecto plus dark matter (46681) and,
    for a few cheap stat prefixes, an insignia/inscription.  Champion-bag
    weapons are excluded from the insignia path to avoid craftable loops.
    (tplookup is currently unused but kept for interface stability.)
    """
    results = []
    champ_items = (44978, 44980, 44983, 72191, 44982, 44960, 44977, 44991, 44984, 44985, 44967, 44964, 44974, 44965, 44971, 44976, 44962,
                   44961, 44986, 44973, 44969, 44988, 44992, 44968, 44987, 44963, 44990, 44966, 44972, 44975, 44989, 44979, 44981, 44999, 44970)
    # BUGFIX: "Solder's" was a typo for the "Soldier's" stat prefix, so
    # Soldier's gear never matched and its insignia/inscription output was
    # silently dropped.
    stats = ("Soldier's", "Rabid", "Dire", "Cavalier's", "Shaman's")
    # Map the stat-prefix word of each insignia/inscription name to its id.
    insignias = {lookup[i]['name'].split()[0]: i for i in (
        46712, 46710, 49522, 46709, 46708)}
    inscriptions = {lookup[i]['name'].split()[0]: i for i in (
        46688, 46686, 46690, 46685, 46684)}
    for item in items:
        if item['id'] not in lookup or "NoSalvage" in lookup[item['id']]['flags']:
            continue
        if item['rarity'] == 'Rare' and \
                item['type'] in ['Armor', 'Weapon', 'Trinket'] and \
                item['level'] >= 68:
            operation = Operation(
                60,
                0,
                {item['id']: 1},
                {19721: 0.875},  # expected ecto yield
                options.sanity,
                f'Extract and salvage {item["name"]} ({item["id"]})',
                False,
                options.sanity,
                item['id']
            )
            # The attached upgrade (rune/sigil) is recovered by the extractor.
            if 'upgrade1' in item:
                operation.outputs[item['upgrade1']] = 1
            results.append(operation)
        if item['rarity'] == 'Exotic' and \
                item['type'] in ['Armor', 'Weapon', 'Trinket'] and \
                item['level'] >= 68:
            operation = Operation(
                60,
                0,
                {item['id']: 1},
                {19721: 1.2, 46681: 0.5},  # ecto + dark matter
                options.sanity,
                f'Extract and salvage {item["name"]} ({item["id"]})',
                False,
                options.sanity,
                item['id']
            )
            if 'upgrade1' in item:
                operation.outputs[item['upgrade1']] = 1
            # We keep this in here for now to avoid craftables
            if item['id'] not in champ_items and 'statName' in item and item['statName'] in stats:
                if item['type'] == 'Weapon':
                    operation.outputs[inscriptions[item['statName']]] = 0.4
                elif item['type'] == 'Armor':
                    operation.outputs[insignias[item['statName']]] = 0.4
            results.append(operation)
    return results
| {"/options.py": ["/utils.py"], "/solver.py": ["/utils.py", "/options.py"], "/main.py": ["/network.py", "/operations.py", "/solver.py", "/options.py"], "/operations.py": ["/options.py", "/utils.py"], "/network.py": ["/options.py", "/utils.py"]} |
66,775 | krismaz/GW2Solver | refs/heads/main | /network.py | import requests
import json
import options
import utils
from collections import defaultdict
# Identify ourselves to the GW2 APIs on every request.
headers = {'user-agent': 'GW2Tradz LPSolver Testing (Krismaz.1250)'}
def tp_items():
    """Fetch tradingpost price/volume data from the Silveress GW2 API.

    Adds derived 'daily_buy_sold' / 'daily_sell_sold' fields: the N-day
    average volume, capped at twice yesterday's volume to damp spikes.
    """
    r = requests.get(
        f'https://api.silveress.ie/gw2/v1/items/json?beautify=min&fields=id,buy_price,sell_price,name,{options.days_tag}_sell_sold,{options.days_tag}_buy_sold,1d_buy_sold,1d_sell_sold,vendor_value,rarity,type,upgrade1,level,statName', headers=headers)
    items = r.json()
    for item in items:
        # BUGFIX: the buy-side average previously read the *sell* volume
        # field ({days_tag}_sell_sold) even though the buy volume is fetched.
        item['daily_buy_sold'] = min(
            item.get(f'{options.days_tag}_buy_sold', 0)/options.days,
            item.get(f'1d_buy_sold', 0)*2)  # cap: tends to spike-bought low-level stuff
        item['daily_sell_sold'] = min(
            item.get(f'{options.days_tag}_sell_sold', 0)/options.days,
            item.get(f'1d_sell_sold', 0)*2)  # same spike cap on the sell side
        # -.-'  NOTE(review): these ids appear to be missing vendor_value
        # upstream, so it is patched in manually — confirm the 5s value.
        if item['id'] in [93371, 93516, 93619, 93522, 93499]:
            item['vendor_value'] = 5*utils.silver
    return items
def special_recipes():
    """Fetch the gw2profits special-recipe list (mystic forge, vendors, ...)."""
    response = requests.get(
        'http://gw2profits.com/json/v3', headers=headers)
    return response.json()
def account_recipes():
    """Fetch the recipe ids unlocked on the configured account (needs API key)."""
    response = requests.get(
        'https://api.guildwars2.com/v2/account/recipes?access_token=' + options.apikey, headers=headers)
    return response.json()
def recipes():
    """Download every recipe from the official GW2 API, batched 200 ids at a time."""
    r = requests.get(
        'https://api.guildwars2.com/v2/recipes', headers=headers)
    all_ids = r.json()
    result = []
    for batch in utils.chunks(all_ids, 200):
        id_list = ','.join(str(i) for i in batch)
        r = requests.get(
            'https://api.guildwars2.com/v2/recipes?ids=' + id_list, headers=headers)
        result += r.json()
    return result
def items():
    """Download every item from the official GW2 API, batched 200 ids at a time."""
    r = requests.get(
        'https://api.guildwars2.com/v2/items', headers=headers)
    all_ids = r.json()
    result = []
    for batch in utils.chunks(all_ids, 200):
        id_list = ','.join(str(i) for i in batch)
        r = requests.get(
            'https://api.guildwars2.com/v2/items?ids=' + id_list, headers=headers)
        result += r.json()
    return result
def currentsells():
    """Total quantity currently listed for sale by the account, per item id."""
    results = []
    page = 0
    maxpage = 0
    # The API paginates; X-Page-Total tells us how many pages exist.
    while page <= maxpage:
        r = requests.get(
            f'https://api.guildwars2.com/v2/commerce/transactions/current/sells?access_token={options.apikey}&page={page}', headers=headers)
        maxpage = int(r.headers['X-Page-Total']) - 1
        results.extend(r.json())
        page += 1
    counts = defaultdict(int)
    for listing in results:
        counts[listing['item_id']] += listing['quantity']
    return counts
def cache(func):
    """Memoise *func*'s JSON-serialisable result in cache/<func name>.json.

    On a hit the cached file is returned without calling *func*.  On a miss,
    *func* runs first and its result is only written once it has succeeded —
    the previous version opened the file for writing *before* calling *func*,
    leaving an empty/corrupt cache file behind if the fetch failed, and its
    bare ``except`` also hid unrelated errors.  The cache directory is now
    created on demand.
    """
    import os
    path = 'cache/' + func.__name__ + '.json'
    try:
        with open(path, 'r') as cachefile:
            print("Cache hit for", func.__name__)
            return json.load(cachefile)
    except (FileNotFoundError, json.JSONDecodeError):
        print("Cache miss for", func.__name__)
    result = func()
    os.makedirs('cache', exist_ok=True)
    with open(path, 'w') as cachefile:
        json.dump(result, cachefile)
    return result
def dyes():
    """Download the complete colour (dye) catalogue from the official API."""
    response = requests.get(
        f'https://api.guildwars2.com/v2/colors?ids=all', headers=headers)
    return response.json()
66,832 | TqDavid/rssrai2019_scene_classification | refs/heads/master | /data.py | # -*- coding: utf-8 -*-
# @Author : Magic
# @Time : 2019/7/4 12:01
# @File : data.py
from config import config_dict
from dataset import SenseData, SenseDataTest
from torchvision import transforms
# Dataset split roots, taken from the shared config.
img_train = config_dict['data_dir_train']
img_val = config_dict['data_dir_val']
img_test = config_dict['data_dir_test']
def train_augs():
    """Training-time pipeline: random crop/flip/colour jitter + ImageNet normalisation."""
    steps = [
        transforms.RandomResizedCrop(config_dict['im_size']),
        transforms.RandomHorizontalFlip(),
        transforms.ColorJitter(brightness=0.1, contrast=0.1, saturation=0.1),
        transforms.ToTensor(),
        transforms.Normalize(
            [0.485, 0.456, 0.406],
            [0.229, 0.224, 0.225]
        ),
    ]
    return transforms.Compose(steps)
def val_augs():
    """Validation pipeline: deterministic resize/center-crop + ImageNet normalisation."""
    steps = [
        transforms.Resize(256, interpolation=2),
        transforms.CenterCrop(config_dict['im_size']),
        transforms.ToTensor(),
        transforms.Normalize(
            [0.485, 0.456, 0.406],
            [0.229, 0.224, 0.225]
        ),
    ]
    return transforms.Compose(steps)
def test_augs():
    """Test pipeline: identical to val_augs (deterministic crop + normalisation)."""
    steps = [
        transforms.Resize(256, interpolation=2),
        transforms.CenterCrop(config_dict['im_size']),
        transforms.ToTensor(),
        transforms.Normalize(
            [0.485, 0.456, 0.406],
            [0.229, 0.224, 0.225]
        ),
    ]
    return transforms.Compose(steps)
def get_train_data(img_path=img_train, transform = train_augs()):
    """Training split wrapped in SenseData with the training augmentations."""
    dataset = SenseData(img_path, transform)
    return dataset
def get_val_data(img_path=img_val, transform = val_augs()):
    """Validation split wrapped in SenseData with deterministic transforms."""
    dataset = SenseData(img_path, transform)
    return dataset
def get_test_data(img_path=img_test, transform = test_augs()):
    """Unlabelled test split wrapped in SenseDataTest."""
    dataset = SenseDataTest(img_path, transform)
    return dataset
| {"/data.py": ["/config.py", "/dataset.py"], "/train.py": ["/data.py", "/utils.py"], "/preprocess.py": ["/utils.py", "/config.py"], "/dataset.py": ["/config.py", "/utils.py"], "/test.py": ["/data.py"], "/utils.py": ["/config.py"]} |
66,833 | TqDavid/rssrai2019_scene_classification | refs/heads/master | /train.py | # -*- coding: utf-8 -*-
# @Author : Magic
# @Time : 2019/7/4 12:02
# @File : train.py
import torch, time
import torchvision.models as models
import argparse
import os
import numpy as np
from torch.utils.data import DataLoader
import torch.nn as nn
import torch.optim as optim
from torch.autograd import Variable
from tensorboardX import SummaryWriter
from Nadam import Nadam
from data import get_train_data, get_val_data
from utils import save_checkpoint
import utils
def parse_args(argv=None):
    """Build and parse the training command line.

    argv: optional list of argument strings; defaults to sys.argv[1:].
        Passing an explicit list makes the function testable and reusable
        without touching the process arguments (backward compatible —
        existing zero-argument calls behave exactly as before).
    """
    parser = argparse.ArgumentParser(description='train scene data')
    parser.add_argument('--batch-size', default=128, type=int, help='batch size')
    parser.add_argument('--num-workers', default=4, type=int ,help='the num of threads to load data')
    parser.add_argument('--epochs', default=100, type=int, help='training epochs, Default 500')
    parser.add_argument('--lr', default=0.01, type=float, help='learning rate')
    parser.add_argument('--num-classes', default=45, type=int, help='the number of classes')
    parser.add_argument('--gpus', default='0,1', type=str, help='ordinates of gpus to use, can be "0,1,2,3" ')
    parser.add_argument('--seed', default=666, type=int, help='random seed to use, Default=666')
    parser.add_argument('--begin-epoch', default=0, type=int, help='begin epoch')
    parser.add_argument('--lr-factor', default=0.1, type=float, help='the ratio to reduce lr on each step')
    parser.add_argument('--lr-step-epochs', default='20,45,60,80', type=str, help='the epochs to reduce the lr')
    parser.add_argument('--save-model-prefix', default='resnext', type=str, help='model prefix')
    parser.add_argument('--save-model-step', type=int, default=1, help='snapshot step (epoch num)')
    parser.add_argument('--net-params', type=str, default=None, help='resume the training')
    parser.add_argument('--log-dir', type=str, default='log', help='the directory of the log')
    parser.add_argument('--log-file', type=str, default='log.txt', help='log file path')
    return parser.parse_args(argv)
args = parse_args()
# TensorBoard event stream for loss/accuracy curves.
writer = SummaryWriter(log_dir='logs_board/resnext')
devs = [int(x) for x in args.gpus.split(',')]
lr_step_epochs = [int(x) for x in args.lr_step_epochs.split(',')]
if args.log_dir:
    utils.create_dir(args.log_dir)
logger = utils.Logger(os.path.join(args.log_dir, args.log_file))
train_data = get_train_data()
train_loader = DataLoader(train_data, batch_size=args.batch_size, num_workers=args.num_workers, shuffle=True)
# ImageNet-pretrained ResNeXt-50 with the classifier head resized to our classes.
model = models.resnext50_32x4d(pretrained=True)
num_fc = model.fc.in_features
model.fc = nn.Linear(num_fc, args.num_classes)
if args.net_params:
    # Resume: strip DataParallel's 'module.' prefix from the checkpoint keys.
    print('=> Loading checkpoint... ')
    resume_model = torch.load(args.net_params)
    model_dict = resume_model['model']
    args.begin_epoch = resume_model['epoch']
    pred_dict = {}
    for k,v in model_dict.items():
        pred_dict[k.replace('module.','')] = v
    model.load_state_dict(pred_dict)
if args.begin_epoch:
    # Re-apply the lr decay steps that already happened before the resume point.
    for i in lr_step_epochs:
        if args.begin_epoch>=i:
            args.lr = args.lr*0.1
    print('Learning rate is ', args.lr)
model = nn.DataParallel(model, device_ids=devs)
model.to('cuda:0')
criterion = nn.CrossEntropyLoss()
optimizer = Nadam(model.parameters(), lr=args.lr)
val_data = get_val_data()
val_loader = DataLoader(val_data, batch_size=args.batch_size, num_workers=args.num_workers, shuffle=False)
def train(epoch):
    """Run one training epoch over train_loader; logs per-batch loss and epoch accuracy."""
    epoch_loss, rightN = 0, 0
    model.train()
    for idx, batch in enumerate(train_loader, 1):
        img, label = Variable(batch[0], requires_grad=True), Variable(torch.from_numpy(np.array(batch[1])).long())
        if torch.cuda.is_available():
            img = img.to('cuda:0')
            label = label.to('cuda:0')
        optimizer.zero_grad()
        t0 = time.time()
        pred = model(img)
        #print(pred.shape, label.shape, label.squeeze().shape)
        loss = criterion(pred, label.squeeze())
        # Accumulate top-1 accuracy over the epoch.
        pred = np.argmax(pred.data.cpu().numpy(), axis=1)
        gt = label.squeeze().cpu().numpy()
        rightN += (pred==gt).sum()
        epoch_loss += loss.item()
        loss.backward()
        optimizer.step()
        writer.add_scalar('scalar/loss',loss.item(), epoch*len(train_loader)+idx)
        msg = '==> Epoch[{}]({}/{}): Loss: {:.4f} || Timer: {:.4f} sec.'.format(epoch, idx, len(train_loader),\
            loss.item(), time.time()-t0)
        logger.write(msg)
    msg = '==> Epoch {} Complete. Train Acc: {:.4f} || Avg Loss: {:.4f}'.format(epoch, rightN/len(train_loader.dataset), epoch_loss/len(train_loader))
    logger.write(msg)
    writer.add_scalar('scalar/train_acc', rightN/len(train_loader.dataset), epoch)
def val(epoch):
    """Evaluate on the validation split and log top-1 accuracy (no gradients)."""
    model.eval()
    with torch.no_grad():
        count = 0
        for idx, batch in enumerate(val_loader, 1):
            img, label = Variable(batch[0]), Variable(torch.from_numpy(np.array(batch[1])).long())
            if torch.cuda.is_available():
                img = img.to('cuda:0')
                label = label.to('cuda:0')
            pred = model(img)
            # Count correct top-1 predictions.
            pred = np.argmax(pred.data.cpu().numpy(), axis=1)
            gt = label.squeeze().cpu().numpy()
            count += (pred==gt).sum()
        msg = '==> Train{}: Complete. Val Acc: {:.4f} '.format(epoch, count/len(val_loader.dataset))
        logger.write(msg)
        writer.add_scalar('scalar/val_acc', count/len(val_loader.dataset), epoch)
if __name__ == '__main__':
    # Main training loop: train + validate each epoch, stepping lr at the
    # configured epochs and snapshotting every save_model_step epochs.
    for epoch in range(args.begin_epoch, args.epochs + 1):
        train(epoch)
        val(epoch)
        if epoch in lr_step_epochs:
            for param_group in optimizer.param_groups:
                param_group['lr'] *= args.lr_factor
            print('Learning rate decay : lr = {}'.format(optimizer.param_groups[0]['lr']))
        if (epoch+1) % args.save_model_step == 0:
            save_checkpoint(model, epoch, args.save_model_prefix)
    writer.close()
66,834 | TqDavid/rssrai2019_scene_classification | refs/heads/master | /config.py | # -*- coding: utf-8 -*-
# @Author : Magic
# @Time : 2019/7/4 12:01
# @File : config.py
config_dict = dict()
# Text file with 'chinese_name:english_name:label_id' lines (one per class).
config_dict['name_to_id'] = 'ClsName2id.txt'
# Dataset layout: data/{train,val,test}.
config_dict['data_dir'] = 'data'
config_dict['data_dir_train'] = 'data/train'
config_dict['data_dir_val'] = 'data/val'
config_dict['data_dir_test'] = 'data/test'
# Network input resolution (H, W).
config_dict['im_size'] = (224, 224)
| {"/data.py": ["/config.py", "/dataset.py"], "/train.py": ["/data.py", "/utils.py"], "/preprocess.py": ["/utils.py", "/config.py"], "/dataset.py": ["/config.py", "/utils.py"], "/test.py": ["/data.py"], "/utils.py": ["/config.py"]} |
66,835 | TqDavid/rssrai2019_scene_classification | refs/heads/master | /preprocess.py | # -*- coding: utf-8 -*-
# @Author : Magic
# @Time : 2019/7/4 12:04
# @File : preprocess.py
import os
import utils
from config import config_dict
# transform cn_name to en_name
def transform_name(path):
    """Rename class directories under *path* from Chinese to English names.

    Directories whose basename matches a Chinese class name from the
    ClsName2id mapping are renamed in place; everything else is untouched.

    NOTE(review): renaming directories while os.walk is still iterating is
    fragile (the default topdown walk may try to descend into the old name),
    and the '/' split assumes POSIX-style separators — confirm before reuse.
    """
    cn_to_en, _ = utils.map_label()
    for ro, di, fi in os.walk(path):
        dirname = os.path.dirname(ro)
        name = ro.split('/')[-1]
        print(dirname, name)
        if name in cn_to_en:
            os.rename(os.path.join(dirname, name), os.path.join(dirname, cn_to_en[name]))
            print('Rename Success!')
def calc_trainset_mean_std():
    """Placeholder: compute per-channel mean/std over the training set.

    TODO: not implemented — training currently uses the ImageNet statistics.
    """
    pass
if __name__ == '__main__':
    # One-off preprocessing step: anglicise the class directory names.
    transform_name(config_dict['data_dir'])
66,836 | TqDavid/rssrai2019_scene_classification | refs/heads/master | /dataset.py | # -*- coding: utf-8 -*-
# @Author : Magic
# @Time : 2019/7/5 10:23
# @File : dataset.py
import torch
import glob
from torch.utils.data import Dataset,DataLoader
from torchvision import transforms
from config import config_dict
from utils import map_label
from PIL import Image
import cv2
class SenseData(Dataset):
    """Labelled scene dataset: expects <img_dir>/<class_name>/<image>.jpg.

    __getitem__ returns (image, label_tensor) with labels shifted to 0-based.
    """

    def __init__(self, img_dir, transform = None):
        super(SenseData, self).__init__()
        self.img_list = glob.glob(img_dir+'/*/*.jpg')
        self.transform = transform
        # PERF: hoisted out of __getitem__ — the mapping file was previously
        # re-read from disk for every single sample.
        _, self.label_map = map_label(config_dict['name_to_id'])

    def __len__(self):
        return len(self.img_list)

    def __getitem__(self, index):
        path = self.img_list[index]
        # NOTE(review): assumes '/'-separated paths with the class name as
        # the third component ('data/train/<class>/x.jpg') — confirm on
        # other platforms / directory layouts.
        img_label = self.label_map[path.split('/')[2]]
        img = Image.open(path)
        if self.transform:
            img = self.transform(img)
        return img, torch.Tensor([int(img_label)-1])
class SenseDataTest(Dataset):
    """Unlabelled test set: __getitem__ yields (index, file_path, image)."""

    def __init__(self, img_dir, transform = None):
        super(SenseDataTest, self).__init__()
        # Sorted so submission order matches the numeric file order.
        self.img_list = sorted(glob.glob(img_dir+'/*.jpg'))
        self.transform = transform

    def __len__(self):
        return len(self.img_list)

    def __getitem__(self, index):
        path = self.img_list[index]
        filedir = path
        image = Image.open(path)
        if self.transform:
            image = self.transform(image)
        return index, filedir, image
if __name__ == '__main__':
    # train_data = SenseData(config_dict['data_dir_train'], None)
    # train_loader = DataLoader(train_data, batch_size=4, shuffle=False, num_workers=1)
    # for i, batch in enumerate(train_loader):
    #     print(batch[0], batch[1])
    # Smoke test: load one resized test batch.
    # NOTE(review): batch is (index, path, image), so batch.shape will raise —
    # this probably meant batch[2].shape.
    test_data = SenseDataTest(config_dict['data_dir_test'], transforms.Compose([transforms.Resize(224, interpolation=2),transforms.ToTensor()]))
    test_loader = DataLoader(test_data, batch_size=1, shuffle=False, num_workers=1)
    for i, batch in enumerate(test_loader):
        print(batch.shape)
        break
66,837 | TqDavid/rssrai2019_scene_classification | refs/heads/master | /test.py | # -*- coding: utf-8 -*-
# @Author : Magic
# @Time : 2019/7/4 12:02
# @File : test.py
import torch
import argparse, os
import numpy as np
import torchvision.models as models
from torch.utils.data import DataLoader
from torch.autograd import Variable
from data import get_test_data, get_val_data
# CLI and model/dataset setup for standalone evaluation.
parser = argparse.ArgumentParser(description='test scene data...')
parser.add_argument('--gpus', default='0', type=str, help='ordinates of gpus to use, can be "0,1,2,3" ')
parser.add_argument('--batch-size', default=1, type=int, help='batch size')
parser.add_argument('--num-workers', default=4, type=int, help='the num of threads to load data')
parser.add_argument('--resume', type=str, default='senet_model_29.pth')
args = parser.parse_args()
# BUGFIX: was 'CUDA_VISBLE_DEVICES' (missing I), so the setting had no effect.
os.environ['CUDA_VISIBLE_DEVICES'] = args.gpus
# Load Datasets
val_data = get_val_data()
test_data = get_test_data()
val_loader = DataLoader(val_data, num_workers=args.num_workers, batch_size=128, shuffle=False)
test_loader = DataLoader(test_data, num_workers=args.num_workers, batch_size=1, shuffle=False)
# Build Model: same architecture/head as training, then load the checkpoint
# with DataParallel's 'module.' prefix stripped from the keys.
model = models.resnext50_32x4d()
in_feature = model.fc.in_features
model.fc = torch.nn.Linear(in_feature, 45)
model_dict = torch.load(args.resume)['model']
pred_dict = {}
for k,v in model_dict.items():
    pred_dict[k.replace('module.','')] = v
#model_dict.update(pred_dict)
model.load_state_dict(pred_dict)
model.to('cuda:0')
model.eval()
# Accumulates 'NNNNN.jpg label' lines for submit.txt.
res = []
def val():
    """Sanity-check the loaded checkpoint: top-1 accuracy on the validation split."""
    with torch.no_grad():
        rightN = 0
        for idx, batch in enumerate(val_loader, 1):
            img, label = Variable(batch[0]), Variable(torch.from_numpy(np.array(batch[1])).long())
            #print(img.shape, label.shape)
            if torch.cuda.is_available():
                img = img.to('cuda:0')
                label = label.to('cuda:0')
            pred = model(img)
            # Count correct top-1 predictions.
            pred = np.argmax(pred.data.cpu().numpy(), axis=1)
            gt = label.squeeze().cpu().numpy()
            rightN += (pred == gt).sum()
        print('==> Complete. Acc: {:.4f} '.format(rightN / len(val_loader.dataset)))
def eval():
    """Predict a 1-based label for every test image and write submit.txt.

    Filenames are generated from the (sorted) loader index as NNNNN.jpg.
    """
    with torch.no_grad():
        for idx, batch in enumerate(test_loader):
            img = batch[2]
            # BUGFIX: the model is moved to cuda:0 during setup but the input
            # tensor was left on the CPU, causing a device-mismatch error.
            if torch.cuda.is_available():
                img = img.to('cuda:0')
            pred = model(img)
            pred_label = pred.argmax(dim=1)
            # Labels are 0-based internally, 1-based in the submission format.
            result = '{:05d}.jpg {}'.format(idx+1, int(pred_label.item()) + 1)
            print(result + ' is done!')
            res.append(result)
    with open('submit.txt', 'w') as f:
        for line in res:
            f.write(line + '\n')
if __name__ == '__main__':
    # Validate the checkpoint; uncomment eval() to regenerate submit.txt.
    val()
    #eval()
    # print('Submit.txt is Finished!')
66,838 | TqDavid/rssrai2019_scene_classification | refs/heads/master | /utils.py | # -*- coding: utf-8 -*-
# @Author : Magic
# @Time : 2019/7/4 10:49
# @File : utils.py
import os
import torch
import numpy as np
from config import config_dict
#file_path = 'F:\\ai_competition\\rssrai2019_scene_classification\\ClsName2id.txt'
# Default label-mapping file, resolved from the shared config.
file_path = config_dict['name_to_id']
def map_label(file_path=None):
    """Parse the 'cn_name:en_name:label' class-mapping file.

    file_path: path to the mapping file; defaults to the configured
        ClsName2id.txt (now resolved lazily at call time instead of being
        baked in at import time — behaviour for all existing callers is
        unchanged).

    Returns (chinese_to_english, label_map), where label_map maps the
    English class name to its (string) label id.
    """
    if file_path is None:
        file_path = config_dict['name_to_id']
    chinese_to_english = {}
    label_map = {}
    with open(file_path, encoding='utf-8') as f:
        for line in f:
            line = line.strip()
            if not line:
                continue  # tolerate blank/trailing lines
            cn_name, en_name, label = line.split(':')
            chinese_to_english[cn_name] = en_name
            label_map[en_name] = label
    return chinese_to_english, label_map
def create_dir(path):
    """Create *path* (including parents) if it does not already exist.

    Uses exist_ok so a directory created concurrently between an existence
    check and makedirs can no longer raise; only OSError is caught (the old
    bare ``except`` also swallowed things like KeyboardInterrupt).
    """
    try:
        os.makedirs(path, exist_ok=True)
    except OSError:
        print('Create dir failed! try again.')
        raise
def cuda(x):
    """Move a tensor (or list/tuple of tensors) to the GPU when available.

    BUGFIX: the CPU-only path previously fell through without a return,
    so every caller received None on machines without CUDA; the input is
    now handed back unchanged.
    """
    if not torch.cuda.is_available():
        return x
    if isinstance(x, (list, tuple)):
        return [_x.cuda() for _x in x]
    return x.cuda()
def save_checkpoint(model, epoch, prefix):
    """Persist {'epoch', 'model': state_dict} to checkpoint/<prefix>_model_<epoch>.pth."""
    if not os.path.exists('checkpoint/'):
        os.mkdir('checkpoint/')
    output_path = 'checkpoint/' + prefix + '_model_{}.pth'.format(epoch)
    state = {'epoch': epoch, 'model': model.state_dict()}
    torch.save(state, output_path)
    print('Checkpoint save to {}'.format(output_path))
class Logger(object):
    """Tiny file logger with a running numeric summary.

    append()/log() accumulate values per key and emit their means;
    write() logs a line immediately and echoes it to stdout.
    """

    def __init__(self, output_name):
        dirname = os.path.dirname(output_name)
        # BUGFIX: a bare filename gives dirname == '' and os.makedirs('')
        # raises FileNotFoundError; only create the directory when there is one.
        if dirname and not os.path.exists(dirname):
            os.makedirs(dirname)
        self.log_file = open(output_name, 'w')
        self.info = {}  # key -> list of appended values since the last log()

    def append(self, key, value):
        """Record *value* under *key* for the next log() summary."""
        vals = self.info.setdefault(key, [])
        vals.append(value)

    def log(self, extra_msg=''):
        """Write (and return) *extra_msg* plus 'key mean' lines; resets the buffer."""
        msgs = [extra_msg]
        for key, vals in self.info.items():
            msgs.append('%s %.6f' % (key, np.mean(vals)))
        msg = '\n'.join(msgs)
        self.log_file.write(msg + '\n')
        self.log_file.flush()
        self.info = {}
        return msg

    def write(self, msg):
        """Log *msg* immediately and echo it to stdout."""
        self.log_file.write(msg + '\n')
        self.log_file.flush()
        print(msg)
| {"/data.py": ["/config.py", "/dataset.py"], "/train.py": ["/data.py", "/utils.py"], "/preprocess.py": ["/utils.py", "/config.py"], "/dataset.py": ["/config.py", "/utils.py"], "/test.py": ["/data.py"], "/utils.py": ["/config.py"]} |
66,848 | drunkpig/django-background-job | refs/heads/main | /background_job/Trigger.py | from apscheduler.triggers.cron import CronTrigger
from datetime import datetime, timedelta
from tzlocal import get_localzone
from apscheduler.triggers.date import DateTrigger
from apscheduler.triggers.interval import IntervalTrigger
class CronJobTrigger(CronTrigger):
    """CronTrigger variant reporting the delay until its next firing.

    NOTE(review): intentionally narrows the parent ``get_next_fire_time``
    signature; only called through this project's scheduler — confirm no
    apscheduler internals invoke it with the parent signature.
    """

    def get_next_fire_time(self):
        """Return ``(seconds_until_next_fire, next_fire_datetime)``."""
        now = datetime.now().astimezone(self.timezone)
        nxt = super().get_next_fire_time(previous_fire_time=None, now=now)
        return (nxt - now).total_seconds(), nxt
class IntervalJobTrigger(IntervalTrigger):
    """IntervalTrigger variant reporting the delay until its next firing."""

    def get_next_fire_time(self):
        """Return ``(seconds_until_next_fire, next_fire_datetime)``."""
        now = datetime.now().astimezone(self.timezone)
        nxt = super().get_next_fire_time(previous_fire_time=None, now=now)
        return (nxt - now).total_seconds(), nxt
class OnceJobTrigger(DateTrigger):
    """One-shot DateTrigger reporting the delay until its (single) firing.

    Unlike the cron/interval variants it uses the machine's local zone
    (``get_localzone()``) rather than ``self.timezone``.
    """

    def get_next_fire_time(self):
        """Return ``(seconds_until_fire, fire_datetime)``."""
        now = datetime.now().astimezone(get_localzone())
        nxt = super().get_next_fire_time(previous_fire_time=None, now=now)
        return (nxt - now).total_seconds(), nxt
66,849 | drunkpig/django-background-job | refs/heads/main | /background_job/migrations/0001_initial.py | # Generated by Django 2.2.13 on 2021-02-24 23:49
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated initial schema for the background_job app:
    # ActionLog, DelayedJob, DjangoJob and JobExecHistory.
    # NOTE: do not edit the operations below once this migration has been
    # applied to any database; add a new migration instead.

    initial = True

    dependencies = [
    ]

    operations = [
        # Audit trail of scheduler actions (loads, enable/disable, ...).
        migrations.CreateModel(
            name='ActionLog',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False)),
                ('action', models.CharField(max_length=256, verbose_name='操作')),
                ('op_host', models.CharField(max_length=128, verbose_name='操作')),
                ('gmt_update', models.DateTimeField(auto_now=True)),
                ('gmt_created', models.DateTimeField(auto_now_add=True)),
            ],
        ),
        # One-off delayed jobs with a retry budget.
        migrations.CreateModel(
            name='DelayedJob',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False)),
                ('job_name', models.CharField(max_length=128)),
                ('version', models.IntegerField()),
                ('enable', models.BooleanField(default=True)),
                ('description', models.TextField(blank=True, null=True)),
                ('job_function', models.CharField(max_length=128)),
                ('job_parameters', models.TextField(blank=True)),
                ('retry', models.IntegerField(default=0)),
                ('retry_cnt', models.IntegerField(default=0)),
                ('gmt_update', models.DateTimeField(auto_now=True)),
                ('gmt_created', models.DateTimeField(auto_now_add=True)),
            ],
            options={
                'ordering': ('gmt_update',),
            },
        ),
        # Scheduled jobs (cron / interval / once / boot_once triggers).
        migrations.CreateModel(
            name='DjangoJob',
            fields=[
                ('id', models.CharField(max_length=64, primary_key=True, serialize=False)),
                ('job_name', models.CharField(max_length=128)),
                ('version', models.IntegerField()),
                ('enable', models.BooleanField(default=True)),
                ('description', models.TextField(blank=True, null=True)),
                ('job_function', models.CharField(max_length=128)),
                ('job_parameters', models.TextField(blank=True)),
                ('trigger_type', models.CharField(choices=[['cron', 'cron'], ['interval', 'interval'], ['once', 'once'], ['boot_once', 'boot_once']], max_length=128)),
                ('trigger_expression', models.CharField(max_length=128)),
                ('max_instances', models.IntegerField(default=1)),
                ('misfire_grace_time', models.IntegerField(default=0)),
                ('coalesce', models.BooleanField(default=False)),
                ('log_succ_interval', models.IntegerField(default=0)),
                ('log_err_interval', models.IntegerField(default=0)),
                ('gmt_update', models.DateTimeField(auto_now=True)),
                ('gmt_created', models.DateTimeField(auto_now_add=True)),
            ],
            options={
                'ordering': ('gmt_update',),
            },
        ),
        # Per-execution history rows, FK'd to the owning DjangoJob.
        migrations.CreateModel(
            name='JobExecHistory',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False)),
                ('job_name', models.CharField(max_length=128, verbose_name='任务名称')),
                ('trigger_type', models.CharField(max_length=128, verbose_name='任务类型')),
                ('version', models.IntegerField()),
                ('status', models.CharField(choices=[['New', 'New'], ['Running', 'Running'], ['Success', 'Success'], ['Error!', 'Error!'], ['Max instances reached!', 'Max instances reached!'], ['Missed!', 'Missed!']], max_length=50)),
                ('result', models.TextField(blank=True, verbose_name='执行返回结果')),
                ('start_tm', models.DateTimeField(auto_now_add=True)),
                ('end_tm', models.DateTimeField(auto_now=True)),
                ('trace_message', models.TextField(blank=True, verbose_name='追踪日志')),
                ('gmt_update', models.DateTimeField(auto_now=True)),
                ('gmt_created', models.DateTimeField(auto_now_add=True)),
                ('job', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='background_job.DjangoJob')),
            ],
            options={
                'ordering': ('-start_tm',),
            },
        ),
    ]
| {"/background_job/Scheduler.py": ["/background_job/models.py", "/background_job/utils.py"], "/background_job/utils.py": ["/background_job/models.py"], "/background_job/job.py": ["/background_job/models.py", "/background_job/utils.py"], "/background_job/models.py": ["/background_job/Trigger.py"], "/background_job/management/commands/process_tasks.py": ["/background_job/JobProcessor.py", "/background_job/Scheduler.py"], "/background_job/admin.py": ["/background_job/models.py"], "/example/jobs.py": ["/background_job/job.py"], "/background_job/JobProcessor.py": ["/background_job/models.py", "/background_job/utils.py"], "/example/test2.py": ["/example/jobs.py"]} |
66,850 | drunkpig/django-background-job | refs/heads/main | /background_job/Scheduler.py | import json
import logging
import queue
import threading
import time
from queue import Queue
import sched, datetime
from django.db.models import Max
from background_job.models import DjangoJob, JobExecHistory
from background_job.utils import get_max_job_version, log_job_history, log_action
logger = logging.getLogger()
class Scheduler(threading.Thread):
    """Background thread that drives job scheduling with the stdlib `sched`.

    Enabled DjangoJob rows are loaded from the database at construction time.
    When a job becomes due, a job *instance* is pushed onto *queue* for a
    separate worker to execute. Every 60 seconds the job table is re-polled
    (by gmt_update watermark) to pick up enabled/disabled/changed jobs.
    """

    def __init__(self, queue:Queue, ):
        super().__init__()
        # Non-daemon thread: the process stays alive while the scheduler runs.
        self.setDaemon(False)
        self.queue = queue
        # load from db
        self.__load_jobs()
        self.max_update_tm = self.__get_max_update_tm()
        self.timer = sched.scheduler(time.time, time.sleep)

    def __load_jobs(self):
        # Cache all enabled jobs of the newest table version, keyed by job id.
        jobs = self.__get_all_jobs()
        job_list = {}
        if jobs:
            for j in jobs:
                job_list[j.id] = j
        self.job_list = job_list
        log_action(f"load {len(self.job_list.keys())} jobs")

    def __get_all_jobs(self):
        # Only jobs with the highest version value are considered active.
        self.max_version = get_max_job_version()
        jobs = DjangoJob.objects.filter(enable=True, version=self.max_version).all()
        return jobs

    def __get_max_update_tm(self):
        # Latest gmt_update across all jobs; used as the reload watermark.
        x = DjangoJob.objects.aggregate(Max('gmt_update'))
        if x:
            return x['gmt_update__max']
        else:
            return datetime.datetime.now()

    def run(self):
        """Thread entry point: arm every enabled job, then run the timer loop."""
        if len(self.job_list.keys())<=0:
            logger.info("no task to schedule")
        else:
            for job in self.job_list.values():
                if job.enable:
                    if job.trigger_type in ['cron', 'interval']:
                        self.__lunch_periodical_job(job)
                    elif job.trigger_type=='once':
                        self.__lunch_once_job(job)
                    else:
                        # remaining trigger types ('boot_once') fire immediately
                        self.__lunch_once_boot_job(job)
        # refresh jobs — NOTE(review): armed unconditionally here so reloads
        # still happen when no jobs were loaded; confirm against upstream.
        self.timer.enter(60, 1, self.__reload_job)
        while True:
            # run() blocks until the event queue empties, then we idle briefly.
            self.timer.run(blocking=True)
            logger.warning("调度无任务,暂时休眠")
            time.sleep(1)

    def __reload_job(self):
        """
        Detect jobs that were added, enabled/disabled or modified since the
        last poll (via the gmt_update watermark) and adjust scheduling.
        Hard deletions cannot be detected short of a full reload.
        """
        # re-arm the 60-second polling cycle first
        self.timer.enter(60, 1, self.__reload_job)
        jobs = DjangoJob.objects.filter(version=self.max_version, gmt_update__gt=self.max_update_tm).all()
        if jobs:
            for j in jobs:
                id = j.id
                enable = j.enable
                # Case 1: enabled job we have never scheduled in this run.
                if enable and id not in self.job_list.keys():
                    # disabled -> enabled: start scheduling it afresh
                    if j.trigger_type in ['cron', 'interval']:
                        self.job_list[id] = j
                        self.__lunch_periodical_job(j)
                        logger.info("enable job=%s", j.job_name)
                        log_action(f"new enable job={j.job_name}")
                elif enable and id in self.job_list.keys():
                    # Case 2: enabled -> disabled -> enabled again: resume
                    self.job_list[id].enable = True
                    self.__lunch_periodical_job(self.job_list[id])
                    log_action(f"disable->enable job={j.job_name}")
                elif not enable and id in self.job_list.keys():
                    # Case 3: enabled -> disabled: stop firing, but keep the
                    # cached row's fields in sync for a later re-enable
                    self.job_list[id].enable = False
                    self.job_list[id].job_parameters = j.job_parameters
                    self.job_list[id].job_name = j.job_name
                    self.job_list[id].job_function = j.job_function
                    self.job_list[id].trigger_expression = j.trigger_expression
                    logger.info("disable job=%s", j.job_name)
                    log_action(f"disable job={j.job_name}")
                # Deletions are not observable here; a full reload is needed.
        else:
            logger.info("没有发现更新的job")
        self.max_update_tm = self.__get_max_update_tm()

    def __lunch_periodical_job(self, job):
        # Arm the next firing of a cron/interval job on the sched timer.
        seconds_to_wait, _ = job.next_run_time()
        # already past due but still within the misfire grace window -> fire now
        if seconds_to_wait<0 and abs(seconds_to_wait)<=job.misfire_grace_time:
            seconds_to_wait = 0
        elif seconds_to_wait<0:
            # TODO: decide from the configured log level whether to record this
            logger.info("task [%s] missed! (delay, misfire_grace_time)=(%s, %s)", job.job_function, seconds_to_wait, job.misfire_grace_time)
            log_job_history(job.instance(), status=JobExecHistory.MISSED, result=None, trace_message=None)
            return
        logger.info("task [%s] will invoke after [%f] seconds later", job.job_function, seconds_to_wait)
        evt = self.timer.enter(seconds_to_wait, 0, self.__fire_job, argument=(job, ))
        # keep the sched event handle on the job (presumably for cancellation
        # elsewhere — TODO confirm a consumer exists)
        job.evt = evt

    def __lunch_once_job(self, job):
        # Arm a one-shot ('once') job; same grace-window handling as above.
        seconds_to_wait, _ = job.next_run_time()
        # already past due but still within the misfire grace window -> fire now
        if seconds_to_wait < 0 and abs(seconds_to_wait) <= job.misfire_grace_time:
            seconds_to_wait = 0
        elif seconds_to_wait < 0:
            log_job_history(job.instance(), status=JobExecHistory.MISSED, result=None, trace_message=f"delay ~ misfire_grace_time")
            logger.info("task [%s] missed! (delay, misfire_grace_time)=(%s, %s)", job.job_function, seconds_to_wait,
                        job.misfire_grace_time)
            return
        logger.info("task [%s] will invoke after [%f] seconds later", job.job_function, seconds_to_wait)
        self.timer.enter(seconds_to_wait, 0, self.__fire_once_job, argument=(job,))

    def __fire_once_job(self, job):
        # A 'once' job, when due, is dispatched exactly like a boot job.
        self.__lunch_once_boot_job(job)

    def __lunch_once_boot_job(self, job):
        # Dispatch immediately: record a NEW history row and enqueue the
        # job instance for the worker; a full queue marks the run as MISSED.
        job_instance = job.instance()
        try:
            log_job_history(job_instance, status=JobExecHistory.NEW, result=None, trace_message=None)
            self.queue.put_nowait(job_instance)
        except queue.Full as e:
            logger.exception(e)
            log_job_history(job_instance, status=JobExecHistory.MISSED, result=None, trace_message=e)
        except Exception as e:
            logger.exception(e)
            log_job_history(job_instance, status=JobExecHistory.MISSED, result=None, trace_message=e)

    def __fire_job(self, job:DjangoJob):
        # Timer callback for periodical jobs: dispatch the current run, then
        # re-arm the next one.
        job_instance = job.instance()
        try:
            log_job_history(job_instance, status=JobExecHistory.NEW, result=None, trace_message=None)
            self.queue.put_nowait(job_instance)
        except queue.Full as e:
            logger.exception(e)
            log_job_history(job_instance, status=JobExecHistory.MISSED, result=None, trace_message=e)
        except Exception as e:
            logger.exception(e)
            log_job_history(job_instance, status=JobExecHistory.MISSED, result=None, trace_message=e)
        # Schedule the next firing if it is in the future or within grace.
        # NOTE(review): a negative-but-within-grace delay is passed to
        # timer.enter() as-is (fires immediately) — confirm this is intended.
        seconds_to_wait,_ = job.next_run_time()
        if seconds_to_wait>0 or abs(seconds_to_wait) <= job.misfire_grace_time:
            if job.enable:
                self.timer.enter(seconds_to_wait, 0, self.__fire_job, argument=(job,))
                logger.info("task [%s] will invoke after [%f] seconds later", job.job_function, seconds_to_wait)
        else:
            logger.info("task [%s] missed! (delay, misfire_grace_time)=(%s, %s)", job.job_function, seconds_to_wait,
                        job.misfire_grace_time)
            log_job_history(job.instance(), status=JobExecHistory.MISSED, result=None, trace_message=f"delay ~ misfire_grace_time")
| {"/background_job/Scheduler.py": ["/background_job/models.py", "/background_job/utils.py"], "/background_job/utils.py": ["/background_job/models.py"], "/background_job/job.py": ["/background_job/models.py", "/background_job/utils.py"], "/background_job/models.py": ["/background_job/Trigger.py"], "/background_job/management/commands/process_tasks.py": ["/background_job/JobProcessor.py", "/background_job/Scheduler.py"], "/background_job/admin.py": ["/background_job/models.py"], "/example/jobs.py": ["/background_job/job.py"], "/background_job/JobProcessor.py": ["/background_job/models.py", "/background_job/utils.py"], "/example/test2.py": ["/example/jobs.py"]} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.