commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
b7db8dd62a323b2f6707c2e660acd94471037f10 | Add configuration url to Surepetcare (#58039) | aronsky/home-assistant,home-assistant/home-assistant,toddeye/home-assistant,aronsky/home-assistant,rohitranjan1991/home-assistant,jawilson/home-assistant,jawilson/home-assistant,nkgilley/home-assistant,lukas-hetzenecker/home-assistant,GenericStudent/home-assistant,GenericStudent/home-assistant,lukas-hetzenecker/home-assistant,rohitranjan1991/home-assistant,w1ll1am23/home-assistant,home-assistant/home-assistant,mezz64/home-assistant,mezz64/home-assistant,toddeye/home-assistant,rohitranjan1991/home-assistant,w1ll1am23/home-assistant,nkgilley/home-assistant | homeassistant/components/surepetcare/entity.py | homeassistant/components/surepetcare/entity.py | """Entity for Surepetcare."""
from __future__ import annotations
from abc import abstractmethod
from surepy.entities import SurepyEntity
from homeassistant.core import callback
from homeassistant.helpers.entity import DeviceInfo
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from . import SurePetcareDataCoordinator
from .const import DOMAIN
class SurePetcareEntity(CoordinatorEntity):
"""An implementation for Sure Petcare Entities."""
def __init__(
self,
surepetcare_id: int,
coordinator: SurePetcareDataCoordinator,
) -> None:
"""Initialize a Sure Petcare entity."""
super().__init__(coordinator)
self._id = surepetcare_id
surepy_entity: SurepyEntity = coordinator.data[surepetcare_id]
if surepy_entity.name:
self._device_name = surepy_entity.name.capitalize()
else:
self._device_name = surepy_entity.type.name.capitalize().replace("_", " ")
self._device_id = f"{surepy_entity.household_id}-{surepetcare_id}"
self._attr_device_info = DeviceInfo(
configuration_url="https://surepetcare.io/dashboard/",
identifiers={(DOMAIN, self._device_id)},
name=self._device_name,
manufacturer="Sure Petcare",
model=surepy_entity.type.name.capitalize().replace("_", " "),
)
self._update_attr(coordinator.data[surepetcare_id])
@abstractmethod
@callback
def _update_attr(self, surepy_entity: SurepyEntity) -> None:
"""Update the state and attributes."""
@callback
def _handle_coordinator_update(self) -> None:
"""Get the latest data and update the state."""
self._update_attr(self.coordinator.data[self._id])
self.async_write_ha_state()
| """Entity for Surepetcare."""
from __future__ import annotations
from abc import abstractmethod
from surepy.entities import SurepyEntity
from homeassistant.core import callback
from homeassistant.helpers.entity import DeviceInfo
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from . import SurePetcareDataCoordinator
from .const import DOMAIN
class SurePetcareEntity(CoordinatorEntity):
"""An implementation for Sure Petcare Entities."""
def __init__(
self,
surepetcare_id: int,
coordinator: SurePetcareDataCoordinator,
) -> None:
"""Initialize a Sure Petcare entity."""
super().__init__(coordinator)
self._id = surepetcare_id
surepy_entity: SurepyEntity = coordinator.data[surepetcare_id]
if surepy_entity.name:
self._device_name = surepy_entity.name.capitalize()
else:
self._device_name = surepy_entity.type.name.capitalize().replace("_", " ")
self._device_id = f"{surepy_entity.household_id}-{surepetcare_id}"
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, self._device_id)},
name=self._device_name,
manufacturer="Sure Petcare",
model=surepy_entity.type.name.capitalize().replace("_", " "),
)
self._update_attr(coordinator.data[surepetcare_id])
@abstractmethod
@callback
def _update_attr(self, surepy_entity: SurepyEntity) -> None:
"""Update the state and attributes."""
@callback
def _handle_coordinator_update(self) -> None:
"""Get the latest data and update the state."""
self._update_attr(self.coordinator.data[self._id])
self.async_write_ha_state()
| apache-2.0 | Python |
2a772ed74613d8842de2efb10282830c9b368174 | Add new layers to docs_api. Change: 128179419 | jeffzheng1/tensorflow,aselle/tensorflow,ppwwyyxx/tensorflow,manipopopo/tensorflow,ran5515/DeepDecision,freedomtan/tensorflow,eaplatanios/tensorflow,arborh/tensorflow,dongjoon-hyun/tensorflow,ZhangXinNan/tensorflow,Bismarrck/tensorflow,SnakeJenny/TensorFlow,jart/tensorflow,Intel-Corporation/tensorflow,cxxgtxy/tensorflow,annarev/tensorflow,dancingdan/tensorflow,rdipietro/tensorflow,DCSaunders/tensorflow,AnishShah/tensorflow,nikste/tensorflow,kchodorow/tensorflow,nanditav/15712-TensorFlow,pcm17/tensorflow,gautam1858/tensorflow,tomasreimers/tensorflow-emscripten,aldian/tensorflow,Moriadry/tensorflow,freedomtan/tensorflow,rabipanda/tensorflow,renyi533/tensorflow,ran5515/DeepDecision,laosiaudi/tensorflow,anilmuthineni/tensorflow,vrv/tensorflow,Bismarrck/tensorflow,mavenlin/tensorflow,jostep/tensorflow,aldian/tensorflow,jart/tensorflow,mavenlin/tensorflow,benoitsteiner/tensorflow-xsmm,mengxn/tensorflow,sjperkins/tensorflow,manipopopo/tensorflow,nburn42/tensorflow,drpngx/tensorflow,snnn/tensorflow,manjunaths/tensorflow,ppries/tensorflow,jendap/tensorflow,maciekcc/tensorflow,gojira/tensorflow,johndpope/tensorflow,jeffzheng1/tensorflow,kevin-coder/tensorflow-fork,jalexvig/tensorflow,sandeepgupta2k4/tensorflow,sarvex/tensorflow,markslwong/tensorflow,aldian/tensorflow,theflofly/tensorflow,tensorflow/tensorflow-pywrap_saved_model,pcm17/tensorflow,cxxgtxy/tensorflow,gunan/tensorflow,seaotterman/tensorflow,tntnatbry/tensorflow,aselle/tensorflow,lakshayg/tensorflow,av8ramit/tensorflow,arborh/tensorflow,taknevski/tensorflow-xsmm,ibmsoe/tensorflow,xzturn/tensorflow,yongtang/tensorflow,strint/tensorflow,wangyum/tensorflow,av8ramit/tensorflow,Kongsea/tensorflow,thesuperzapper/tensorflow,alisidd/tensorflow,odejesush/tensorflow,pavelchristof/gomoku-ai,ravindrapanda/tensorflow,LUTAN/tensorflow,ArtsiomCh/tensorflow,eadgarchen/tensorflow,jwlawson/tensorflow,jhaux/tensorflow,horance-liu/tensorflow,tillahoffmann/tensorflow,zasdfgbnm/tensorflow,eaplatanios/tensorflow,nolanliou/tensorflow,codrut3/tensorflow,av8ramit/tensorflow,suiyuan2009/tensorflow,alshedivat/tensorflow,guschmue/tensorflow,benoitsteiner/tensorflow-opencl,dyoung418/tensorflow,tongwang01/tensorflow,markslwong/tensorflow,pcm17/tensorflow,dongjoon-hyun/tensorflow,ppries/tensorflow,RapidApplicationDevelopment/tensorflow,mortada/tensorflow,freedomtan/tensorflow,DCSaunders/tensorflow,kobejean/tensorflow,alsrgv/tensorflow,LUTAN/tensorflow,theflofly/tensorflow,Mazecreator/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,seaotterman/tensorflow,alisidd/tensorflow,ychfan/tensorflow,dancingdan/tensorflow,odejesush/tensorflow,raymondxyang/tensorflow,johndpope/tensorflow,horance-liu/tensorflow,nburn42/tensorflow,elingg/tensorflow,tomasreimers/tensorflow-emscripten,freedomtan/tensorflow,manipopopo/tensorflow,codrut3/tensorflow,alheinecke/tensorflow-xsmm,asimshankar/tensorflow,naturali/tensorflow,tornadozou/tensorflow,Bulochkin/tensorflow_pack,hsaputra/tensorflow,haeusser/tensorflow,benoitsteiner/tensorflow-opencl,jeffzheng1/tensorflow,tomasreimers/tensorflow-emscripten,code-sauce/tensorflow,manipopopo/tensorflow,ghchinoy/tensorflow,haeusser/tensorflow,ppwwyyxx/tensorflow,AndreasMadsen/tensorflow,paolodedios/tensorflow,kamcpp/tensorflow,anilmuthineni/tensorflow,asimshankar/tensorflow,scenarios/tensorflow,adit-chandra/tensorflow,hehongliang/tensorflow,mrry/tensorflow,snnn/tensorflow,LUTAN/tensorflow,alivecor/tensorflow,yongtang/tensorflow,jendap/tensorflow,girving/tensorflow,Intel-Corporation/tensorflow,davidzchen/tensorflow,gnieboer/tensorflow,MycChiu/tensorflow,DavidNorman/tensorflow,manazhao/tf_recsys,eaplatanios/tensorflow,lukeiwanski/tensorflow-opencl,raymondxyang/tensorflow,benoitsteiner/tensorflow,with-git/tensorflow,renyi533/tensorflow,jendap/tensorflow,av8ramit/tensorflow,Xeralux/tensorflow,xzturn/tensorflow,theflofly/tensorflow,sarvex/tensorflow,kevin-coder/tensorflow-fork,kevin-coder/tensorflow-fork,markslwong/tensorflow,chris-chris/tensorflow,andrewcmyers/tensorflow,ghchinoy/tensorflow,alheinecke/tensorflow-xsmm,meteorcloudy/tensorflow,manjunaths/tensorflow,chris-chris/tensorflow,theflofly/tensorflow,ishay2b/tensorflow,caisq/tensorflow,yaroslavvb/tensorflow,Kongsea/tensorflow,taknevski/tensorflow-xsmm,benoitsteiner/tensorflow-xsmm,frreiss/tensorflow-fred,bowang/tensorflow,gautam1858/tensorflow,karllessard/tensorflow,zycdragonball/tensorflow,xodus7/tensorflow,sarvex/tensorflow,alisidd/tensorflow,yanchen036/tensorflow,with-git/tensorflow,dancingdan/tensorflow,jalexvig/tensorflow,haeusser/tensorflow,scenarios/tensorflow,wangyum/tensorflow,benoitsteiner/tensorflow-opencl,AndreasMadsen/tensorflow,ArtsiomCh/tensorflow,petewarden/tensorflow,caisq/tensorflow,jbedorf/tensorflow,RapidApplicationDevelopment/tensorflow,cg31/tensorflow,anilmuthineni/tensorflow,jwlawson/tensorflow,caisq/tensorflow,jalexvig/tensorflow,jwlawson/tensorflow,thesuperzapper/tensorflow,DavidNorman/tensorflow,alshedivat/tensorflow,rdipietro/tensorflow,vrv/tensorflow,jeffzheng1/tensorflow,eadgarchen/tensorflow,freedomtan/tensorflow,codrut3/tensorflow,alivecor/tensorflow,jhseu/tensorflow,whn09/tensorflow,Bulochkin/tensorflow_pack,cg31/tensorflow,nikste/tensorflow,jhaux/tensorflow,handroissuazo/tensorflow,horance-liu/tensorflow,mavenlin/tensorflow,alistairlow/tensorflow,jwlawson/tensorflow,kchodorow/tensorflow,elingg/tensorflow,mdrumond/tensorflow,ghchinoy/tensorflow,DCSaunders/tensorflow,benoitsteiner/tensorflow-xsmm,memo/tensorflow,llhe/tensorflow,paolodedios/tensorflow,scenarios/tensorflow,sandeepgupta2k4/tensorflow,ageron/tensorflow,codrut3/tensorflow,brchiu/tensorflow,jendap/tensorflow,ghchinoy/tensorflow,cg31/tensorflow,ravindrapanda/tensorflow,johndpope/tensorflow,kchodorow/tensorflow,tntnatbry/tensorflow,asadziach/tensorflow,eadgarchen/tensorflow,calebfoss/tensorflow,karllessard/tensorflow,pierreg/tensorflow,jbedorf/tensorflow,gibiansky/tensorflow,MycChiu/tensorflow,haeusser/tensorflow,sandeepgupta2k4/tensorflow,guschmue/tensorflow,jendap/tensorflow,handroissuazo/tensorflow,Intel-tensorflow/tensorflow,chris-chris/tensorflow,horance-liu/tensorflow,gibiansky/tensorflow,DavidNorman/tensorflow,whn09/tensorflow,ville-k/tensorflow,chemelnucfin/tensorflow,Mazecreator/tensorflow,hehongliang/tensorflow,nolanliou/tensorflow,ychfan/tensorflow,adamtiger/tensorflow,snnn/tensorflow,caisq/tensorflow,dendisuhubdy/tensorflow,Intel-Corporation/tensorflow,HKUST-SING/tensorflow,aselle/tensorflow,seanli9jan/tensorflow,Xeralux/tensorflow,ville-k/tensorflow,anand-c-goog/tensorflow,gojira/tensorflow,dancingdan/tensorflow,gibiansky/tensorflow,alistairlow/tensorflow,ran5515/DeepDecision,ZhangXinNan/tensorflow,sarvex/tensorflow,tensorflow/tensorflow-pywrap_saved_model,seaotterman/tensorflow,Mistobaan/tensorflow,davidzchen/tensorflow,handroissuazo/tensorflow,meteorcloudy/tensorflow,alsrgv/tensorflow,thjashin/tensorflow,renyi533/tensorflow,tornadozou/tensorflow,sandeepdsouza93/TensorFlow-15712,tensorflow/tensorflow-pywrap_tf_optimizer,tomasreimers/tensorflow-emscripten,yufengg/tensorflow,DCSaunders/tensorflow,gibiansky/tensorflow,tensorflow/tensorflow,raymondxyang/tensorflow,hehongliang/tensorflow,jhseu/tensorflow,ageron/tensorflow,sjperkins/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,kamcpp/tensorflow,zasdfgbnm/tensorflow,XueqingLin/tensorflow,dendisuhubdy/tensorflow,renyi533/tensorflow,LUTAN/tensorflow,a-doumoulakis/tensorflow,benoitsteiner/tensorflow,nburn42/tensorflow,eerwitt/tensorflow,MycChiu/tensorflow,yongtang/tensorflow,XueqingLin/tensorflow,annarev/tensorflow,xzturn/tensorflow,gunan/tensorflow,Bismarrck/tensorflow,theflofly/tensorflow,MoamerEncsConcordiaCa/tensorflow,code-sauce/tensorflow,allenlavoie/tensorflow,anilmuthineni/tensorflow,RapidApplicationDevelopment/tensorflow,raymondxyang/tensorflow,ageron/tensorflow,seanli9jan/tensorflow,sandeepdsouza93/TensorFlow-15712,rdipietro/tensorflow,bowang/tensorflow,martinwicke/tensorflow,jhaux/tensorflow,xzturn/tensorflow,manazhao/tf_recsys,sjperkins/tensorflow,ibmsoe/tensorflow,kamcpp/tensorflow,jeffzheng1/tensorflow,JingJunYin/tensorflow,tiagofrepereira2012/tensorflow,andrewcmyers/tensorflow,lukeiwanski/tensorflow,Bulochkin/tensorflow_pack,asadziach/tensorflow,tensorflow/tensorflow-pywrap_saved_model,SnakeJenny/TensorFlow,laosiaudi/tensorflow,pavelchristof/gomoku-ai,mixturemodel-flow/tensorflow,ageron/tensorflow,allenlavoie/tensorflow,benoitsteiner/tensorflow-xsmm,apark263/tensorflow,jbedorf/tensorflow,drpngx/tensorflow,benoitsteiner/tensorflow-xsmm,apark263/tensorflow,ppries/tensorflow,AndreasMadsen/tensorflow,martinwicke/tensorflow,horance-liu/tensorflow,Bulochkin/tensorflow_pack,ishay2b/tensorflow,tillahoffmann/tensorflow,martinwicke/tensorflow,cancan101/tensorflow,lakshayg/tensorflow,sandeepgupta2k4/tensorflow,hfp/tensorflow-xsmm,mortada/tensorflow,Moriadry/tensorflow,theflofly/tensorflow,adit-chandra/tensorflow,jwlawson/tensorflow,snnn/tensorflow,Intel-tensorflow/tensorflow,maciekcc/tensorflow,code-sauce/tensorflow,Kongsea/tensorflow,theflofly/tensorflow,with-git/tensorflow,hsaputra/tensorflow,SnakeJenny/TensorFlow,code-sauce/tensorflow,Moriadry/tensorflow,handroissuazo/tensorflow,DavidNorman/tensorflow,snnn/tensorflow,jhaux/tensorflow,dongjoon-hyun/tensorflow,HKUST-SING/tensorflow,rabipanda/tensorflow,cancan101/tensorflow,maciekcc/tensorflow,raymondxyang/tensorflow,bowang/tensorflow,girving/tensorflow,Intel-tensorflow/tensorflow,odejesush/tensorflow,Intel-tensorflow/tensorflow,karllessard/tensorflow,SnakeJenny/TensorFlow,mrry/tensorflow,rabipanda/tensorflow,asimshankar/tensorflow,jwlawson/tensorflow,ageron/tensorflow,petewarden/tensorflow,dyoung418/tensorflow,mengxn/tensorflow,admcrae/tensorflow,davidzchen/tensorflow,Mistobaan/tensorflow,juharris/tensorflow,xodus7/tensorflow,jhaux/tensorflow,neilhan/tensorflow,juharris/tensorflow,ychfan/tensorflow,chemelnucfin/tensorflow,nolanliou/tensorflow,AndreasMadsen/tensorflow,taknevski/tensorflow-xsmm,juharris/tensorflow,markslwong/tensorflow,ibmsoe/tensorflow,ibmsoe/tensorflow,renyi533/tensorflow,jhaux/tensorflow,neilhan/tensorflow,manazhao/tf_recsys,tensorflow/tensorflow-pywrap_tf_optimizer,calebfoss/tensorflow,bowang/tensorflow,aam-at/tensorflow,alsrgv/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,mdrumond/tensorflow,petewarden/tensorflow,ran5515/DeepDecision,bowang/tensorflow,aselle/tensorflow,adit-chandra/tensorflow,alshedivat/tensorflow,jwlawson/tensorflow,zycdragonball/tensorflow,codrut3/tensorflow,johndpope/tensorflow,llhe/tensorflow,MostafaGazar/tensorflow,nightjean/Deep-Learning,with-git/tensorflow,calebfoss/tensorflow,neilhan/tensorflow,chemelnucfin/tensorflow,guschmue/tensorflow,mixturemodel-flow/tensorflow,tongwang01/tensorflow,wangyum/tensorflow,asadziach/tensorflow,jeffzheng1/tensorflow,benoitsteiner/tensorflow,calebfoss/tensorflow,mortada/tensorflow,karllessard/tensorflow,eaplatanios/tensorflow,kobejean/tensorflow,alistairlow/tensorflow,rabipanda/tensorflow,ville-k/tensorflow,HKUST-SING/tensorflow,unsiloai/syntaxnet-ops-hack,rabipanda/tensorflow,aam-at/tensorflow,Mazecreator/tensorflow,arborh/tensorflow,Mazecreator/tensorflow,HKUST-SING/tensorflow,aselle/tensorflow,alsrgv/tensorflow,alshedivat/tensorflow,DavidNorman/tensorflow,mavenlin/tensorflow,scenarios/tensorflow,abhitopia/tensorflow,eadgarchen/tensorflow,yaroslavvb/tensorflow,adit-chandra/tensorflow,codrut3/tensorflow,MycChiu/tensorflow,tillahoffmann/tensorflow,mdrumond/tensorflow,jart/tensorflow,rdipietro/tensorflow,Bulochkin/tensorflow_pack,Xeralux/tensorflow,ravindrapanda/tensorflow,jwlawson/tensorflow,tensorflow/tensorflow-pywrap_saved_model,asimshankar/tensorflow,memo/tensorflow,caisq/tensorflow,JingJunYin/tensorflow,aam-at/tensorflow,zasdfgbnm/tensorflow,chemelnucfin/tensorflow,HKUST-SING/tensorflow,meteorcloudy/tensorflow,sarvex/tensorflow,MoamerEncsConcordiaCa/tensorflow,krikru/tensorflow-opencl,abhitopia/tensorflow,gunan/tensorflow,chris-chris/tensorflow,Intel-Corporation/tensorflow,xodus7/tensorflow,elingg/tensorflow,RapidApplicationDevelopment/tensorflow,martinwicke/tensorflow,kchodorow/tensorflow,jbedorf/tensorflow,zasdfgbnm/tensorflow,brchiu/tensorflow,paolodedios/tensorflow,mdrumond/tensorflow,cancan101/tensorflow,with-git/tensorflow,JingJunYin/tensorflow,hfp/tensorflow-xsmm,apark263/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,davidzchen/tensorflow,yufengg/tensorflow,aam-at/tensorflow,llhe/tensorflow,kchodorow/tensorflow,odejesush/tensorflow,alheinecke/tensorflow-xsmm,hfp/tensorflow-xsmm,Xeralux/tensorflow,ArtsiomCh/tensorflow,ibmsoe/tensorflow,manjunaths/tensorflow,cg31/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,pcm17/tensorflow,hehongliang/tensorflow,mortada/tensorflow,manipopopo/tensorflow,jendap/tensorflow,anand-c-goog/tensorflow,dyoung418/tensorflow,girving/tensorflow,neilhan/tensorflow,LUTAN/tensorflow,JingJunYin/tensorflow,XueqingLin/tensorflow,guschmue/tensorflow,lukeiwanski/tensorflow-opencl,benoitsteiner/tensorflow-xsmm,ppwwyyxx/tensorflow,dendisuhubdy/tensorflow,dancingdan/tensorflow,bowang/tensorflow,chemelnucfin/tensorflow,DCSaunders/tensorflow,rabipanda/tensorflow,martinwicke/tensorflow,apark263/tensorflow,gautam1858/tensorflow,llhe/tensorflow,petewarden/tensorflow,seanli9jan/tensorflow,laszlocsomor/tensorflow,pavelchristof/gomoku-ai,apark263/tensorflow,benoitsteiner/tensorflow,horance-liu/tensorflow,adamtiger/tensorflow,ZhangXinNan/tensorflow,Mazecreator/tensorflow,chenjun0210/tensorflow,andrewcmyers/tensorflow,yaroslavvb/tensorflow,asimshankar/tensorflow,freedomtan/tensorflow,yongtang/tensorflow,sarvex/tensorflow,tensorflow/tensorflow,cg31/tensorflow,xzturn/tensorflow,memo/tensorflow,jendap/tensorflow,RapidApplicationDevelopment/tensorflow,laszlocsomor/tensorflow,tntnatbry/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,jart/tensorflow,yaroslavvb/tensorflow,alheinecke/tensorflow-xsmm,naturali/tensorflow,abhitopia/tensorflow,pierreg/tensorflow,asadziach/tensorflow,whn09/tensorflow,dendisuhubdy/tensorflow,lakshayg/tensorflow,yufengg/tensorflow,llhe/tensorflow,rabipanda/tensorflow,aldian/tensorflow,rdipietro/tensorflow,alsrgv/tensorflow,kevin-coder/tensorflow-fork,abhitopia/tensorflow,nightjean/Deep-Learning,manjunaths/tensorflow,apark263/tensorflow,eerwitt/tensorflow,dyoung418/tensorflow,paolodedios/tensorflow,yongtang/tensorflow,adit-chandra/tensorflow,tongwang01/tensorflow,ArtsiomCh/tensorflow,tntnatbry/tensorflow,cg31/tensorflow,seanli9jan/tensorflow,thjashin/tensorflow,JVillella/tensorflow,Mazecreator/tensorflow,freedomtan/tensorflow,laosiaudi/tensorflow,cxxgtxy/tensorflow,Bismarrck/tensorflow,llhe/tensorflow,vrv/tensorflow,strint/tensorflow,tornadozou/tensorflow,davidzchen/tensorflow,MycChiu/tensorflow,AnishShah/tensorflow,manipopopo/tensorflow,anilmuthineni/tensorflow,guschmue/tensorflow,ychfan/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tornadozou/tensorflow,pcm17/tensorflow,vrv/tensorflow,dancingdan/tensorflow,drpngx/tensorflow,AnishShah/tensorflow,anand-c-goog/tensorflow,gojira/tensorflow,nanditav/15712-TensorFlow,tornadozou/tensorflow,Intel-tensorflow/tensorflow,anilmuthineni/tensorflow,adit-chandra/tensorflow,tomasreimers/tensorflow-emscripten,eerwitt/tensorflow,girving/tensorflow,petewarden/tensorflow,zasdfgbnm/tensorflow,gautam1858/tensorflow,cg31/tensorflow,jostep/tensorflow,girving/tensorflow,kobejean/tensorflow,sandeepdsouza93/TensorFlow-15712,cxxgtxy/tensorflow,manipopopo/tensorflow,maciekcc/tensorflow,adamtiger/tensorflow,jart/tensorflow,Bismarrck/tensorflow,raymondxyang/tensorflow,strint/tensorflow,tensorflow/tensorflow-pywrap_saved_model,chris-chris/tensorflow,elingg/tensorflow,cancan101/tensorflow,petewarden/tensorflow,hehongliang/tensorflow,annarev/tensorflow,haeusser/tensorflow,seanli9jan/tensorflow,jwlawson/tensorflow,av8ramit/tensorflow,Bismarrck/tensorflow,freedomtan/tensorflow,Intel-tensorflow/tensorflow,lukeiwanski/tensorflow,elingg/tensorflow,seaotterman/tensorflow,tongwang01/tensorflow,gibiansky/tensorflow,kamcpp/tensorflow,RapidApplicationDevelopment/tensorflow,jalexvig/tensorflow,yanchen036/tensorflow,seaotterman/tensorflow,pierreg/tensorflow,karllessard/tensorflow,nburn42/tensorflow,brchiu/tensorflow,Carmezim/tensorflow,Intel-Corporation/tensorflow,ArtsiomCh/tensorflow,ghchinoy/tensorflow,dancingdan/tensorflow,benoitsteiner/tensorflow-opencl,benoitsteiner/tensorflow,annarev/tensorflow,ibmsoe/tensorflow,Carmezim/tensorflow,suiyuan2009/tensorflow,ArtsiomCh/tensorflow,naturali/tensorflow,kobejean/tensorflow,dongjoon-hyun/tensorflow,lakshayg/tensorflow,ran5515/DeepDecision,mortada/tensorflow,petewarden/tensorflow,with-git/tensorflow,manjunaths/tensorflow,tntnatbry/tensorflow,Xeralux/tensorflow,Kongsea/tensorflow,alistairlow/tensorflow,jalexvig/tensorflow,whn09/tensorflow,sarvex/tensorflow,Bismarrck/tensorflow,seanli9jan/tensorflow,haeusser/tensorflow,XueqingLin/tensorflow,alistairlow/tensorflow,dongjoon-hyun/tensorflow,horance-liu/tensorflow,juharris/tensorflow,ran5515/DeepDecision,jbedorf/tensorflow,jwlawson/tensorflow,markslwong/tensorflow,meteorcloudy/tensorflow,wangyum/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,adamtiger/tensorflow,unsiloai/syntaxnet-ops-hack,hfp/tensorflow-xsmm,tensorflow/tensorflow-experimental_link_static_libraries_once,renyi533/tensorflow,ville-k/tensorflow,scenarios/tensorflow,eadgarchen/tensorflow,suiyuan2009/tensorflow,yongtang/tensorflow,ville-k/tensorflow,scenarios/tensorflow,gojira/tensorflow,drpngx/tensorflow,gautam1858/tensorflow,zasdfgbnm/tensorflow,nanditav/15712-TensorFlow,ravindrapanda/tensorflow,a-doumoulakis/tensorflow,eadgarchen/tensorflow,Mistobaan/tensorflow,annarev/tensorflow,davidzchen/tensorflow,tillahoffmann/tensorflow,naturali/tensorflow,gunan/tensorflow,lukeiwanski/tensorflow-opencl,laszlocsomor/tensorflow,JingJunYin/tensorflow,gibiansky/tensorflow,rdipietro/tensorflow,snnn/tensorflow,annarev/tensorflow,Moriadry/tensorflow,tntnatbry/tensorflow,martinwicke/tensorflow,unsiloai/syntaxnet-ops-hack,jalexvig/tensorflow,scenarios/tensorflow,sandeepdsouza93/TensorFlow-15712,aam-at/tensorflow,frreiss/tensorflow-fred,Bismarrck/tensorflow,dongjoon-hyun/tensorflow,ravindrapanda/tensorflow,jhseu/tensorflow,mrry/tensorflow,zycdragonball/tensorflow,petewarden/tensorflow,annarev/tensorflow,adit-chandra/tensorflow,jostep/tensorflow,HKUST-SING/tensorflow,Bismarrck/tensorflow,cg31/tensorflow,sandeepgupta2k4/tensorflow,unsiloai/syntaxnet-ops-hack,tensorflow/tensorflow-pywrap_saved_model,tornadozou/tensorflow,AnishShah/tensorflow,Mazecreator/tensorflow,davidzchen/tensorflow,benoitsteiner/tensorflow-xsmm,kchodorow/tensorflow,alivecor/tensorflow,martinwicke/tensorflow,ZhangXinNan/tensorflow,eadgarchen/tensorflow,Xeralux/tensorflow,XueqingLin/tensorflow,code-sauce/tensorflow,whn09/tensorflow,MoamerEncsConcordiaCa/tensorflow,arborh/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,gautam1858/tensorflow,anand-c-goog/tensorflow,nburn42/tensorflow,AnishShah/tensorflow,kobejean/tensorflow,aam-at/tensorflow,Intel-tensorflow/tensorflow,gnieboer/tensorflow,benoitsteiner/tensorflow-opencl,adit-chandra/tensorflow,thjashin/tensorflow,pavelchristof/gomoku-ai,frreiss/tensorflow-fred,jbedorf/tensorflow,wangyum/tensorflow,zycdragonball/tensorflow,lakshayg/tensorflow,unsiloai/syntaxnet-ops-hack,taknevski/tensorflow-xsmm,nikste/tensorflow,AndreasMadsen/tensorflow,manipopopo/tensorflow,mdrumond/tensorflow,ghchinoy/tensorflow,jhseu/tensorflow,renyi533/tensorflow,arborh/tensorflow,tiagofrepereira2012/tensorflow,strint/tensorflow,anilmuthineni/tensorflow,drpngx/tensorflow,sandeepdsouza93/TensorFlow-15712,cancan101/tensorflow,dendisuhubdy/tensorflow,gunan/tensorflow,mengxn/tensorflow,llhe/tensorflow,RapidApplicationDevelopment/tensorflow,JVillella/tensorflow,eaplatanios/tensorflow,frreiss/tensorflow-fred,arborh/tensorflow,Bulochkin/tensorflow_pack,adamtiger/tensorflow,SnakeJenny/TensorFlow,pierreg/tensorflow,xodus7/tensorflow,llhe/tensorflow,ishay2b/tensorflow,haeusser/tensorflow,frreiss/tensorflow-fred,xodus7/tensorflow,kevin-coder/tensorflow-fork,nanditav/15712-TensorFlow,ppwwyyxx/tensorflow,nolanliou/tensorflow,gnieboer/tensorflow,petewarden/tensorflow,jhseu/tensorflow,calebfoss/tensorflow,Mistobaan/tensorflow,chemelnucfin/tensorflow,bowang/tensorflow,jhseu/tensorflow,LUTAN/tensorflow,thesuperzapper/tensorflow,Bulochkin/tensorflow_pack,annarev/tensorflow,drpngx/tensorflow,jeffzheng1/tensorflow,strint/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,jalexvig/tensorflow,dongjoon-hyun/tensorflow,handroissuazo/tensorflow,alivecor/tensorflow,a-doumoulakis/tensorflow,mavenlin/tensorflow,snnn/tensorflow,alistairlow/tensorflow,DCSaunders/tensorflow,alistairlow/tensorflow,mavenlin/tensorflow,handroissuazo/tensorflow,alivecor/tensorflow,calebfoss/tensorflow,tomasreimers/tensorflow-emscripten,gautam1858/tensorflow,admcrae/tensorflow,manipopopo/tensorflow,abhitopia/tensorflow,aldian/tensorflow,nburn42/tensorflow,benoitsteiner/tensorflow,frreiss/tensorflow-fred,nikste/tensorflow,JVillella/tensorflow,naturali/tensorflow,manazhao/tf_recsys,chemelnucfin/tensorflow,asadziach/tensorflow,jhaux/tensorflow,ppwwyyxx/tensorflow,haeusser/tensorflow,MycChiu/tensorflow,MoamerEncsConcordiaCa/tensorflow,mixturemodel-flow/tensorflow,juharris/tensorflow,renyi533/tensorflow,AnishShah/tensorflow,guschmue/tensorflow,nburn42/tensorflow,ageron/tensorflow,chemelnucfin/tensorflow,strint/tensorflow,Xeralux/tensorflow,taknevski/tensorflow-xsmm,davidzchen/tensorflow,nikste/tensorflow,tillahoffmann/tensorflow,nightjean/Deep-Learning,yanchen036/tensorflow,eadgarchen/tensorflow,gunan/tensorflow,Bulochkin/tensorflow_pack,DavidNorman/tensorflow,anand-c-goog/tensorflow,chemelnucfin/tensorflow,with-git/tensorflow,frreiss/tensorflow-fred,ZhangXinNan/tensorflow,caisq/tensorflow,DCSaunders/tensorflow,tensorflow/tensorflow,sjperkins/tensorflow,vrv/tensorflow,tiagofrepereira2012/tensorflow,andrewcmyers/tensorflow,zycdragonball/tensorflow,alsrgv/tensorflow,girving/tensorflow,allenlavoie/tensorflow,yanchen036/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,jendap/tensorflow,ppries/tensorflow,tongwang01/tensorflow,johndpope/tensorflow,hsaputra/tensorflow,pavelchristof/gomoku-ai,seanli9jan/tensorflow,Mistobaan/tensorflow,krikru/tensorflow-opencl,ghchinoy/tensorflow,eaplatanios/tensorflow,mixturemodel-flow/tensorflow,alheinecke/tensorflow-xsmm,mrry/tensorflow,gnieboer/tensorflow,petewarden/tensorflow,ibmsoe/tensorflow,thesuperzapper/tensorflow,hfp/tensorflow-xsmm,llhe/tensorflow,johndpope/tensorflow,Carmezim/tensorflow,calebfoss/tensorflow,admcrae/tensorflow,zasdfgbnm/tensorflow,elingg/tensorflow,jendap/tensorflow,cancan101/tensorflow,johndpope/tensorflow,codrut3/tensorflow,odejesush/tensorflow,yufengg/tensorflow,chenjun0210/tensorflow,alheinecke/tensorflow-xsmm,gunan/tensorflow,yaroslavvb/tensorflow,pcm17/tensorflow,hsaputra/tensorflow,hehongliang/tensorflow,ageron/tensorflow,Mistobaan/tensorflow,alistairlow/tensorflow,alivecor/tensorflow,Kongsea/tensorflow,gnieboer/tensorflow,kobejean/tensorflow,neilhan/tensorflow,gojira/tensorflow,gunan/tensorflow,ageron/tensorflow,pcm17/tensorflow,horance-liu/tensorflow,theflofly/tensorflow,SnakeJenny/TensorFlow,kamcpp/tensorflow,tntnatbry/tensorflow,eadgarchen/tensorflow,cxxgtxy/tensorflow,AndreasMadsen/tensorflow,codrut3/tensorflow,yongtang/tensorflow,anilmuthineni/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,av8ramit/tensorflow,rabipanda/tensorflow,guschmue/tensorflow,XueqingLin/tensorflow,abhitopia/tensorflow,drpngx/tensorflow,meteorcloudy/tensorflow,admcrae/tensorflow,pierreg/tensorflow,ychfan/tensorflow,tillahoffmann/tensorflow,manazhao/tf_recsys,ville-k/tensorflow,benoitsteiner/tensorflow-xsmm,Carmezim/tensorflow,davidzchen/tensorflow,ZhangXinNan/tensorflow,Kongsea/tensorflow,lukeiwanski/tensorflow,andrewcmyers/tensorflow,Kongsea/tensorflow,JVillella/tensorflow,code-sauce/tensorflow,tntnatbry/tensorflow,chris-chris/tensorflow,ZhangXinNan/tensorflow,ibmsoe/tensorflow,sandeepgupta2k4/tensorflow,MoamerEncsConcordiaCa/tensorflow,ppwwyyxx/tensorflow,chenjun0210/tensorflow,dancingdan/tensorflow,xodus7/tensorflow,hsaputra/tensorflow,markslwong/tensorflow,asadziach/tensorflow,eerwitt/tensorflow,asadziach/tensorflow,handroissuazo/tensorflow,allenlavoie/tensorflow,a-doumoulakis/tensorflow,DCSaunders/tensorflow,ppwwyyxx/tensorflow,nikste/tensorflow,lakshayg/tensorflow,alisidd/tensorflow,adamtiger/tensorflow,lukeiwanski/tensorflow,dancingdan/tensorflow,scenarios/tensorflow,kamcpp/tensorflow,Xeralux/tensorflow,RapidApplicationDevelopment/tensorflow,tomasreimers/tensorflow-emscripten,kobejean/tensorflow,brchiu/tensorflow,naturali/tensorflow,ghchinoy/tensorflow,drpngx/tensorflow,nikste/tensorflow,alsrgv/tensorflow,strint/tensorflow,andrewcmyers/tensorflow,DavidNorman/tensorflow,snnn/tensorflow,wangyum/tensorflow,ravindrapanda/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,annarev/tensorflow,nightjean/Deep-Learning,whn09/tensorflow,taknevski/tensorflow-xsmm,memo/tensorflow,yaroslavvb/tensorflow,eaplatanios/tensorflow,theflofly/tensorflow,zasdfgbnm/tensorflow,benoitsteiner/tensorflow-opencl,laszlocsomor/tensorflow,dancingdan/tensorflow,mavenlin/tensorflow,code-sauce/tensorflow,gunan/tensorflow,aldian/tensorflow,av8ramit/tensorflow,raymondxyang/tensorflow,anand-c-goog/tensorflow,cancan101/tensorflow,chenjun0210/tensorflow,zasdfgbnm/tensorflow,odejesush/tensorflow,neilhan/tensorflow,Xeralux/tensorflow,Bulochkin/tensorflow_pack,horance-liu/tensorflow,andrewcmyers/tensorflow,a-doumoulakis/tensorflow,manjunaths/tensorflow,vrv/tensorflow,thjashin/tensorflow,ychfan/tensorflow,mrry/tensorflow,drpngx/tensorflow,chenjun0210/tensorflow,jart/tensorflow,MostafaGazar/tensorflow,pierreg/tensorflow,jhaux/tensorflow,mengxn/tensorflow,juharris/tensorflow,yongtang/tensorflow,mrry/tensorflow,mrry/tensorflow,mengxn/tensorflow,brchiu/tensorflow,Carmezim/tensorflow,karllessard/tensorflow,manipopopo/tensorflow,arborh/tensorflow,gunan/tensorflow,nanditav/15712-TensorFlow,MostafaGazar/tensorflow,krikru/tensorflow-opencl,av8ramit/tensorflow,gnieboer/tensorflow,eadgarchen/tensorflow,kevin-coder/tensorflow-fork,ghchinoy/tensorflow,alisidd/tensorflow,chenjun0210/tensorflow,jendap/tensorflow,hfp/tensorflow-xsmm,adit-chandra/tensorflow,mixturemodel-flow/tensorflow,codrut3/tensorflow,rabipanda/tensorflow,av8ramit/tensorflow,meteorcloudy/tensorflow,tensorflow/tensorflow,MostafaGazar/tensorflow,Mistobaan/tensorflow,paolodedios/tensorflow,gojira/tensorflow,dyoung418/tensorflow,thjashin/tensorflow,nolanliou/tensorflow,frreiss/tensorflow-fred,gautam1858/tensorflow,meteorcloudy/tensorflow,seaotterman/tensorflow,tongwang01/tensorflow,tensorflow/tensorflow,lukeiwanski/tensorflow-opencl,aam-at/tensorflow,jart/tensorflow,alheinecke/tensorflow-xsmm,AndreasMadsen/tensorflow,cancan101/tensorflow,laszlocsomor/tensorflow,anand-c-goog/tensorflow,suiyuan2009/tensorflow,paolodedios/tensorflow,alshedivat/tensorflow,allenlavoie/tensorflow,theflofly/tensorflow,seanli9jan/tensorflow,yufengg/tensorflow,MycChiu/tensorflow,thesuperzapper/tensorflow,calebfoss/tensorflow,alistairlow/tensorflow,ppries/tensorflow,hsaputra/tensorflow,Intel-Corporation/tensorflow,admcrae/tensorflow,ishay2b/tensorflow,whn09/tensorflow,nolanliou/tensorflow,gibiansky/tensorflow,karllessard/tensorflow,gojira/tensorflow,odejesush/tensorflow,maciekcc/tensorflow,brchiu/tensorflow,mengxn/tensorflow,tensorflow/tensorflow,kobejean/tensorflow,LUTAN/tensorflow,jalexvig/tensorflow,frreiss/tensorflow-fred,krikru/tensorflow-opencl,gnieboer/tensorflow,aam-at/tensorflow,ppwwyyxx/tensorflow,a-doumoulakis/tensorflow,AnishShah/tensorflow,ville-k/tensorflow,Xeralux/tensorflow,ZhangXinNan/tensorflow,ppwwyyxx/tensorflow,vrv/tensorflow,freedomtan/tensorflow,jostep/tensorflow,Bismarrck/tensorflow,xzturn/tensorflow,lukeiwanski/tensorflow,DavidNorman/tensorflow,alshedivat/tensorflow,Mazecreator/tensorflow,theflofly/tensorflow,Intel-tensorflow/tensorflow,JingJunYin/tensorflow,hfp/tensorflow-xsmm,jbedorf/tensorflow,jhseu/tensorflow,arborh/tensorflow,brchiu/tensorflow,kamcpp/tensorflow,strint/tensorflow,tensorflow/tensorflow,asimshankar/tensorflow,manjunaths/tensorflow,mixturemodel-flow/tensorflow,alheinecke/tensorflow-xsmm,alisidd/tensorflow,brchiu/tensorflow,HKUST-SING/tensorflow,allenlavoie/tensorflow,petewarden/tensorflow,girving/tensorflow,mdrumond/tensorflow,zasdfgbnm/tensorflow,krikru/tensorflow-opencl,ZhangXinNan/tensorflow,suiyuan2009/tensorflow,hsaputra/tensorflow,xodus7/tensorflow,mortada/tensorflow,admcrae/tensorflow,nanditav/15712-TensorFlow,hfp/tensorflow-xsmm,gojira/tensorflow,lukeiwanski/tensorflow-opencl,nightjean/Deep-Learning,snnn/tensorflow,girving/tensorflow,lakshayg/tensorflow,tensorflow/tensorflow-pywrap_saved_model,yufengg/tensorflow,cxxgtxy/tensorflow,tiagofrepereira2012/tensorflow,vrv/tensorflow,renyi533/tensorflow,thesuperzapper/tensorflow,odejesush/tensorflow,benoitsteiner/tensorflow-xsmm,tomasreimers/tensorflow-emscripten,JVillella/tensorflow,ishay2b/tensorflow,elingg/tensorflow,jalexvig/tensorflow,guschmue/tensorflow,johndpope/tensorflow,hfp/tensorflow-xsmm,mortada/tensorflow,ran5515/DeepDecision,ppwwyyxx/tensorflow,laosiaudi/tensorflow,xzturn/tensorflow,tiagofrepereira2012/tensorflow,karllessard/tensorflow,alsrgv/tensorflow,kchodorow/tensorflow,thesuperzapper/tensorflow,meteorcloudy/tensorflow,admcrae/tensorflow,JVillella/tensorflow,alshedivat/tensorflow,laosiaudi/tensorflow,frreiss/tensorflow-fred,pavelchristof/gomoku-ai,neilhan/tensorflow,sjperkins/tensorflow,jeffzheng1/tensorflow,scenarios/tensorflow,zycdragonball/tensorflow,aselle/tensorflow,neilhan/tensorflow,MoamerEncsConcordiaCa/tensorflow,gnieboer/tensorflow,AndreasMadsen/tensorflow,davidzchen/tensorflow,jostep/tensorflow,rdipietro/tensorflow,kchodorow/tensorflow,zasdfgbnm/tensorflow,xodus7/tensorflow,admcrae/tensorflow,mengxn/tensorflow,apark263/tensorflow,HKUST-SING/tensorflow,kchodorow/tensorflow,gojira/tensorflow,tiagofrepereira2012/tensorflow,elingg/tensorflow,guschmue/tensorflow,nanditav/15712-TensorFlow,taknevski/tensorflow-xsmm,tillahoffmann/tensorflow,sjperkins/tensorflow,tomasreimers/tensorflow-emscripten,lukeiwanski/tensorflow,xzturn/tensorflow,cancan101/tensorflow,Mistobaan/tensorflow,MostafaGazar/tensorflow,jostep/tensorflow,MycChiu/tensorflow,juharris/tensorflow,gautam1858/tensorflow,mavenlin/tensorflow,adit-chandra/tensorflow,benoitsteiner/tensorflow,jeffzheng1/tensorflow,chemelnucfin/tensorflow,ravindrapanda/tensorflow,tensorflow/tensorflow,girving/tensorflow,alshedivat/tensorflow,brchiu/tensorflow,JingJunYin/tensorflow,with-git/tensorflow,ibmsoe/tensorflow,MoamerEncsConcordiaCa/tensorflow,haeusser/tensorflow,gautam1858/tensorflow,guschmue/tensorflow,cg31/tensorflow,ville-k/tensorflow,ville-k/tensorflow,sandeepdsouza93/TensorFlow-15712,neilhan/tensorflow,apark263/tensorflow,yongtang/tensorflow,freedomtan/tensorflow,dancingdan/tensorflow,jostep/tensorflow,laosiaudi/tensorflow,yufengg/tensorflow,dyoung418/tensorflow,alivecor/tensorflow,MoamerEncsConcordiaCa/tensorflow,drpngx/tensorflow,jhaux/tensorflow,dyoung418/tensorflow,mdrumond/tensorflow,xodus7/tensorflow,chris-chris/tensorflow,adit-chandra/tensorflow,ychfan/tensorflow,pavelchristof/gomoku-ai,laosiaudi/tensorflow,laosiaudi/tensorflow,ZhangXinNan/tensorflow,gibiansky/tensorflow,elingg/tensorflow,alshedivat/tensorflow,sandeepgupta2k4/tensorflow,seanli9jan/tensorflow,kevin-coder/tensorflow-fork,mdrumond/tensorflow,XueqingLin/tensorflow,DavidNorman/tensorflow,gautam1858/tensorflow,jalexvig/tensorflow,Mistobaan/tensorflow,laszlocsomor/tensorflow,allenlavoie/tensorflow,ychfan/tensorflow,adit-chandra/tensorflow,Moriadry/tensorflow,seanli9jan/tensorflow,aam-at/tensorflow,jhseu/tensorflow,tensorflow/tensorflow-pywrap_saved_model,memo/tensorflow,eaplatanios/tensorflow,aselle/tensorflow,memo/tensorflow,benoitsteiner/tensorflow-opencl,lukeiwanski/tensorflow,sandeepgupta2k4/tensorflow,asimshankar/tensorflow,jhseu/tensorflow,thjashin/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,ppries/tensorflow,mortada/tensorflow,naturali/tensorflow,meteorcloudy/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,laszlocsomor/tensorflow,chenjun0210/tensorflow,eaplatanios/tensorflow,Bulochkin/tensorflow_pack,caisq/tensorflow,thjashin/tensorflow,MostafaGazar/tensorflow,nolanliou/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,Mistobaan/tensorflow,ravindrapanda/tensorflow,sandeepdsouza93/TensorFlow-15712,hfp/tensorflow-xsmm,AnishShah/tensorflow,ghchinoy/tensorflow,lukeiwanski/tensorflow,laszlocsomor/tensorflow,ychfan/tensorflow,juharris/tensorflow,DavidNorman/tensorflow,karllessard/tensorflow,chenjun0210/tensorflow,aselle/tensorflow,manazhao/tf_recsys,alisidd/tensorflow,anand-c-goog/tensorflow,alheinecke/tensorflow-xsmm,laszlocsomor/tensorflow,calebfoss/tensorflow,ppries/tensorflow,lukeiwanski/tensorflow,sjperkins/tensorflow,MostafaGazar/tensorflow,hsaputra/tensorflow,johndpope/tensorflow,tntnatbry/tensorflow,yongtang/tensorflow,jart/tensorflow,mixturemodel-flow/tensorflow,krikru/tensorflow-opencl,LUTAN/tensorflow,annarev/tensorflow,nanditav/15712-TensorFlow,frreiss/tensorflow-fred,sandeepgupta2k4/tensorflow,DCSaunders/tensorflow,aldian/tensorflow,Bulochkin/tensorflow_pack,rabipanda/tensorflow,kevin-coder/tensorflow-fork,RapidApplicationDevelopment/tensorflow,DavidNorman/tensorflow,Kongsea/tensorflow,brchiu/tensorflow,cxxgtxy/tensorflow,girving/tensorflow,markslwong/tensorflow,memo/tensorflow,paolodedios/tensorflow,vrv/tensorflow,maciekcc/tensorflow,pavelchristof/gomoku-ai,yaroslavvb/tensorflow,manipopopo/tensorflow,jostep/tensorflow,jalexvig/tensorflow,lukeiwanski/tensorflow-opencl,eaplatanios/tensorflow,andrewcmyers/tensorflow,tensorflow/tensorflow,xzturn/tensorflow,nolanliou/tensorflow,asimshankar/tensorflow,davidzchen/tensorflow,Mazecreator/tensorflow,Intel-Corporation/tensorflow,kobejean/tensorflow,manazhao/tf_recsys,arborh/tensorflow,AnishShah/tensorflow,MycChiu/tensorflow,pierreg/tensorflow,kamcpp/tensorflow,aselle/tensorflow,jbedorf/tensorflow,mengxn/tensorflow,chemelnucfin/tensorflow,krikru/tensorflow-opencl,girving/tensorflow,av8ramit/tensorflow,pcm17/tensorflow,chenjun0210/tensorflow,snnn/tensorflow,tensorflow/tensorflow-pywrap_saved_model,xodus7/tensorflow,ppwwyyxx/tensorflow,Carmezim/tensorflow,sjperkins/tensorflow,brchiu/tensorflow,laszlocsomor/tensorflow,AndreasMadsen/tensorflow,thjashin/tensorflow,tensorflow/tensorflow,abhitopia/tensorflow,zycdragonball/tensorflow,nolanliou/tensorflow,yongtang/tensorflow,ppwwyyxx/tensorflow,annarev/tensorflow,dendisuhubdy/tensorflow,lakshayg/tensorflow,seaotterman/tensorflow,allenlavoie/tensorflow,alsrgv/tensorflow,snnn/tensorflow,alivecor/tensorflow,markslwong/tensorflow,hsaputra/tensorflow,gautam1858/tensorflow,gunan/tensorflow,laosiaudi/tensorflow,XueqingLin/tensorflow,benoitsteiner/tensorflow-opencl,eerwitt/tensorflow,tiagofrepereira2012/tensorflow,jhaux/tensorflow,whn09/tensorflow,benoitsteiner/tensorflow,allenlavoie/tensorflow,taknevski/tensorflow-xsmm,sandeepdsouza93/TensorFlow-15712,JVillella/tensorflow,llhe/tensorflow,chris-chris/tensorflow,paolodedios/tensorflow,Intel-tensorflow/tensorflow,yaroslavvb/tensorflow,rabipanda/tensorflow,Carmezim/tensorflow,suiyuan2009/tensorflow,gojira/tensorflow,manjunaths/tensorflow,aselle/tensorflow,admcrae/tensorflow,jwlawson/tensorflow,ArtsiomCh/tensorflow,codrut3/tensorflow,apark263/tensorflow,jbedorf/tensorflow,eerwitt/tensorflow,unsiloai/syntaxnet-ops-hack,tensorflow/tensorflow,yanchen036/tensorflow,a-doumoulakis/tensorflow,xodus7/tensorflow,ageron/tensorflow,kamcpp/tensorflow,jart/tensorflow,renyi533/tensorflow,nightjean/Deep-Learning,alisidd/tensorflow,pcm17/tensorflow,meteorcloudy/tensorflow,Moriadry/tensorflow,chemelnucfin/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,renyi533/tensorflow,Bismarrck/tensorflow,rdipietro/tensorflow,AnishShah/tensorflow,nanditav/15712-TensorFlow,nikste/tensorflow,arborh/tensorflow,jbedorf/tensorflow,allenlavoie/tensorflow,ppries/tensorflow,JingJunYin/tensorflow,aam-at/tensorflow,MostafaGazar/tensorflow,theflofly/tensorflow,allenlavoie/tensorflow,unsiloai/syntaxnet-ops-hack,tongwang01/tensorflow,gunan/tensorflow,mrry/tensorflow,ville-k/tensorflow,alsrgv/tensorflow,manjunaths/tensorflow,wangyum/tensorflow,nburn42/tensorflow,abhitopia/tensorflow,mengxn/tensorflow,bowang/tensorflow,benoitsteiner/tensorflow,a-doumoulakis/tensorflow,eerwitt/tensorflow,nikste/tensorflow,suiyuan2009/tensorflow,alshedivat/tensorflow,dendisuhubdy/tensorflow,eerwitt/tensorflow,benoitsteiner/tensorflow,freedomtan/tensorflow,jbedorf/tensorflow,jhseu/tensorflow,kevin-coder/tensorflow-fork,kevin-coder/tensorflow-fork,jendap/tensorflow,Mistobaan/tensorflow,lukeiwanski/tensorflow-opencl,tornadozou/tensorflow,AnishShah/tensorflow,memo/tensorflow,mortada/tensorflow,dongjoon-hyun/tensorflow,DCSaunders/tensorflow,Carmezim/tensorflow,cxxgtxy/tensorflow,av8ramit/tensorflow,Moriadry/tensorflow,mrry/tensorflow,strint/tensorflow,benoitsteiner/tensorflow-xsmm,hsaputra/tensorflow,nburn42/tensorflow,kobejean/tensorflow,dongjoon-hyun/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,eaplatanios/tensorflow,nburn42/tensorflow,asimshankar/tensorflow,whn09/tensorflow,aam-at/tensorflow,memo/tensorflow,dongjoon-hyun/tensorflow,nightjean/Deep-Learning,tornadozou/tensorflow,tiagofrepereira2012/tensorflow,jart/tensorflow,caisq/tensorflow,gibiansky/tensorflow,karllessard/tensorflow,raymondxyang/tensorflow,Bulochkin/tensorflow_pack,aam-at/tensorflow,caisq/tensorflow,SnakeJenny/TensorFlow,tensorflow/tensorflow-pywrap_tf_optimizer,alshedivat/tensorflow,xzturn/tensorflow,wangyum/tensorflow,nburn42/tensorflow,sjperkins/tensorflow,Xeralux/tensorflow,lukeiwanski/tensorflow-opencl,alsrgv/tensorflow,seanli9jan/tensorflow,ageron/tensorflow,alistairlow/tensorflow,dongjoon-hyun/tensorflow,XueqingLin/tensorflow,krikru/tensorflow-opencl,apark263/tensorflow,MoamerEncsConcordiaCa/tensorflow,LUTAN/tensorflow,ZhangXinNan/tensorflow,adamtiger/tensorflow,eerwitt/tensorflow,ghchinoy/tensorflow,MostafaGazar/tensorflow,yanchen036/tensorflow,ArtsiomCh/tensorflow,taknevski/tensorflow-xsmm,martinwicke/tensorflow,paolodedios/tensorflow,hehongliang/tensorflow,maciekcc/tensorflow,thesuperzapper/tensorflow,sarvex/tensorflow,rdipietro/tensorflow,thesuperzapper/tensorflow,jhseu/tensorflow,ageron/tensorflow,martinwicke/tensorflow,petewarden/tensorflow,handroissuazo/tensorflow,jbedorf/tensorflow,xzturn/tensorflow,krikru/tensorflow-opencl,maciekcc/tensorflow,Carmezim/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,seaotterman/tensorflow,davidzchen/tensorflow,gnieboer/tensorflow,benoitsteiner/tensorflow-opencl,yanchen036/tensorflow,benoitsteiner/tensorflow-xsmm,karllessard/tensorflow,xzturn/tensorflow,apark263/tensorflow,ghchinoy/tensorflow,aldian/tensorflow,dendisuhubdy/tensorflow,JingJunYin/tensorflow,sjperkins/tensorflow,kevin-coder/tensorflow-fork,arborh/tensorflow,anand-c-goog/tensorflow,ishay2b/tensorflow,caisq/tensorflow,code-sauce/tensorflow,chris-chris/tensorflow,thjashin/tensorflow,Intel-Corporation/tensorflow,unsiloai/syntaxnet-ops-hack,nolanliou/tensorflow,nightjean/Deep-Learning,gojira/tensorflow,JingJunYin/tensorflow,asadziach/tensorflow,pierreg/tensorflow,asimshankar/tensorflow,abhitopia/tensorflow,seaotterman/tensorflow,ageron/tensorflow,mixturemodel-flow/tensorflow,horance-liu/tensorflow,mdrumond/tensorflow,jhseu/tensorflow,johndpope/tensorflow,dendisuhubdy/tensorflow,lukeiwanski/tensorflow-opencl,ishay2b/tensorflow,renyi533/tensorflow,asimshankar/tensorflow,naturali/tensorflow,code-sauce/tensorflow,markslwong/tensorflow,alsrgv/tensorflow,Moriadry/tensorflow,tensorflow/tensorflow-pywrap_saved_model,yaroslavvb/tensorflow,ravindrapanda/tensorflow,Intel-tensorflow/tensorflow,handroissuazo/tensorflow,SnakeJenny/TensorFlow,ppries/tensorflow,odejesush/tensorflow,dendisuhubdy/tensorflow,dyoung418/tensorflow,sandeepdsouza93/TensorFlow-15712,asadziach/tensorflow,tillahoffmann/tensorflow,arborh/tensorflow,kobejean/tensorflow,DavidNorman/tensorflow,freedomtan/tensorflow,wangyum/tensorflow,frreiss/tensorflow-fred,yanchen036/tensorflow,alisidd/tensorflow,hfp/tensorflow-xsmm,lukeiwanski/tensorflow,aselle/tensorflow,HKUST-SING/tensorflow,anilmuthineni/tensorflow,tongwang01/tensorflow | tensorflow/contrib/layers/__init__.py | tensorflow/contrib/layers/__init__.py | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Ops for building neural network layers, regularizers, summaries, etc.
## Higher level ops for building neural network layers.
This package provides several ops that take care of creating variables that are
used internally in a consistent way and provide the building blocks for many
common machine learning algorithms.
@@avg_pool2d
@@batch_norm
@@convolution2d
@@convolution2d_in_plane
@@convolution2d_transpose
@@flatten
@@fully_connected
@@max_pool2d
@@one_hot_encoding
@@repeat
@@separable_convolution2d
@@stack
@@unit_norm
Aliases for fully_connected which set a default activation function are
available: `relu`, `relu6` and `linear`.
## Regularizers
Regularization can help prevent overfitting. These have the signature
`fn(weights)`. The loss is typically added to `tf.GraphKeys.REGULARIZATION_LOSS`
@@apply_regularization
@@l1_regularizer
@@l2_regularizer
@@sum_regularizer
## Initializers
Initializers are used to initialize variables with sensible values given their
size, data type, and purpose.
@@xavier_initializer
@@xavier_initializer_conv2d
@@variance_scaling_initializer
## Optimization
Optimize weights given a loss.
@@optimize_loss
## Summaries
Helper functions to summarize specific variables or ops.
@@summarize_activation
@@summarize_tensor
@@summarize_tensors
@@summarize_collection
The layers module defines convenience functions `summarize_variables`,
`summarize_weights` and `summarize_biases`, which set the `collection` argument
of `summarize_collection` to `VARIABLES`, `WEIGHTS` and `BIASES`, respectively.
@@summarize_activations
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
# pylint: disable=unused-import,wildcard-import
from tensorflow.contrib.layers.python.layers import *
from tensorflow.python.util.all_util import make_all
__all__ = make_all(__name__)
| # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Ops for building neural network layers, regularizers, summaries, etc.
## Higher level ops for building neural network layers.
This package provides several ops that take care of creating variables that are
used internally in a consistent way and provide the building blocks for many
common machine learning algorithms.
@@convolution2d
@@fully_connected
Aliases for fully_connected which set a default activation function are
available: `relu`, `relu6` and `linear`.
## Regularizers
Regularization can help prevent overfitting. These have the signature
`fn(weights)`. The loss is typically added to `tf.GraphKeys.REGULARIZATION_LOSS`
@@apply_regularization
@@l1_regularizer
@@l2_regularizer
@@sum_regularizer
## Initializers
Initializers are used to initialize variables with sensible values given their
size, data type, and purpose.
@@xavier_initializer
@@xavier_initializer_conv2d
@@variance_scaling_initializer
## Optimization
Optimize weights given a loss.
@@optimize_loss
## Summaries
Helper functions to summarize specific variables or ops.
@@summarize_activation
@@summarize_tensor
@@summarize_tensors
@@summarize_collection
The layers module defines convenience functions `summarize_variables`,
`summarize_weights` and `summarize_biases`, which set the `collection` argument
of `summarize_collection` to `VARIABLES`, `WEIGHTS` and `BIASES`, respectively.
@@summarize_activations
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
# pylint: disable=unused-import,wildcard-import
from tensorflow.contrib.layers.python.layers import *
from tensorflow.python.util.all_util import make_all
__all__ = make_all(__name__)
| apache-2.0 | Python |
f247e3d31b25908a46cb64754166fca2a421edd9 | Update P1_lookingBusy.py added docstring | JoseALermaIII/python-tutorials,JoseALermaIII/python-tutorials | pythontutorials/books/AutomateTheBoringStuff/Ch18/Projects/P1_lookingBusy.py | pythontutorials/books/AutomateTheBoringStuff/Ch18/Projects/P1_lookingBusy.py | """Looking busy
Write a script to nudge your mouse cursor slightly every ten seconds.
The nudge should be small enough so that it won’t get in the way if
you do happen to need to use your computer while the script is running.
"""
import pyautogui, time
def main():
while True:
time.sleep(10)
pyautogui.moveRel(1, 0)
# If run directly (instead of imported), run main()
if __name__ == '__main__':
main()
| # Write a script to nudge your mouse cursor slightly every ten seconds.
# The nudge should be small enough so that it won’t get in the way if
# you do happen to need to use your computer while the script is running.
import pyautogui, time
def main():
while True:
time.sleep(10)
pyautogui.moveRel(1, 0)
# If run directly (instead of imported), run main()
if __name__ == '__main__':
main()
| mit | Python |
e8e3a7daaa1e6afc4c8f9853f6db77dcd557f4d3 | Call 'lower()' on the input | instagrambot/instabot,ohld/instabot,instagrambot/instabot | examples/black-whitelist/whitelist_generator.py | examples/black-whitelist/whitelist_generator.py | """
instabot example
Whitelist generator: generates a list of users which
will not be unfollowed.
"""
import sys
import os
import random
sys.path.append(os.path.join(sys.path[0], '../../'))
from instabot import Bot
bot = Bot()
bot.login()
print("This script will generate whitelist.txt file with users"
"who will not be unfollowed by bot. "
"Press Y to add user to whitelist. Ctrl + C to exit.")
your_following = bot.following
already_whitelisted = bot.read_list_from_file("whitelist.txt")
rest_users = list(set(your_following) - set(already_whitelisted))
random.shuffle(rest_users)
with open("whitelist.txt", "a") as f:
for user_id in rest_users:
user_info = bot.get_user_info(user_id)
print(user_info["username"])
print(user_info["full_name"])
input_line = sys.stdin.readline().lower()
if "y" in input_line.lower():
f.write(str(user_id) + "\n")
print("ADDED.\r")
| """
instabot example
Whitelist generator: generates a list of users which
will not be unfollowed.
"""
import sys
import os
import random
sys.path.append(os.path.join(sys.path[0], '../../'))
from instabot import Bot
bot = Bot()
bot.login()
print("This script will generate whitelist.txt file with users"
"who will not be unfollowed by bot. "
"Press Y to add user to whitelist. Ctrl + C to exit.")
your_following = bot.following
already_whitelisted = bot.read_list_from_file("whitelist.txt")
rest_users = list(set(your_following) - set(already_whitelisted))
random.shuffle(rest_users)
with open("whitelist.txt", "a") as f:
for user_id in rest_users:
user_info = bot.get_user_info(user_id)
print(user_info["username"])
print(user_info["full_name"])
input_line = sys.stdin.readline().lower()
if "y" in input_line:
f.write(str(user_id) + "\n")
print("ADDED.\r")
| apache-2.0 | Python |
c2bbc9b3c977377083dacf475715ed38a6def539 | Update package_utils.py | inetprocess/docker-lamp,inetprocess/docker-lamp,edyan/stakkr,edyan/stakkr,inetprocess/docker-lamp,edyan/stakkr | stakkr/package_utils.py | stakkr/package_utils.py | """Gives useful information about the current virtualenv, files locations if
stakkr is installed as a package or directly cloned"""
import os
import sys
from distutils.sysconfig import get_config_vars, get_python_lib
def get_venv_basedir():
"""Returns the base directory of the virtualenv, useful to read configuration and plugins"""
exec_prefix = get_config_vars()['exec_prefix']
has_real_prefix = hasattr(sys, 'real_prefix') or (hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix)
if has_real_prefix is False or (hasattr(sys, 'real_prefix') and exec_prefix.startswith(sys.real_prefix)):
raise EnvironmentError('You must be in a virtual environment')
return os.path.abspath(get_config_vars()['exec_prefix'] + '/../')
def get_dir(dirname: str):
"""Detects if stakkr is a package or a clone and gives the right path for a directory"""
staticdir = os.path.dirname(os.path.realpath(__file__)) + '/' + dirname
if os.path.isdir(staticdir) is True:
return staticdir
return get_python_lib() + '/stakkr/' + dirname
def get_file(dirname: str, filename: str):
"""Detects if stakkr is a package or a clone and gives the right path for a file"""
return get_dir(dirname) + '/' + filename.lstrip('/')
| """Gives useful information about the current virtualenv, files locations if
stakkr is installed as a package or directly cloned"""
import os
import sys
from distutils.sysconfig import get_config_vars, get_python_lib
def get_venv_basedir():
"""Returns the base directory of the virtualenv, useful to read configuration and plugins"""
exec_prefix = get_config_vars()['exec_prefix']
if hasattr(sys, 'real_prefix') is False or exec_prefix.startswith(sys.real_prefix):
raise EnvironmentError('You must be in a virtual environment')
return os.path.abspath(get_config_vars()['exec_prefix'] + '/../')
def get_dir(dirname: str):
"""Detects if stakkr is a package or a clone and gives the right path for a directory"""
staticdir = os.path.dirname(os.path.realpath(__file__)) + '/' + dirname
if os.path.isdir(staticdir) is True:
return staticdir
return get_python_lib() + '/stakkr/' + dirname
def get_file(dirname: str, filename: str):
"""Detects if stakkr is a package or a clone and gives the right path for a file"""
return get_dir(dirname) + '/' + filename.lstrip('/')
| apache-2.0 | Python |
9917150d995bcb4956afdec73256319bd7d042d4 | Update ELU layer | explosion/thinc,explosion/thinc,spacy-io/thinc,explosion/thinc,explosion/thinc,spacy-io/thinc,spacy-io/thinc | thinc/neural/_classes/elu.py | thinc/neural/_classes/elu.py | from .affine import Affine
from ... import describe
from ...describe import Dimension, Synapses, Biases
class ELU(Affine):
def predict(self, input__BI):
output__BO = Affine.predict(self, input__BI)
self.ops.elu(output__BO, inplace=True)
return output__BO
def begin_update(self, input__BI):
output__BO, finish_affine = Affine.begin_update(self, input__BI)
output_copy = self.ops.xp.ascontiguousarray(output__BO, dtype='f')
self.ops.elu(output_copy, inplace=True)
def finish_update(gradient):
gradient = self.ops.xp.ascontiguousarray(gradient, dtype='f')
self.ops.backprop_elu(gradient, output_copy, inplace=True)
return finish_affine(gradient)
output__BO[:] = output_copy
return output__BO, finish_update
| from .affine import Affine
from ... import describe
from ...describe import Dimension, Synapses, Biases
class ELU(Affine):
def predict(self, input__BI):
output__BO = Affine.predict(self, input__BI)
self.ops.elu(output__BO, inplace=True)
return output__BO
def begin_update(self, input__BI):
output__BO, finish_affine = Affine.begin_update(self, input__BI)
output_copy = self.ops.xp.ascontiguousarray(output__BO, dtype='f')
self.ops.elu(output_copy, inplace=True)
def finish_update(gradient):
gradient = self.ops.xp.ascontiguousarray(gradient, dtype='f')
self.ops.backprop_elu(gradient, output_copy, inplace=True)
return finish_affine(gradient, *args, **kwargs)
output__BO[:] = output_copy
return output__BO, finish_update
| mit | Python |
02616c3edbe3cd5abf18cc9b8ece7e627898776b | Fix RemovedInDjango19Warning | HonzaKral/django-threadedcomments,HonzaKral/django-threadedcomments | threadedcomments/__init__.py | threadedcomments/__init__.py | """
Change the attributes you want to customize
"""
# following PEP 440
__version__ = "1.0"
def get_model():
from threadedcomments.models import ThreadedComment
return ThreadedComment
def get_form():
from threadedcomments.forms import ThreadedCommentForm
return ThreadedCommentForm
| """
Change the attributes you want to customize
"""
from threadedcomments.models import ThreadedComment
from threadedcomments.forms import ThreadedCommentForm
# following PEP 440
__version__ = "1.0"
def get_model():
return ThreadedComment
def get_form():
return ThreadedCommentForm
| bsd-3-clause | Python |
b575ea3b68cdc8022e4490217a5321f6da75bb1f | Fix to function to check if grid engine is present | benedictpaten/jobTree,BD2KGenomics/slugflow,cooketho/jobTree,BD2KGenomics/toil-old,BD2KGenomics/slugflow,harvardinformatics/jobTree | src/common.py | src/common.py | """Wrapper functions for running the various programs in the jobTree package.
"""
#Copyright (C) 2011 by Benedict Paten (benedictpaten@gmail.com)
#
#Permission is hereby granted, free of charge, to any person obtaining a copy
#of this software and associated documentation files (the "Software"), to deal
#in the Software without restriction, including without limitation the rights
#to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#copies of the Software, and to permit persons to whom the Software is
#furnished to do so, subject to the following conditions:
#
#The above copyright notice and this permission notice shall be included in
#all copies or substantial portions of the Software.
#
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
#THE SOFTWARE.
from jobTree.src.bioio import logger
from jobTree.src.bioio import system
def runJobTreeStats(jobTree, outputFile):
system("jobTreeStats --jobTree %s --outputFile %s" % (jobTree, outputFile))
logger.info("Ran the job-tree stats command apparently okay")
def gridEngineIsInstalled():
"""Returns True if grid-engine is installed, else False.
"""
try:
return system("qstat -help") == 0
except RuntimeError:
return False
def parasolIsInstalled():
"""Returns True if parasol is installed, else False.
"""
try:
return system("parasol status") == 0
except RuntimeError:
return False
| """Wrapper functions for running the various programs in the jobTree package.
"""
#Copyright (C) 2011 by Benedict Paten (benedictpaten@gmail.com)
#
#Permission is hereby granted, free of charge, to any person obtaining a copy
#of this software and associated documentation files (the "Software"), to deal
#in the Software without restriction, including without limitation the rights
#to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#copies of the Software, and to permit persons to whom the Software is
#furnished to do so, subject to the following conditions:
#
#The above copyright notice and this permission notice shall be included in
#all copies or substantial portions of the Software.
#
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
#THE SOFTWARE.
from jobTree.src.bioio import logger
from jobTree.src.bioio import system
def runJobTreeStats(jobTree, outputFile):
system("jobTreeStats --jobTree %s --outputFile %s" % (jobTree, outputFile))
logger.info("Ran the job-tree stats command apparently okay")
def gridEngineIsInstalled():
"""Returns True if grid-engine is installed, else False.
"""
try:
return system("qstat -version") == 0
except RuntimeError:
return False
def parasolIsInstalled():
"""Returns True if parasol is installed, else False.
"""
try:
return system("parasol status") == 0
except RuntimeError:
return False
| mit | Python |
40aae5592603005a84ee2222eaa07a879f99309d | fix test | aleneum/kogniserver,aleneum/kogniserver | tests/test_bridge.py | tests/test_bridge.py | # -*- coding: utf-8 -*-
from unittest import TestCase
from kogniserver.services import create_rsb_config
from kogniserver.pubsub import PubSubBridge
from mock import MagicMock
class TestKogniServerBridge(TestCase):
def setUp(self):
self.config = create_rsb_config()
def tearDown(self):
self.b.deactivate()
#def __init__(self, rsb_scope, rsb_config, wamp, message_type, mode=BIDIRECTIONAL, wamp_scope=None):
def test_to_wamp(self):
self.b = PubSubBridge('/foo/bar', self.config, MagicMock(), 'string', PubSubBridge.RSB_TO_WAMP)
self.assertIsNone(self.b.rsb_publisher)
self.assertEqual(self.b.rsb_scope, '/foo/bar')
self.assertEqual(self.b.wamp_scope, 'foo.bar')
self.assertIsNotNone(self.b.rsb_listener)
def test_from_wamp(self):
self.b = PubSubBridge('/foo/bar', self.config, MagicMock(), 'string', PubSubBridge.WAMP_TO_RSB)
self.assertIsNone(self.b.rsb_listener)
self.assertEqual(self.b.rsb_scope, '/foo/bar')
self.assertEqual(self.b.wamp_scope, 'foo.bar')
self.assertIsNotNone(self.b.rsb_publisher)
def test_bidirectional(self):
self.b = PubSubBridge('/foo/bar', self.config, MagicMock(), 'string', PubSubBridge.BIDIRECTIONAL)
self.assertIsNotNone(self.b.rsb_listener)
self.assertEqual(self.b.rsb_scope, '/foo/bar')
self.assertEqual(self.b.wamp_scope, 'foo.bar')
self.assertIsNotNone(self.b.rsb_publisher) | # -*- coding: utf-8 -*-
from unittest import TestCase
from kogniserver.services import create_rsb_config, Bridge
from mock import MagicMock
class TestKogniServerBridge(TestCase):
def setUp(self):
self.config = create_rsb_config()
def tearDown(self):
self.b.deactivate()
#def __init__(self, rsb_scope, rsb_config, wamp, message_type, mode=BIDIRECTIONAL, wamp_scope=None):
def test_to_wamp(self):
self.b = Bridge('/foo/bar', self.config, MagicMock(), 'string', Bridge.RSB_TO_WAMP)
self.assertIsNone(self.b.rsb_publisher)
self.assertEqual(self.b.rsb_scope, '/foo/bar')
self.assertEqual(self.b.wamp_scope, 'foo.bar')
self.assertIsNotNone(self.b.rsb_listener)
def test_from_wamp(self):
self.b = Bridge('/foo/bar', self.config, MagicMock(), 'string', Bridge.WAMP_TO_RSB)
self.assertIsNone(self.b.rsb_listener)
self.assertEqual(self.b.rsb_scope, '/foo/bar')
self.assertEqual(self.b.wamp_scope, 'foo.bar')
self.assertIsNotNone(self.b.rsb_publisher)
def test_bidirectional(self):
self.b = Bridge('/foo/bar', self.config, MagicMock(), 'string', Bridge.BIDIRECTIONAL)
self.assertIsNotNone(self.b.rsb_listener)
self.assertEqual(self.b.rsb_scope, '/foo/bar')
self.assertEqual(self.b.wamp_scope, 'foo.bar')
self.assertIsNotNone(self.b.rsb_publisher) | mit | Python |
aee7b9b50c27bc9f645316c9422b3b187e54e192 | set backend to avoid errors in Travis | mroberge/hydrofunctions | tests/test_charts.py | tests/test_charts.py | # -*- coding: utf-8 -*-
"""
test_charts.py
Tests for the charts.py module.
"""
from __future__ import absolute_import, print_function
import unittest
import matplotlib
# Recommended that I use this line to avoid errors in TravisCI
# See https://matplotlib.org/faq/howto_faq.html
# Basically, matplotlib usually uses an 'X11 connection' by default; Travis CI
# does not have this configured, so you need to set your backend explicitly.
matplotlib.use('Agg')
import pandas as pd
from hydrofunctions import charts
dummy = {'col1': [1, 2, 3, 38, 23, 1, 19],
'col2': [3, 4, 45, 23, 2, 4, 76]}
class TestFlowDuration(unittest.TestCase):
def test_charts_flowduration_exists(self):
expected = pd.DataFrame(data=dummy)
actual_fig, actual_ax = charts.flow_duration(expected)
self.assertIsInstance(actual_fig, matplotlib.figure.Figure)
self.assertIsInstance(actual_ax, matplotlib.axes.Axes)
def test_charts_flowduration_defaults(self):
expected = pd.DataFrame(data=dummy)
actual_fig, actual_ax = charts.flow_duration(expected)
actual_xscale = actual_ax.xaxis.get_scale()
actual_yscale = actual_ax.yaxis.get_scale()
actual_ylabel = actual_ax.yaxis.get_label_text()
actual_marker = actual_ax.get_lines()[0].get_marker()
self.assertEqual(actual_xscale, 'logit')
self.assertEqual(actual_yscale, 'log')
self.assertEqual(actual_ylabel, 'Discharge')
self.assertEqual(actual_marker, '.')
def test_charts_flowduration_accepts_params(self):
expected = pd.DataFrame(data=dummy)
params = {'xscale': 'linear',
'yscale': 'linear',
'ylabel': 'test value',
'symbol': ','}
actual_fig, actual_ax = charts.flow_duration(expected, **params)
actual_xscale = actual_ax.xaxis.get_scale()
actual_yscale = actual_ax.yaxis.get_scale()
actual_ylabel = actual_ax.yaxis.get_label_text()
actual_marker = actual_ax.get_lines()[0].get_marker()
self.assertEqual(actual_xscale, 'linear')
self.assertEqual(actual_yscale, 'linear')
self.assertEqual(actual_ylabel, 'test value')
self.assertEqual(actual_marker, ',')
if __name__ == '__main__':
unittest.main(verbosity=2)
| # -*- coding: utf-8 -*-
"""
test_charts.py
Tests for the charts.py module.
"""
from __future__ import absolute_import, print_function
import unittest
import matplotlib
import pandas as pd
from hydrofunctions import charts
dummy = {'col1': [1, 2, 3, 38, 23, 1, 19],
'col2': [3, 4, 45, 23, 2, 4, 76]}
class TestFlowDuration(unittest.TestCase):
def test_charts_flowduration_exists(self):
expected = pd.DataFrame(data=dummy)
actual_fig, actual_ax = charts.flow_duration(expected)
self.assertIsInstance(actual_fig, matplotlib.figure.Figure)
self.assertIsInstance(actual_ax, matplotlib.axes.Axes)
def test_charts_flowduration_defaults(self):
expected = pd.DataFrame(data=dummy)
actual_fig, actual_ax = charts.flow_duration(expected)
actual_xscale = actual_ax.xaxis.get_scale()
actual_yscale = actual_ax.yaxis.get_scale()
actual_ylabel = actual_ax.yaxis.get_label_text()
actual_marker = actual_ax.get_lines()[0].get_marker()
self.assertEqual(actual_xscale, 'logit')
self.assertEqual(actual_yscale, 'log')
self.assertEqual(actual_ylabel, 'Discharge')
self.assertEqual(actual_marker, '.')
def test_charts_flowduration_accepts_params(self):
expected = pd.DataFrame(data=dummy)
params = {'xscale': 'linear',
'yscale': 'linear',
'ylabel': 'test value',
'symbol': ','}
actual_fig, actual_ax = charts.flow_duration(expected, **params)
actual_xscale = actual_ax.xaxis.get_scale()
actual_yscale = actual_ax.yaxis.get_scale()
actual_ylabel = actual_ax.yaxis.get_label_text()
actual_marker = actual_ax.get_lines()[0].get_marker()
self.assertEqual(actual_xscale, 'linear')
self.assertEqual(actual_yscale, 'linear')
self.assertEqual(actual_ylabel, 'test value')
self.assertEqual(actual_marker, ',')
if __name__ == '__main__':
unittest.main(verbosity=2)
| mit | Python |
7faffbef68faf268bd310d07df1c24368379dfb9 | Fix PEP8 for errorck | aidancully/rust,jashank/rust,mdinger/rust,carols10cents/rust,XMPPwocky/rust,hauleth/rust,aidancully/rust,AerialX/rust-rt-minimal,miniupnp/rust,aneeshusa/rust,philyoon/rust,mdinger/rust,mvdnes/rust,richo/rust,sae-bom/rust,rprichard/rust,zachwick/rust,bombless/rust,TheNeikos/rust,mahkoh/rust,untitaker/rust,TheNeikos/rust,GBGamer/rust,rohitjoshi/rust,zachwick/rust,ebfull/rust,victorvde/rust,pelmers/rust,carols10cents/rust,mdinger/rust,mahkoh/rust,mahkoh/rust,carols10cents/rust,kwantam/rust,untitaker/rust,jroesch/rust,jroesch/rust,mdinger/rust,graydon/rust,jroesch/rust,jroesch/rust,nwin/rust,nwin/rust,reem/rust,GBGamer/rust,krzysz00/rust,dwillmer/rust,ebfull/rust,aepsil0n/rust,nwin/rust,andars/rust,dwillmer/rust,graydon/rust,carols10cents/rust,aepsil0n/rust,jroesch/rust,mihneadb/rust,zubron/rust,rprichard/rust,vhbit/rust,seanrivera/rust,jashank/rust,GBGamer/rust,untitaker/rust,philyoon/rust,aneeshusa/rust,untitaker/rust,dwillmer/rust,pshc/rust,seanrivera/rust,graydon/rust,cllns/rust,rprichard/rust,zubron/rust,mahkoh/rust,ruud-v-a/rust,miniupnp/rust,TheNeikos/rust,vhbit/rust,jroesch/rust,robertg/rust,vhbit/rust,sae-bom/rust,KokaKiwi/rust,robertg/rust,rohitjoshi/rust,KokaKiwi/rust,l0kod/rust,bombless/rust,untitaker/rust,AerialX/rust-rt-minimal,avdi/rust,avdi/rust,reem/rust,AerialX/rust,ruud-v-a/rust,richo/rust,mihneadb/rust,andars/rust,rohitjoshi/rust,zaeleus/rust,mvdnes/rust,AerialX/rust-rt-minimal,krzysz00/rust,richo/rust,KokaKiwi/rust,graydon/rust,andars/rust,nwin/rust,avdi/rust,andars/rust,jashank/rust,aidancully/rust,hauleth/rust,mihneadb/rust,reem/rust,andars/rust,ruud-v-a/rust,jashank/rust,ebfull/rust,carols10cents/rust,carols10cents/rust,jashank/rust,philyoon/rust,l0kod/rust,zaeleus/rust,zaeleus/rust,kwantam/rust,AerialX/rust-rt-minimal,ruud-v-a/rust,zubron/rust,mihneadb/rust,mahkoh/rust,avdi/rust,dwillmer/rust,krzysz00/rust,l0kod/rust,ejjeong/rust,aidancully/rust,krzysz00/rust,KokaKiwi/rust,cllns/rust,bombless/rust,victorvde/rust,TheNeikos/rust,pshc/rust,mdinger/rust,zaeleus/rust,omasanori/rust,graydon/rust,nwin/rust,victorvde/rust,miniupnp/rust,gifnksm/rust,krzysz00/rust,dwillmer/rust,vhbit/rust,victorvde/rust,pelmers/rust,miniupnp/rust,zachwick/rust,pshc/rust,aneeshusa/rust,pshc/rust,mdinger/rust,sae-bom/rust,XMPPwocky/rust,rohitjoshi/rust,GBGamer/rust,robertg/rust,kwantam/rust,richo/rust,omasanori/rust,AerialX/rust,cllns/rust,dwillmer/rust,sae-bom/rust,philyoon/rust,jroesch/rust,rohitjoshi/rust,seanrivera/rust,bombless/rust,KokaKiwi/rust,AerialX/rust,AerialX/rust-rt-minimal,reem/rust,seanrivera/rust,gifnksm/rust,GBGamer/rust,robertg/rust,zubron/rust,ejjeong/rust,vhbit/rust,mihneadb/rust,pelmers/rust,AerialX/rust-rt-minimal,dwillmer/rust,l0kod/rust,cllns/rust,mahkoh/rust,jroesch/rust,aepsil0n/rust,omasanori/rust,aneeshusa/rust,AerialX/rust,pshc/rust,jashank/rust,aidancully/rust,sae-bom/rust,ebfull/rust,TheNeikos/rust,mvdnes/rust,ruud-v-a/rust,vhbit/rust,vhbit/rust,aepsil0n/rust,l0kod/rust,seanrivera/rust,robertg/rust,reem/rust,nwin/rust,avdi/rust,hauleth/rust,miniupnp/rust,pshc/rust,aneeshusa/rust,XMPPwocky/rust,pshc/rust,sae-bom/rust,TheNeikos/rust,KokaKiwi/rust,victorvde/rust,mvdnes/rust,aepsil0n/rust,hauleth/rust,victorvde/rust,omasanori/rust,zaeleus/rust,rprichard/rust,zubron/rust,ebfull/rust,rprichard/rust,miniupnp/rust,pelmers/rust,XMPPwocky/rust,GBGamer/rust,l0kod/rust,bombless/rust,miniupnp/rust,XMPPwocky/rust,ejjeong/rust,kwantam/rust,cllns/rust,miniupnp/rust,gifnksm/rust,AerialX/rust,mvdnes/rust,robertg/rust,gifnksm/rust,hauleth/rust,jashank/rust,GBGamer/rust,hauleth/rust,gifnksm/rust,kwantam/rust,ejjeong/rust,nwin/rust,dwillmer/rust,richo/rust,ejjeong/rust,ejjeong/rust,zaeleus/rust,aepsil0n/rust,philyoon/rust,omasanori/rust,zachwick/rust,zachwick/rust,pelmers/rust,mvdnes/rust,gifnksm/rust,philyoon/rust,l0kod/rust,aidancully/rust,ruud-v-a/rust,zubron/rust,andars/rust,zachwick/rust,seanrivera/rust,l0kod/rust,rprichard/rust,pshc/rust,avdi/rust,richo/rust,cllns/rust,aneeshusa/rust,ebfull/rust,reem/rust,AerialX/rust,XMPPwocky/rust,nwin/rust,jashank/rust,mihneadb/rust,bombless/rust,untitaker/rust,pelmers/rust,kwantam/rust,GBGamer/rust,zubron/rust,vhbit/rust,rohitjoshi/rust,krzysz00/rust,zubron/rust,omasanori/rust,graydon/rust | src/etc/errorck.py | src/etc/errorck.py | # Copyright 2015 The Rust Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution and at
# http://rust-lang.org/COPYRIGHT.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
# Digs error codes out of files named 'diagnostics.rs' across
# the tree, and ensures thare are no duplicates.
import sys
import os
import re
src_dir = sys.argv[1]
errcode_map = {}
error_re = re.compile("(E\d\d\d\d)")
for (dirpath, dirnames, filenames) in os.walk(src_dir):
if "src/test" in dirpath or "src/llvm" in dirpath:
# Short circuit for fast
continue
for filename in filenames:
if filename != "diagnostics.rs":
continue
path = os.path.join(dirpath, filename)
with open(path, 'r') as f:
for line_num, line in enumerate(f, start=1):
match = error_re.search(line)
if match:
errcode = match.group(1)
new_record = [(errcode, path, line_num, line)]
existing = errcode_map.get(errcode)
if existing is not None:
# This is a dupe
errcode_map[errcode] = existing + new_record
else:
errcode_map[errcode] = new_record
errors = False
all_errors = []
for errcode, entries in errcode_map.items():
all_errors.append(entries[0][0])
if len(entries) > 1:
print("error: duplicate error code " + errcode)
for entry in entries:
print("{1}: {2}\n{3}".format(*entry))
errors = True
print("{0} error codes".format(len(errcode_map)))
print("highest error code: " + max(all_errors))
if errors:
sys.exit(1)
| # Copyright 2015 The Rust Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution and at
# http://rust-lang.org/COPYRIGHT.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
# Digs error codes out of files named 'diagnostics.rs' across
# the tree, and ensures thare are no duplicates.
import sys, os, re
src_dir = sys.argv[1]
errcode_map = {}
error_re = re.compile("(E\d\d\d\d)")
for (dirpath, dirnames, filenames) in os.walk(src_dir):
if "src/test" in dirpath or "src/llvm" in dirpath:
# Short circuit for fast
continue
for filename in filenames:
if filename != "diagnostics.rs":
continue
path = os.path.join(dirpath, filename)
with open(path, 'r') as f:
for line_num, line in enumerate(f, start=1):
match = error_re.search(line)
if match:
errcode = match.group(1)
new_record = [(errcode, path, line_num, line)]
existing = errcode_map.get(errcode)
if existing is not None:
# This is a dupe
errcode_map[errcode] = existing + new_record
else:
errcode_map[errcode] = new_record
errors = False
all_errors = []
for errcode, entries in errcode_map.items():
all_errors.append(entries[0][0])
if len(entries) > 1:
print("error: duplicate error code " + errcode)
for entry in entries:
print("{1}: {2}\n{3}".format(*entry))
errors = True
print("{0} error codes".format(len(errcode_map)))
print("highest error code: " + max(all_errors))
if errors:
sys.exit(1)
| apache-2.0 | Python |
e0c0b7676d9e1f2a05610c04c41959ea678c6d8b | Adjust startChar and endChar indices | clarinsi/reldi-lib | restore_all.py | restore_all.py | #!/usr/bin/python
from reldi.restorer import DiacriticRestorer
from getpass import getpass
import json
import argparse
import os
user='user'
coding='utf8'
def write(result,file):
final=set()
text=result['text']
for token,norm in zip(result['tokens']['token'],result['orthography']['correction']):
if token['text']!=norm['text']:
text=text[:int(token['start'])-1]+norm['text']+text[int(token['end']):]
file.write(text.encode(coding))
file.close()
if __name__ == "__main__":
parser=argparse.ArgumentParser(description='Diacritic restorer for Slovene, Croatian and Serbian\nUses the ReLDI API. For access rights visit http://nl.ijs.si/services/.')
parser.add_argument('lang',help='language of the text(s)',choices=['sl','hr','sr'])
parser.add_argument('path',help='Path to the file or folder containing files (with .txt extension) to be tagged and lemmatised.')
args=parser.parse_args()
passwd=getpass('Input password for user "'+user+'": ')
restorer = DiacriticRestorer(args.lang)
restorer.authorize(user, passwd)
if os.path.isfile(args.path):
write(json.loads(restorer.restore(open(args.path).read().decode(coding).encode('utf8'))),open(args.path+'.redi','w'))
else:
for file in os.listdir(args.path):
if file.endswith('.txt'):
write(json.loads(restorer.restore(open(os.path.join(args.path,file)).read().decode(coding).encode('utf8'))),open(os.path.join(args.path,file)+'.redi','w'))
| #!/usr/bin/python
from reldi.restorer import DiacriticRestorer
from getpass import getpass
import json
import argparse
import os
user='user'
coding='utf8'
def write(result,file):
final=set()
text=result['text']
for token,norm in zip(result['tokens']['token'],result['orthography']['correction']):
if token['text']!=norm['text']:
text=text[:int(token['startChar'])-1]+norm['text']+text[int(token['endChar']):]
file.write(text.encode(coding))
file.close()
if __name__ == "__main__":
parser=argparse.ArgumentParser(description='Diacritic restorer for Slovene, Croatian and Serbian\nUses the ReLDI API. For access rights visit http://nl.ijs.si/services/.')
parser.add_argument('lang',help='language of the text(s)',choices=['sl','hr','sr'])
parser.add_argument('path',help='Path to the file or folder containing files (with .txt extension) to be tagged and lemmatised.')
args=parser.parse_args()
passwd=getpass('Input password for user "'+user+'": ')
restorer = DiacriticRestorer(args.lang)
restorer.authorize(user, passwd)
if os.path.isfile(args.path):
write(json.loads(restorer.restore(open(args.path).read().decode(coding).encode('utf8'))),open(args.path+'.redi','w'))
else:
for file in os.listdir(args.path):
if file.endswith('.txt'):
write(json.loads(restorer.restore(open(os.path.join(args.path,file)).read().decode(coding).encode('utf8'))),open(os.path.join(args.path,file)+'.redi','w'))
| apache-2.0 | Python |
9868cd4e810d6545d292fe1480d8fa413d00a8cd | truncate long title | hasgeek/funnel,hasgeek/funnel,hasgeek/funnel,hasgeek/funnel,hasgeek/funnel | funnel/extapi/boxoffice.py | funnel/extapi/boxoffice.py | # -*- coding: utf-8 -*-
import requests
from ..util import extract_twitter_handle
from .. import app
__all__ = ['Boxoffice']
class Boxoffice(object):
"""
An interface that enables data retrieval from Boxoffice.
"""
def __init__(self, access_token):
self.access_token = access_token
self.base_url = app.config['BOXOFFICE_SERVER']
def url_for(self, endpoint):
return self.base_url.format(endpoint)
def get_orders(self, ic):
resp = requests.get(self.base_url + '/ic/{ic}/orders?access_token={token}'.format(ic=ic, token=self.access_token))
return resp.json().get('orders')
def get_tickets(self, ic):
tickets = []
for order in self.get_orders(ic):
for line_item in order.get('line_items'):
if line_item.get('assignee'):
tickets.append({
'fullname': line_item.get('assignee').get('fullname', ''),
'email': line_item.get('assignee').get('email'),
'phone': line_item.get('assignee').get('phone', ''),
'twitter': extract_twitter_handle(line_item.get('assignee').get('twitter', '')),
'company': line_item.get('assignee').get('company'),
'city': line_item.get('assignee').get('city', ''),
'job_title': line_item.get('assignee').get('jobtitle', ''),
'ticket_no': unicode(line_item.get('line_item_seq')),
'ticket_type': line_item.get('item', {}).get('title', '')[:80],
'order_no': unicode(order.get('invoice_no')),
})
return tickets
| # -*- coding: utf-8 -*-
import requests
from ..util import extract_twitter_handle
from .. import app
__all__ = ['Boxoffice']
class Boxoffice(object):
"""
An interface that enables data retrieval from Boxoffice.
"""
def __init__(self, access_token):
self.access_token = access_token
self.base_url = app.config['BOXOFFICE_SERVER']
def url_for(self, endpoint):
return self.base_url.format(endpoint)
def get_orders(self, ic):
resp = requests.get(self.base_url + '/ic/{ic}/orders?access_token={token}'.format(ic=ic, token=self.access_token))
return resp.json().get('orders')
def get_tickets(self, ic):
tickets = []
for order in self.get_orders(ic):
for line_item in order.get('line_items'):
if line_item.get('assignee'):
tickets.append({
'fullname': line_item.get('assignee').get('fullname', ''),
'email': line_item.get('assignee').get('email'),
'phone': line_item.get('assignee').get('phone', ''),
'twitter': extract_twitter_handle(line_item.get('assignee').get('twitter', '')),
'company': line_item.get('assignee').get('company'),
'city': line_item.get('assignee').get('city', ''),
'job_title': line_item.get('assignee').get('jobtitle', ''),
'ticket_no': unicode(line_item.get('line_item_seq')),
'ticket_type': line_item.get('item', {}).get('title'),
'order_no': unicode(order.get('invoice_no')),
})
return tickets
| agpl-3.0 | Python |
13bb256fade89f3882d0a08786f4cb02dbdcaed4 | Fix multidispatch type inference for Group | amolenaar/gaphor,amolenaar/gaphor | gaphor/diagram/grouping.py | gaphor/diagram/grouping.py | """
Grouping functionality allows to nest one item within another item (parent
item). This is useful in several use cases
- artifact deployed within a node
- a class within a package or a component
- composite structures (i.e. component within a node)
The grouping adapters has to implement three methods, see `AbstractGroup`
class.
It is important to note, that grouping adapters can be queried before
instance of an item to be grouped is created. This happens when item
is about to be created. Therefore `AbstractGroup.can_contain` has
to be aware that `AbstractGroup.item` can be null.
"""
from __future__ import annotations
from typing import Type
from typing_extensions import Protocol
import abc
from gaphor import UML
from gaphor.misc.generic.multidispatch import multidispatch, FunctionDispatcher
# TODO: I think this should have been called Namespacing or something similar,
# since that's the modeling concept.
class AbstractGroup(metaclass=abc.ABCMeta):
"""
Base class for grouping UML objects, i.e.
interactions contain lifelines and components contain classes objects.
Base class for grouping UML objects.
:Attributes:
parent
Parent item, which groups other items.
item
Item to be grouped.
"""
def __init__(self, parent: object, item: object) -> None:
self.parent = parent
self.item = item
def can_contain(self) -> bool:
"""
Determine if parent can contain item.
"""
return True
@abc.abstractmethod
def group(self) -> None:
"""
Perform grouping of items.
"""
@abc.abstractmethod
def ungroup(self) -> None:
"""
Perform ungrouping of items.
"""
# Work around issue https://github.com/python/mypy/issues/3135 (Class decorators are not type checked)
# This definition, along with the the ignore below, seems to fix the behaviour for mypy at least.
# @multidispatch(object, object)
class NoGrouping(AbstractGroup):
def can_contain(self) -> bool:
return False
def group(self) -> None:
pass
def ungroup(self) -> None:
pass
Group: FunctionDispatcher[Type[AbstractGroup]] = multidispatch(object, object)(
NoGrouping
)
# Until we can deal with types (esp. typing.Any) we use this as a workaround:
Group.register(None, object)(NoGrouping)
| """
Grouping functionality allows to nest one item within another item (parent
item). This is useful in several use cases
- artifact deployed within a node
- a class within a package or a component
- composite structures (i.e. component within a node)
The grouping adapters has to implement three methods, see `AbstractGroup`
class.
It is important to note, that grouping adapters can be queried before
instance of an item to be grouped is created. This happens when item
is about to be created. Therefore `AbstractGroup.can_contain` has
to be aware that `AbstractGroup.item` can be null.
"""
from __future__ import annotations
from typing import Type
from typing_extensions import Protocol
import abc
from gaphor import UML
from gaphor.misc.generic.multidispatch import multidispatch, FunctionDispatcher
# TODO: I think this should have been called Namespacing or something similar,
# since that's the modeling concept.
class AbstractGroup(metaclass=abc.ABCMeta):
"""
Base class for grouping UML objects, i.e.
interactions contain lifelines and components contain classes objects.
Base class for grouping UML objects.
:Attributes:
parent
Parent item, which groups other items.
item
Item to be grouped.
"""
def __init__(self, parent: object, item: object) -> None:
self.parent = parent
self.item = item
def can_contain(self) -> bool:
"""
Determine if parent can contain item.
"""
return True
@abc.abstractmethod
def group(self) -> None:
"""
Perform grouping of items.
"""
@abc.abstractmethod
def ungroup(self) -> None:
"""
Perform ungrouping of items.
"""
# Work around issue https://github.com/python/mypy/issues/3135 (Class decorators are not type checked)
# This definition, along with the the ignore below, seems to fix the behaviour for mypy at least.
# @multidispatch(object, object)
class NoGrouping(AbstractGroup):
def can_contain(self) -> bool:
return False
def group(self) -> None:
pass
def ungroup(self) -> None:
pass
Group = multidispatch(object, object)(NoGrouping)
# Until we can deal with types (esp. typing.Any) we use this as a workaround:
Group.register(None, object)(NoGrouping)
| lgpl-2.1 | Python |
518711c908eece77f0f989930fdbada5a01797d6 | Convert test_people.py to py.test test functions | edx/repo-tools,edx/repo-tools | tests/test_people.py | tests/test_people.py | """Tests of people.py"""
import datetime
from people import People
SAMPLE_PEOPLE = """\
ned:
institution: edX
before:
2012-10-01:
institution: freelance
2010-12-01:
institution: Hewlett Packard
2007-05-01:
institution: Tabblo
2006-01-09:
institution: Kubi Software
2001-09-24:
institution: Blue Ripple
db:
institution: Optimists United
"""
def test_main_singleton_is_a_singleton():
p1 = People.people()
p2 = People.people()
assert p1 is p2
def test_get_institution():
people = People.from_string(SAMPLE_PEOPLE)
assert people.get("ned")['institution'] == "edX"
assert people.get("db")['institution'] == "Optimists United"
def test_get_non_person():
people = People.from_string(SAMPLE_PEOPLE)
ghost = people.get("ghost")
assert ghost['institution'] == "unsigned"
assert ghost['agreement'] == "none"
def test_history():
people = People.from_string(SAMPLE_PEOPLE)
def ned_then(year):
ned = people.get("ned", datetime.datetime(year, 1, 1))
return ned['institution']
assert ned_then(2015) == "edX"
assert ned_then(2014) == "edX"
assert ned_then(2013) == "edX"
assert ned_then(2012) == "freelance"
assert ned_then(2011) == "freelance"
assert ned_then(2010) == "Hewlett Packard"
assert ned_then(2009) == "Hewlett Packard"
assert ned_then(2008) == "Hewlett Packard"
assert ned_then(2007) == "Tabblo"
assert ned_then(2006) == "Kubi Software"
assert ned_then(2005) == "Kubi Software"
assert ned_then(2004) == "Kubi Software"
assert ned_then(2003) == "Kubi Software"
assert ned_then(2002) == "Kubi Software"
assert ned_then(2001) == "Blue Ripple"
assert ned_then(2000) == "Blue Ripple"
| """Tests of people.py"""
import datetime
import unittest
from people import People
SAMPLE_PEOPLE = """\
ned:
institution: edX
before:
2012-10-01:
institution: freelance
2010-12-01:
institution: Hewlett Packard
2007-05-01:
institution: Tabblo
2006-01-09:
institution: Kubi Software
2001-09-24:
institution: Blue Ripple
db:
institution: Optimists United
"""
class PeopleTest(unittest.TestCase):
def test_main_singleton_is_a_singleton(self):
p1 = People.people()
p2 = People.people()
self.assertIs(p1, p2)
def test_get_institution(self):
people = People.from_string(SAMPLE_PEOPLE)
self.assertEqual(people.get("ned")['institution'], "edX")
self.assertEqual(people.get("db")['institution'], "Optimists United")
def test_get_non_person(self):
people = People.from_string(SAMPLE_PEOPLE)
ghost = people.get("ghost")
self.assertEqual(ghost['institution'], "unsigned")
self.assertEqual(ghost['agreement'], "none")
def test_history(self):
people = People.from_string(SAMPLE_PEOPLE)
def ned_then(year):
ned = people.get("ned", datetime.datetime(year, 1, 1))
return ned['institution']
self.assertEqual(ned_then(2015), "edX")
self.assertEqual(ned_then(2014), "edX")
self.assertEqual(ned_then(2013), "edX")
self.assertEqual(ned_then(2012), "freelance")
self.assertEqual(ned_then(2011), "freelance")
self.assertEqual(ned_then(2010), "Hewlett Packard")
self.assertEqual(ned_then(2009), "Hewlett Packard")
self.assertEqual(ned_then(2008), "Hewlett Packard")
self.assertEqual(ned_then(2007), "Tabblo")
self.assertEqual(ned_then(2006), "Kubi Software")
self.assertEqual(ned_then(2005), "Kubi Software")
self.assertEqual(ned_then(2004), "Kubi Software")
self.assertEqual(ned_then(2003), "Kubi Software")
self.assertEqual(ned_then(2002), "Kubi Software")
self.assertEqual(ned_then(2001), "Blue Ripple")
self.assertEqual(ned_then(2000), "Blue Ripple")
| apache-2.0 | Python |
60056d8573d4c8a9f69dc7f3e41af91ca79c9e8b | Remove an unnecessary call to pyramid.threadlocal.get_current_request() | fedora-infra/github2fedmsg,pombredanne/github2fedmsg,fedora-infra/github2fedmsg,pombredanne/github2fedmsg | github2fedmsg/traversal.py | github2fedmsg/traversal.py | from hashlib import md5
import tw2.core as twc
import github2fedmsg.models
import github2fedmsg.widgets
from pyramid.security import authenticated_userid
def make_root(request):
return RootApp(request)
class RootApp(dict):
__name__ = None
__parent__ = None
def __init__(self, request):
dict.__init__(self)
self.request = request
self.static = dict(
api=ApiApp(),
)
def __getitem__(self, key):
if key in self.static:
return self.static[key]
query = github2fedmsg.models.User.query.filter_by(username=key)
if query.count() != 1:
raise KeyError("No such user")
user = query.one()
# TODO -- use __acl__ machinery some day
userid = authenticated_userid(self.request)
# TODO -- check if this is an org that I own
show_buttons = (userid == user.username)
return UserApp(user=query.one(), show_buttons=show_buttons)
class ApiApp(object):
def __getitem__(self, key):
query = github2fedmsg.models.User.query.filter_by(username=key)
if query.count() != 1:
raise KeyError("No such user")
return query.one()
class UserApp(github2fedmsg.widgets.UserProfile):
__name__ = None
__parent__ = RootApp
@classmethod
def __getitem__(self, key):
for repo in self.user.repos:
if repo.name == key:
return repo
raise KeyError
class APISuccess(object):
def __init__(self, data):
self.data = data
| from hashlib import md5
import tw2.core as twc
import github2fedmsg.models
import github2fedmsg.widgets
import pyramid.threadlocal
from pyramid.security import authenticated_userid
def make_root(request):
return RootApp(request)
class RootApp(dict):
__name__ = None
__parent__ = None
def __init__(self, request):
dict.__init__(self)
self.request = request
self.static = dict(
api=ApiApp(),
)
def __getitem__(self, key):
if key in self.static:
return self.static[key]
query = github2fedmsg.models.User.query.filter_by(username=key)
if query.count() != 1:
raise KeyError("No such user")
user = query.one()
# TODO -- use __acl__ machinery some day
request = pyramid.threadlocal.get_current_request()
userid = authenticated_userid(request)
# TODO -- check if this is an org that I own
show_buttons = (userid == user.username)
return UserApp(user=query.one(), show_buttons=show_buttons)
class ApiApp(object):
def __getitem__(self, key):
query = github2fedmsg.models.User.query.filter_by(username=key)
if query.count() != 1:
raise KeyError("No such user")
return query.one()
class UserApp(github2fedmsg.widgets.UserProfile):
__name__ = None
__parent__ = RootApp
@classmethod
def __getitem__(self, key):
for repo in self.user.repos:
if repo.name == key:
return repo
raise KeyError
class APISuccess(object):
def __init__(self, data):
self.data = data
| agpl-3.0 | Python |
9812fce48153955e179755ea7a58413c3bee182f | Update stamp.py | ralphwetzel/theonionbox,ralphwetzel/theonionbox,ralphwetzel/theonionbox,ralphwetzel/theonionbox,ralphwetzel/theonionbox | theonionbox/stamp.py | theonionbox/stamp.py | __title__ = 'The Onion Box'
__description__ = 'Dashboard to monitor Tor node operations.'
__version__ = '20.2'
__stamp__ = '20200119|095654'
| __title__ = 'The Onion Box'
__description__ = 'Dashboard to monitor Tor node operations.'
__version__ = '20.2rc1'
__stamp__ = '20200119|095654' | mit | Python |
56bc9c79522fd534f2a756bd5a18193635e2adae | Fix missing mock and rename variable | gogoair/foremast,gogoair/foremast | tests/test_default_security_groups.py | tests/test_default_security_groups.py | """Test default Security Groups."""
from unittest import mock
from foremast.securitygroup.create_securitygroup import SpinnakerSecurityGroup
@mock.patch('foremast.securitygroup.create_securitygroup.get_details')
@mock.patch('foremast.securitygroup.create_securitygroup.get_properties')
def test_default_security_groups(mock_properties, mock_details):
"""Make sure default Security Groups are added to the ingress rules."""
ingress = {
'test_app': [
{
'start_port': 30,
'end_port': 30,
},
],
}
mock_properties.return_value = {
'security_group': {
'ingress': ingress,
'description': '',
},
}
test_sg = {
'myapp': [
{'start_port': '22', 'end_port': '22', 'protocol': 'tcp'},
]
}
with mock.patch.dict('foremast.securitygroup.create_securitygroup.DEFAULT_SECURITYGROUP_RULES', test_sg):
sg = SpinnakerSecurityGroup()
ingress = sg.update_default_securitygroup_rules()
assert 'myapp' in ingress
| """Test default Security Groups."""
from unittest import mock
from foremast.securitygroup.create_securitygroup import SpinnakerSecurityGroup
@mock.patch('foremast.securitygroup.create_securitygroup.get_properties')
def test_default_security_groups(mock_properties):
"""Make sure default Security Groups are added to the ingress rules."""
ingress = {
'test_app': [
{
'start_port': 30,
'end_port': 30,
},
],
}
mock_properties.return_value = {
'security_group': {
'ingress': ingress,
'description': '',
},
}
test_sg = {'myapp': [{'start_port': '22', 'end_port': '22', 'protocol': 'tcp' }]}
with mock.patch.dict('foremast.securitygroup.create_securitygroup.DEFAULT_SECURITYGROUP_RULES', test_sg):
test_sg = SpinnakerSecurityGroup()
ingress = test_sg.update_default_securitygroup_rules()
assert 'myapp' in ingress
| apache-2.0 | Python |
6cf4901344033b50c6e56a9c878a7e89f33d3880 | Fix 2to3 fixers to work with Python 3. | ProgVal/Limnoria-test,Ban3/Limnoria,ProgVal/Limnoria-test,Ban3/Limnoria | 2to3/fix_reload.py | 2to3/fix_reload.py | # Based on fix_intern.py. Original copyright:
# Copyright 2006 Georg Brandl.
# Licensed to PSF under a Contributor Agreement.
"""Fixer for intern().
intern(s) -> sys.intern(s)"""
# Local imports
from lib2to3 import pytree
from lib2to3 import fixer_base
from lib2to3.fixer_util import Name, Attr, touch_import
class FixReload(fixer_base.BaseFix):
BM_compatible = True
order = "pre"
PATTERN = """
power< 'reload'
after=any*
>
"""
def transform(self, node, results):
touch_import('imp', 'reload', node)
return node
| # Based on fix_intern.py. Original copyright:
# Copyright 2006 Georg Brandl.
# Licensed to PSF under a Contributor Agreement.
"""Fixer for intern().
intern(s) -> sys.intern(s)"""
# Local imports
from lib2to3 import pytree
from lib2to3 import fixer_base
from lib2to3.fixer_util import Name, Attr, touch_import
class FixReload(fixer_base.BaseFix):
BM_compatible = True
order = "pre"
PATTERN = """
power< 'reload'
after=any*
>
"""
def transform(self, node, results):
touch_import('imp', u'reload', node)
return node
| bsd-3-clause | Python |
c1a291c362df384b11bdc9b829b75c5ecd505aa9 | add config for new session cam | BerlinUnited/NaoTH,BerlinUnited/NaoTH,BerlinUnited/NaoTH,BerlinUnited/NaoTH,BerlinUnited/NaoTH,BerlinUnited/NaoTH,BerlinUnited/NaoTH | Utils/py/GoPro/config.py | Utils/py/GoPro/config.py | from goprocam.constants import *
# NaoCam / a1b0a1b0a1
# NAOCAM_2 / a1b0a1b0a1
# NAOCAM_3 / a1b0a1b0a1
# GP26329941 / family3887 / D6:B9:D4:D7:B7:40
# GP26297683 / epic0546 / F8:D2:E9:F0:AC:0B
ssid = 'GP26329941'
passwd = 'family3887'
mac = 'D6:B9:D4:D7:B7:40'
retries = -1
# if the following configs are set, the cam tries to set them before starting recording
# if you want to configure these setting manually, comment them out
fps = Video.FrameRate.FR30
fov = Video.Fov.Wide
resolution = Video.Resolution.R1080p
# all available teams
teams = {
0: "Invisibles",
1: "UT Austin Villa",
2: "Austrian Kangaroos",
3: "Bembelbots",
4: "Berlin United",
5: "B-Human",
6: "Cerberus",
7: "DAInamite",
8: "Dutch Nao Team",
9: "Edinferno",
10: "Kouretes",
11: "MiPal",
12: "Nao Devils Dortmund",
13: "Nao-Team HTWK",
14: "Northern Bites",
15: "NTU RoboPAL",
16: "RoboCanes",
17: "RoboEireann",
18: "UNSW Sydney",
19: "SPQR Team",
20: "TJArk",
21: "UChile Robotics Team",
22: "UPennalizers",
23: "Crude Scientists",
24: "HULKs",
26: "MRL-SPL",
27: "Philosopher",
28: "Rimal Team",
29: "SpelBots",
30: "Team-NUST",
31: "UnBeatables",
32: "UTH-CAR",
33: "NomadZ",
34: "SPURT",
35: "Blue Spider",
36: "Camellia Dragons",
37: "JoiTech-SPL",
38: "Linköping Humanoids",
39: "WrightOcean",
40: "Mars",
41: "Aztlan Team",
42: "CMSingle",
43: "TeamSP",
44: "Luxembourg United",
90: "DoBerMan",
91: "B-HULKs",
92: "Swift-Ark",
93: "Team USA"
} | from goprocam.constants import *
# NaoCam / a1b0a1b0a1
# NAOCAM_2 / a1b0a1b0a1
# GP26329941 / family3887 / D6:B9:D4:D7:B7:40
# GP26297683 / epic0546 / F8:D2:E9:F0:AC:0B
ssid = 'GP26329941'
passwd = 'family3887'
mac = 'D6:B9:D4:D7:B7:40'
retries = -1
# if the following configs are set, the cam tries to set them before starting recording
# if you want to configure these setting manually, comment them out
fps = Video.FrameRate.FR30
fov = Video.Fov.Wide
resolution = Video.Resolution.R1080p
# all available teams
teams = {
0: "Invisibles",
1: "UT Austin Villa",
2: "Austrian Kangaroos",
3: "Bembelbots",
4: "Berlin United",
5: "B-Human",
6: "Cerberus",
7: "DAInamite",
8: "Dutch Nao Team",
9: "Edinferno",
10: "Kouretes",
11: "MiPal",
12: "Nao Devils Dortmund",
13: "Nao-Team HTWK",
14: "Northern Bites",
15: "NTU RoboPAL",
16: "RoboCanes",
17: "RoboEireann",
18: "UNSW Sydney",
19: "SPQR Team",
20: "TJArk",
21: "UChile Robotics Team",
22: "UPennalizers",
23: "Crude Scientists",
24: "HULKs",
26: "MRL-SPL",
27: "Philosopher",
28: "Rimal Team",
29: "SpelBots",
30: "Team-NUST",
31: "UnBeatables",
32: "UTH-CAR",
33: "NomadZ",
34: "SPURT",
35: "Blue Spider",
36: "Camellia Dragons",
37: "JoiTech-SPL",
38: "Linköping Humanoids",
39: "WrightOcean",
40: "Mars",
41: "Aztlan Team",
42: "CMSingle",
43: "TeamSP",
44: "Luxembourg United",
90: "DoBerMan",
91: "B-HULKs",
92: "Swift-Ark",
93: "Team USA"
} | apache-2.0 | Python |
fb1d55de93ae2ef49feea278a4c99013a690c858 | Fix unicode_literals issue | theatlantic/django-south,theatlantic/django-south | south/utils/__init__.py | south/utils/__init__.py | """
Generally helpful utility functions.
"""
def _ask_for_it_by_name(name):
"Returns an object referenced by absolute path."
bits = str(name).split(".")
## what if there is no absolute reference?
if len(bits) > 1:
modulename = ".".join(bits[:-1])
else:
modulename = bits[0]
module = __import__(modulename, {}, {}, bits[-1])
if len(bits) == 1:
return module
else:
return getattr(module, bits[-1])
def ask_for_it_by_name(name):
"Returns an object referenced by absolute path. (Memoised outer wrapper)"
if name not in ask_for_it_by_name.cache:
ask_for_it_by_name.cache[name] = _ask_for_it_by_name(name)
return ask_for_it_by_name.cache[name]
ask_for_it_by_name.cache = {}
def get_attribute(item, attribute):
"""
Like getattr, but recursive (i.e. you can ask for 'foo.bar.yay'.)
"""
value = item
for part in attribute.split("."):
value = getattr(value, part)
return value
def auto_through(field):
"Returns if the M2M class passed in has an autogenerated through table or not."
return (
# Django 1.0/1.1
(not field.rel.through)
or
# Django 1.2+
getattr(getattr(field.rel.through, "_meta", None), "auto_created", False)
)
def auto_model(model):
"Returns if the given model was automatically generated."
return getattr(model._meta, "auto_created", False)
def memoize(function):
"Standard memoization decorator."
name = function.__name__
_name = '_' + name
def method(self):
if not hasattr(self, _name):
value = function(self)
setattr(self, _name, value)
return getattr(self, _name)
def invalidate():
if hasattr(method, _name):
delattr(method, _name)
method.__name__ = function.__name__
method.__doc__ = function.__doc__
method._invalidate = invalidate
return method
| """
Generally helpful utility functions.
"""
def _ask_for_it_by_name(name):
"Returns an object referenced by absolute path."
bits = name.split(".")
## what if there is no absolute reference?
if len(bits)>1:
modulename = ".".join(bits[:-1])
else:
modulename=bits[0]
module = __import__(modulename, {}, {}, bits[-1])
if len(bits) == 1:
return module
else:
return getattr(module, bits[-1])
def ask_for_it_by_name(name):
"Returns an object referenced by absolute path. (Memoised outer wrapper)"
if name not in ask_for_it_by_name.cache:
ask_for_it_by_name.cache[name] = _ask_for_it_by_name(name)
return ask_for_it_by_name.cache[name]
ask_for_it_by_name.cache = {}
def get_attribute(item, attribute):
"""
Like getattr, but recursive (i.e. you can ask for 'foo.bar.yay'.)
"""
value = item
for part in attribute.split("."):
value = getattr(value, part)
return value
def auto_through(field):
"Returns if the M2M class passed in has an autogenerated through table or not."
return (
# Django 1.0/1.1
(not field.rel.through)
or
# Django 1.2+
getattr(getattr(field.rel.through, "_meta", None), "auto_created", False)
)
def auto_model(model):
"Returns if the given model was automatically generated."
return getattr(model._meta, "auto_created", False)
def memoize(function):
"Standard memoization decorator."
name = function.__name__
_name = '_' + name
def method(self):
if not hasattr(self, _name):
value = function(self)
setattr(self, _name, value)
return getattr(self, _name)
def invalidate():
if hasattr(method, _name):
delattr(method, _name)
method.__name__ = function.__name__
method.__doc__ = function.__doc__
method._invalidate = invalidate
return method
| apache-2.0 | Python |
ec5d0bde476e71ff26a6f249017abef86781144f | Load 'database_backup' model | adhoc-dev/odoo-infrastructure,bmya/odoo-infrastructure,yelizariev/odoo-infrastructure,dvitme/odoo-infrastructure,online-sanaullah/odoo-infrastructure,ingadhoc/infrastructure,ingadhoc/odoo-infrastructure,fevxie/odoo-infrastructure,aek/odoo-infrastructure,steingabelgaard/odoo-infrastructure | infrastructure/__init__.py | infrastructure/__init__.py | # -*- coding: utf-8 -*-
import command
import database
import database_type
import database_backup
import db_back_up_policy
import db_filter
import environment
import environment_repository
import environment_version
import instance
import instance_host
import mailserver
import partner
import repository
import repository_branch
import server
import server_change
import server_configuration
import server_configuration_command
import server_hostname
import server_repository
import server_service
import service
import service_command
import wizard
| # -*- coding: utf-8 -*-
import command
import database
import database_type
import db_back_up_policy
import db_filter
import environment
import environment_repository
import environment_version
import instance
import instance_host
import mailserver
import partner
import repository
import repository_branch
import server
import server_change
import server_configuration
import server_configuration_command
import server_hostname
import server_repository
import server_service
import service
import service_command
import wizard
| agpl-3.0 | Python |
3bbcdc74ad277c71cdb08bedd5e1da0eeddc436e | Add delete override for Submission model with directory path debug msg | ktalik/django-subeval,ktalik/django-subeval | submission/models.py | submission/models.py | import time
import uuid
import shutil
from django.db import models
from django.template.defaultfilters import filesizeformat
from django.contrib.auth.models import User
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
import forms
DB_NAME_LENGTH = 100
class Team(models.Model):
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
name = models.CharField(max_length=DB_NAME_LENGTH)
user = models.ForeignKey(User)
passed = models.BooleanField()
avatar = models.ImageField(upload_to='avatars', null=True, blank=True)
def __unicode__(self):
return self.name
def submission_directory_path(instance, filename):
# file will be uploaded to MEDIA_ROOT/submissions/<id>/<filename>
return 'submissions/{0}/{1}'.format(instance.id, filename)
class Submission(models.Model):
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
team = models.ForeignKey(Team)
user = models.ForeignKey(User)
date = models.DateTimeField(default=timezone.now)
package = models.FileField(
upload_to=submission_directory_path,
blank=True,
null=True
)
# User-specified command for code execution
command = models.CharField(max_length=1000)
def __unicode__(self):
return unicode(self.date) + ' ' + unicode(self.id) + \
' - submitted by: ' + unicode(self.team)
def delete(self, *args, **kwargs):
for result in Result.objects.filter(submission_id__exact = self.id):
result.delete()
dir_path = os.path.dirname(self.package.path)
print "Submission directory path to delete: {}".format(dir_path)
# shutil.rmtree(dir_path, ignore_errors=True)
super(Submission, self).delete(*args, **kwargs)
class Result(models.Model):
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
submission_id = models.UUIDField(primary_key=False, default=uuid.uuid4, editable=False)
report = models.CharField(max_length=1000, default='{}')
log = models.CharField(max_length=10000, default='')
def __unicode__(self):
return unicode(self.id) + ' - with report: ' + unicode(self.report)
| import time
import uuid
from django.db import models
from django.template.defaultfilters import filesizeformat
from django.contrib.auth.models import User
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
import forms
DB_NAME_LENGTH = 100
class Team(models.Model):
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
name = models.CharField(max_length=DB_NAME_LENGTH)
user = models.ForeignKey(User)
passed = models.BooleanField()
avatar = models.ImageField(upload_to='avatars', null=True, blank=True)
def __unicode__(self):
return self.name
def submission_directory_path(instance, filename):
# file will be uploaded to MEDIA_ROOT/submissions/<id>/<filename>
return 'submissions/{0}/{1}'.format(instance.id, filename)
class Submission(models.Model):
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
team = models.ForeignKey(Team)
user = models.ForeignKey(User)
date = models.DateTimeField(default=timezone.now)
package = models.FileField(
upload_to=submission_directory_path,
blank=True,
null=True
)
# User-specified command for code execution
command = models.CharField(max_length=1000)
def __unicode__(self):
return unicode(self.date) + ' ' + unicode(self.id) + \
' - submitted by: ' + unicode(self.team)
class Result(models.Model):
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
submission_id = models.UUIDField(primary_key=False, default=uuid.uuid4, editable=False)
report = models.CharField(max_length=1000, default='{}')
log = models.CharField(max_length=10000, default='')
def __unicode__(self):
return unicode(self.id) + ' - with report: ' + unicode(self.report)
| agpl-3.0 | Python |
515e1dc6fd3741afe8df7ce7a7c94b1ffa2db8c3 | Remove useless imports from `tmserver/__init__.py` | TissueMAPS/TmServer | tmserver/__init__.py | tmserver/__init__.py | # TmServer - TissueMAPS server application.
# Copyright (C) 2016 Markus D. Herrmann, University of Zurich and Robin Hafen
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from tmserver.version import __version__
from tmserver.config import ServerConfig
cfg = ServerConfig()
| # TmServer - TissueMAPS server application.
# Copyright (C) 2016 Markus D. Herrmann, University of Zurich and Robin Hafen
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import tmserver.model
import tmserver.serialize
from tmserver.version import __version__
from tmserver.config import ServerConfig
cfg = ServerConfig()
| agpl-3.0 | Python |
d53d344e770bbeeb839323500f526400692e554b | Fix test failing on Python 2.7 | h2oai/h2o-3,h2oai/h2o-3,michalkurka/h2o-3,h2oai/h2o-3,michalkurka/h2o-3,michalkurka/h2o-3,michalkurka/h2o-3,michalkurka/h2o-3,michalkurka/h2o-3,h2oai/h2o-3,h2oai/h2o-3,michalkurka/h2o-3,h2oai/h2o-3,h2oai/h2o-3,h2oai/h2o-3 | h2o-py/tests/testdir_jira/pyunit_pubdev_6394.py | h2o-py/tests/testdir_jira/pyunit_pubdev_6394.py | # -*- coding: utf-8 -*-
from h2o import H2OFrame
from tests import pyunit_utils
def pubdev_6394():
# JUnit tests are to be found in RapidsTest class
data = [['location'],
['X県 A市'],
['X県 B市'],
['X県 B市'],
['Y県 C市'],
['Y県 C市']]
originalFrame = H2OFrame(data, header=True, column_types=['enum'])
assert originalFrame.type('location') == 'enum'
assert originalFrame.categories() == [u'X県 A市', u'X県 B市', u'Y県 C市']
# Reduce cardinality of 'location' column to 2 by reducing existing categorical values to ['X県','Y県']
expectedCategories = [u'X県', u'Y県']
transformedFrame = originalFrame['location'].gsub(' .*', '')
print(transformedFrame)
assert transformedFrame.ncols == 1
assert transformedFrame.nrows == originalFrame.nrows
assert transformedFrame.type('C1') == 'enum'
assert transformedFrame['C1'].categories() == expectedCategories
# Test gsub without changing the cardinality
data = [['location'],
['ab'],
['ac'],
['ad'],
['ae'],
['af']]
originalFrame = H2OFrame(data, header=True, column_types=['enum'])
assert originalFrame.type('location') == 'enum'
assert originalFrame.categories() == ['ab', 'ac', 'ad', 'ae', 'af']
expectedCategories = ['b', 'c', 'd', 'e', 'f']
transformedFrame = originalFrame['location'].gsub('a', '')
print(transformedFrame)
assert transformedFrame.ncols == 1
assert transformedFrame.nrows == originalFrame.nrows
assert transformedFrame.type('C1') == 'enum'
assert transformedFrame['C1'].categories() == expectedCategories
if __name__ == "__main__":
pyunit_utils.standalone_test(pubdev_6394)
else:
pubdev_6394()
| from h2o import H2OFrame
from tests import pyunit_utils
def pubdev_6394():
# JUnit tests are to be found in RapidsTest class
data = [['location'],
['X県 A市'],
['X県 B市'],
['X県 B市'],
['Y県 C市'],
['Y県 C市']]
originalFrame = H2OFrame(data, header=True, column_types=['enum'])
assert originalFrame.type('location') == 'enum'
assert originalFrame.categories() == ['X県 A市', 'X県 B市', 'Y県 C市']
# Reduce cardinality of 'location' column to 2 by reducing existing categorical values to ['X県','Y県']
expectedCategories = ['X県', 'Y県']
transformedFrame = originalFrame['location'].gsub(' .*', '')
print(transformedFrame)
assert transformedFrame.ncols == 1
assert transformedFrame.nrows == originalFrame.nrows
assert transformedFrame.type('C1') == 'enum'
assert transformedFrame['C1'].categories() == expectedCategories
# Test gsub without changing the cardinality
data = [['location'],
['ab'],
['ac'],
['ad'],
['ae'],
['af']]
originalFrame = H2OFrame(data, header=True, column_types=['enum'])
assert originalFrame.type('location') == 'enum'
assert originalFrame.categories() == ['ab', 'ac','ad', 'ae', 'af']
expectedCategories = ['b', 'c', 'd', 'e', 'f']
transformedFrame = originalFrame['location'].gsub('a', '')
print(transformedFrame)
assert transformedFrame.ncols == 1
assert transformedFrame.nrows == originalFrame.nrows
assert transformedFrame.type('C1') == 'enum'
assert transformedFrame['C1'].categories() == expectedCategories
if __name__ == "__main__":
pyunit_utils.standalone_test(pubdev_6394)
else:
pubdev_6394()
| apache-2.0 | Python |
6e000645b909970416fbe9f1b37af658eee5202a | Update event.py | davidbstein/moderator,davidbstein/moderator,davidbstein/moderator,davidbstein/moderator | src/model/event.py | src/model/event.py | import secure
from model.helpers import (
r2d,
DB,
PermissionError,
)
from model.user import User
from model.org import Org
class Event:
def __init__(self):
raise Exception("This class is a db wrapper and should not be instantiated.")
@classmethod
def get(cls, event_id, user_email, **__):
user = User.get(user_email)
query = DB.events.select(
(DB.events.columns.id==event_id) &
(DB.events.columns.domain==user['domain'])
)
event = DB.ex(query).fetchone()
if not event:
raise PermissionError("No event found in the set of events user has access to")
return r2d(event)
@classmethod
def get_all_for_domain(cls, domain, override_auth=False):
assert override_auth
query = DB.events.select(
(DB.events.columns.domain==domain) &
(DB.events.columns.visible==1)
)
return map(r2d, DB.ex(query))
@classmethod
def lookup(cls, event_lookup_id, user_email=None, override_auth=False):
query = DB.events.select(
DB.events.columns.lookup_id==event_lookup_id
)
event = DB.ex(query).fetchone()
if not override_auth:
if event.domain != User.get(user_email)['domain']:
raise PermissionError("No event found in the set of events user has access to")
return r2d(event)
@classmethod
def create(cls, title, user_email, description=None, **__):
title = title
user = User.get(user_email)
org = Org.get(user['domain'], user_email)
unique_hash = "".join(str(hex(ord(b)))[2:] for b in secure.token_bytes(16))
new_event = dict(
owner_email=user_email,
domain=user['domain'],
lookup_id=unique_hash,
moderators=list(set(org['moderators'] + [user_email])),
title=title,
description=description or title,
)
command = DB.events.insert(new_event).returning(*DB.events.columns)
return r2d(DB.ex(command).fetchone())
@classmethod
def update(cls, event_id, desc=None, moderators=None, **__):
raise NotImplementedError()
| import secure
from model.helpers import (
r2d,
DB,
PermissionError,
)
from model.user import User
from model.org import Org
class Event:
def __init__(self):
raise Exception("This class is a db wrapper and should not be instantiated.")
@classmethod
def get(cls, event_id, user_email, **__):
user = User.get(user_email)
query = DB.events.select(
(DB.events.columns.id==event_id) &
(DB.events.columns.domain==user['domain'])
)
event = DB.ex(query).fetchone()
if not event:
raise PermissionError("No event found in the set of events user has access to")
return r2d(event)
@classmethod
def get_all_for_domain(cls, domain, override_auth=False):
assert override_auth
query = DB.events.select(
(DB.events.columns.domain==domain) &
(DB.events.columns.visible==1)
)
return map(r2d, DB.ex(query))
@classmethod
def lookup(cls, event_lookup_id, user_email=None, override_auth=False):
query = DB.events.select(
DB.events.columns.lookup_id==event_lookup_id
)
event = DB.ex(query).fetchone()
if not override_auth:
if event.domain != User.get(user_email)['domain']:
raise PermissionError("No event found in the set of events user has access to")
return r2d(event)
@classmethod
def create(cls, title, user_email, description=None, **__):
title = title
user = User.get(user_email)
org = Org.get(user['domain'], user_email)
unique_hash = "".join(str(hex(ord(b)))[2:] for b in token_bytes(16))
new_event = dict(
owner_email=user_email,
domain=user['domain'],
lookup_id=unique_hash,
moderators=list(set(org['moderators'] + [user_email])),
title=title,
description=description or title,
)
command = DB.events.insert(new_event).returning(*DB.events.columns)
return r2d(DB.ex(command).fetchone())
@classmethod
def update(cls, event_id, desc=None, moderators=None, **__):
raise NotImplementedError()
| mit | Python |
1ddfee2d2db59e8f188e26daf098186600cebd50 | add __repr__() | TaiSakuma/AlphaTwirl,alphatwirl/alphatwirl,alphatwirl/alphatwirl,alphatwirl/alphatwirl,TaiSakuma/AlphaTwirl,alphatwirl/alphatwirl | AlphaTwirl/ProgressBar/ProgressBar.py | AlphaTwirl/ProgressBar/ProgressBar.py | # Tai Sakuma <tai.sakuma@cern.ch>
import time
import sys, collections
##__________________________________________________________________||
class ProgressBar(object):
def __init__(self):
self.reports = collections.OrderedDict()
self.lines = [ ]
self.interval = 0.1 # [second]
self._readTime()
def __repr__(self):
return '{}()'.format(
self.__class__.__name__
)
def nreports(self):
return len(self.reports)
def present(self, report):
self.reports[report.taskid] = report
if not self._need_to_update(report): return
self._delete_previous_lines()
self._create_lines()
self._print_lines()
self._readTime()
def _delete_previous_lines(self):
if len(self.lines) >= 1:
sys.stdout.write('\b'*len(self.lines[-1]))
if len(self.lines) >= 2:
sys.stdout.write('\033M'*(len(self.lines) - 1))
self.lines = [ ]
self.last = [ ]
def _create_lines(self):
for taskid, report in self.reports.items():
line = self.createLine(report)
if report.done >= report.total:
del self.reports[report.taskid]
self.last.append(line)
else:
self.lines.append(line)
def _print_lines(self):
if len(self.last) > 0: sys.stdout.write("\n".join(self.last) + "\n")
sys.stdout.write("\n".join(self.lines))
sys.stdout.flush()
def createLine(self, report):
nameFieldLength = 32
percent = float(report.done)/report.total if report.total > 0 else 1
bar = (':' * int(percent * 40)).ljust(40, " ")
percent = round(percent * 100, 2)
name = report.name[0:nameFieldLength]
return " {3:6.2f}% {2:s} | {4:8d} / {5:8d} |: {0:<{1}s} ".format(name, nameFieldLength, bar, percent, report.done, report.total)
def _need_to_update(self, report):
if self._time() - self.lastTime > self.interval: return True
if report.done == report.total: return True
if report.done == 0: return True
return False
def _time(self): return time.time()
def _readTime(self): self.lastTime = self._time()
##__________________________________________________________________||
| # Tai Sakuma <tai.sakuma@cern.ch>
import time
import sys, collections
##__________________________________________________________________||
class ProgressBar(object):
def __init__(self):
self.reports = collections.OrderedDict()
self.lines = [ ]
self.interval = 0.1 # [second]
self._readTime()
def nreports(self):
return len(self.reports)
def present(self, report):
self.reports[report.taskid] = report
if not self._need_to_update(report): return
self._delete_previous_lines()
self._create_lines()
self._print_lines()
self._readTime()
def _delete_previous_lines(self):
if len(self.lines) >= 1:
sys.stdout.write('\b'*len(self.lines[-1]))
if len(self.lines) >= 2:
sys.stdout.write('\033M'*(len(self.lines) - 1))
self.lines = [ ]
self.last = [ ]
def _create_lines(self):
for taskid, report in self.reports.items():
line = self.createLine(report)
if report.done >= report.total:
del self.reports[report.taskid]
self.last.append(line)
else:
self.lines.append(line)
def _print_lines(self):
if len(self.last) > 0: sys.stdout.write("\n".join(self.last) + "\n")
sys.stdout.write("\n".join(self.lines))
sys.stdout.flush()
def createLine(self, report):
nameFieldLength = 32
percent = float(report.done)/report.total if report.total > 0 else 1
bar = (':' * int(percent * 40)).ljust(40, " ")
percent = round(percent * 100, 2)
name = report.name[0:nameFieldLength]
return " {3:6.2f}% {2:s} | {4:8d} / {5:8d} |: {0:<{1}s} ".format(name, nameFieldLength, bar, percent, report.done, report.total)
def _need_to_update(self, report):
if self._time() - self.lastTime > self.interval: return True
if report.done == report.total: return True
if report.done == 0: return True
return False
def _time(self): return time.time()
def _readTime(self): self.lastTime = self._time()
##__________________________________________________________________||
| bsd-3-clause | Python |
28bf23ef6e76c076243153affec2a4ccef04c306 | add util to print lexer output. | abadger/Bento,abadger/Bento,cournape/Bento,cournape/Bento,abadger/Bento,cournape/Bento,abadger/Bento,cournape/Bento | toydist/core/parser/utils.py | toydist/core/parser/utils.py | # Generator to enable "peeking" the next item:
# >>> a = [1, 2, 3, 4]
# >>> peeker = Peeker(a)
# >>> for i in peeker:
# >>> try:
# >>> next = peeker.peek()
# >>> print "Next to %d is %d" % (i, next)
# >>> except StopIteration:
# >>> print "End of stream", i
# >>>
class Peeker(object):
def __init__(self, it, dummy=None):
self._it = iter(it)
self._cache = None
if dummy is None:
self.peek = self._peek_no_dummy
else:
self.peek = self._peek_dummy
self._dummy = dummy
def next(self):
if self._cache:
i = self._cache
self._cache = None
return i
else:
return self._it.next()
#self._cache = None
#return i
def _peek_dummy(self):
if self._cache:
return self._cache
else:
try:
i = self._it.next()
except Exception, e:
return self._dummy
self._cache = i
return i
def _peek_no_dummy(self):
if self._cache:
return self._cache
else:
i = self._it.next()
self._cache = i
return i
def __iter__(self):
return self
def print_tokens_simple(lexer):
while True:
tok = lexer.token()
if not tok:
break
print tok
| # Generator to enable "peeking" the next item:
# >>> a = [1, 2, 3, 4]
# >>> peeker = Peeker(a)
# >>> for i in peeker:
# >>> try:
# >>> next = peeker.peek()
# >>> print "Next to %d is %d" % (i, next)
# >>> except StopIteration:
# >>> print "End of stream", i
# >>>
class Peeker(object):
def __init__(self, it, dummy=None):
self._it = iter(it)
self._cache = None
if dummy is None:
self.peek = self._peek_no_dummy
else:
self.peek = self._peek_dummy
self._dummy = dummy
def next(self):
if self._cache:
i = self._cache
self._cache = None
return i
else:
return self._it.next()
#self._cache = None
#return i
def _peek_dummy(self):
if self._cache:
return self._cache
else:
try:
i = self._it.next()
except Exception, e:
return self._dummy
self._cache = i
return i
def _peek_no_dummy(self):
if self._cache:
return self._cache
else:
i = self._it.next()
self._cache = i
return i
def __iter__(self):
return self
| bsd-3-clause | Python |
b438da2080f06e319ecd70fc771a934e3ce53044 | Fix Organization API reference | jphnoel/udata,etalab/udata,grouan/udata,davidbgk/udata,opendatateam/udata,etalab/udata,grouan/udata,jphnoel/udata,opendatateam/udata,grouan/udata,davidbgk/udata,jphnoel/udata,etalab/udata,davidbgk/udata,opendatateam/udata | udata/core/organization/api_fields.py | udata/core/organization/api_fields.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from flask import url_for
from udata.api import api, pager, fields
from .models import ORG_ROLES, MEMBERSHIP_STATUS
@api.model(fields={
'id': fields.String(description='The organization identifier', required=True),
'name': fields.String(description='The organization name', required=True),
'uri': fields.String(description='The organization API URI', required=True),
'page': fields.String(description='The organization web page URL', required=True),
'image_url': fields.String(description='The organization logo URL'),
})
class OrganizationReference(fields.Raw):
def format(self, organization):
return {
'id': str(organization.id),
'uri': url_for('api.organization', org=organization, _external=True),
'page': url_for('organizations.show', org=organization, _external=True),
'name': organization.name,
'logo': organization.logo(external=True),
}
from udata.core.user.api_fields import UserReference
request_fields = api.model('MembershipRequest', {
'status': fields.String(description='The current request status', required=True,
enum=MEMBERSHIP_STATUS.keys()),
'comment': fields.String(description='A request comment from the user', required=True),
})
member_fields = api.model('Member', {
'user': UserReference,
'role': fields.String(description='The member role in the organization', required=True,
enum=ORG_ROLES.keys())
})
org_fields = api.model('Organization', {
'id': fields.String(description='The organization identifier', required=True),
'name': fields.String(description='The organization name', required=True),
'slug': fields.String(description='The organization string used as permalink', required=True),
'description': fields.String(description='The organization description in Markdown', required=True),
'created_at': fields.ISODateTime(description='The organization creation date', required=True),
'last_modified': fields.ISODateTime(description='The organization last modification date', required=True),
'deleted': fields.ISODateTime(description='The organization deletion date if deleted'),
'metrics': fields.Raw(description='The organization metrics'),
'uri': fields.UrlFor('api.organization', lambda o: {'org': o},
description='The organization API URI', required=True),
'page': fields.UrlFor('organizations.show', lambda o: {'org': o},
description='The organization page URL', required=True),
'logo': fields.ImageField(description='The organization logo URLs'),
'members': api.as_list(fields.Nested(member_fields, description='The organization members')),
})
org_page_fields = api.model('OrganizationPage', pager(org_fields))
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from flask import url_for
from udata.api import api, pager, fields
from .models import ORG_ROLES, MEMBERSHIP_STATUS
@api.model(fields={
'id': fields.String(description='The organization identifier', required=True),
'name': fields.String(description='The organization name', required=True),
'uri': fields.String(description='The organization API URI', required=True),
'page': fields.String(description='The organization web page URL', required=True),
'image_url': fields.String(description='The organization logo URL'),
})
class OrganizationReference(fields.Raw):
def format(self, organization):
return {
'id': str(organization.id),
'uri': url_for('api.organization', org=organization, _external=True),
'page': url_for('organizations.show', org=organization, _external=True),
'image_url': organization.image_url,
'name': organization.name,
'logo': str(organization.logo),
}
from udata.core.user.api_fields import UserReference
request_fields = api.model('MembershipRequest', {
'status': fields.String(description='The current request status', required=True,
enum=MEMBERSHIP_STATUS.keys()),
'comment': fields.String(description='A request comment from the user', required=True),
})
member_fields = api.model('Member', {
'user': UserReference,
'role': fields.String(description='The member role in the organization', required=True,
enum=ORG_ROLES.keys())
})
org_fields = api.model('Organization', {
'id': fields.String(description='The organization identifier', required=True),
'name': fields.String(description='The organization name', required=True),
'slug': fields.String(description='The organization string used as permalink', required=True),
'description': fields.String(description='The organization description in Markdown', required=True),
'created_at': fields.ISODateTime(description='The organization creation date', required=True),
'last_modified': fields.ISODateTime(description='The organization last modification date', required=True),
'deleted': fields.ISODateTime(description='The organization deletion date if deleted'),
'metrics': fields.Raw(description='The organization metrics'),
'uri': fields.UrlFor('api.organization', lambda o: {'org': o},
description='The organization API URI', required=True),
'page': fields.UrlFor('organizations.show', lambda o: {'org': o},
description='The organization page URL', required=True),
'logo': fields.ImageField(description='The organization logo URLs'),
'members': api.as_list(fields.Nested(member_fields, description='The organization members')),
})
org_page_fields = api.model('OrganizationPage', pager(org_fields))
| agpl-3.0 | Python |
9bac47925e604a92df068f8644559741dc24769c | clean up | JonathanPetit/MovieSerieTorrent | src/renamer.py | src/renamer.py | from parser import Parser
import os
class Renamer:
def __init__(self):
self.infos = None
self.excess = None
self.parse_file = None
self.rename_file = []
self.compteur = 0
self.filename = None
def rename(self, files):
self.parse_file = Parser().parse(files)
self.infos = self.parse_file[0]
self.excess = self.parse_file[1]
self.rename_file = ['{title}', ' ({year})', '-{languages}-', '.{extension}']
for elements in self.rename_file:
try:
self.rename_file[self.compteur] = self.rename_file[self.compteur].format(**self.infos)
except KeyError:
self.rename_file[self.compteur] = ''
self.compteur +=1
for element in self.rename_file:
if element == '':
self.rename_file.remove('')
self.filename = ''.join(self.rename_file)
print(self.filename)
if __name__ == '__main__':
path = os.listdir('/Users/Jonh/Movies/Traitement')
for files in path:
if files.endswith('.DS_Store'):
pass
else:
Renamer().rename(files)
| from parser import Parser
import os
class Renamer:
def __init__(self):
self.infos = None
self.excess = None
self.parse_file = None
self.rename_file = []
self.compteur = 0
self.filename = None
def rename(self, files):
self.parse_file = Parser().parse(files)
self.infos = self.parse_file[0]
self.excess = self.parse_file[1]
self.rename_file = ['{title}', ' ({year})', '-{languages}-', '.{extension}']
for elements in self.rename_file:
try:
self.rename_file[self.compteur] = self.rename_file[self.compteur].format(**self.infos)
except KeyError:
self.rename_file[self.compteur] = ''
self.compteur +=1
for element in self.rename_file:
if element == '':
self.rename_file.remove('')
self.filename = ''.join(self.rename_file)
print(self.filename)
if __name__ == '__main__':
path = os.listdir('/Users/Jonh/Movies/Traitement')
for files in path:
if files.endswith('.DS_Store'):
pass
else:
Renamer().rename(files)
| mit | Python |
c6df07ce73063ab8bbf1ed9660fa6d37580ab2f1 | Update version | woefe/studip-sync,popeye123/studip-sync | studip_sync/__init__.py | studip_sync/__init__.py | """Stud.IP file synchronization tool.
A command line tool that keeps track of new files on Stud.IP and downloads them to your computer.
"""
__license__ = "Unlicense"
__version__ = "2.0.0"
__author__ = __maintainer__ = "Wolfgang Popp"
__email__ = "mail@wolfgang-popp.de"
def _get_config_path():
import os
prefix = os.environ.get("XDG_CONFIG_HOME") or "~/.config"
path = os.path.join(prefix, "studip-sync/")
return os.path.expanduser(path)
def get_config_file():
import os
from studip_sync.arg_parser import ARGS
from studip_sync.constants import CONFIG_FILENAME
if ARGS.config:
return ARGS.config
else:
return os.path.join(CONFIG_PATH, CONFIG_FILENAME)
CONFIG_PATH = _get_config_path()
| """Stud.IP file synchronization tool.
A command line tool that keeps track of new files on Stud.IP and downloads them to your computer.
"""
__license__ = "Unlicense"
__version__ = "0.4.0"
__author__ = __maintainer__ = "Wolfgang Popp"
__email__ = "mail@wolfgang-popp.de"
def _get_config_path():
import os
prefix = os.environ.get("XDG_CONFIG_HOME") or "~/.config"
path = os.path.join(prefix, "studip-sync/")
return os.path.expanduser(path)
def get_config_file():
import os
from studip_sync.arg_parser import ARGS
from studip_sync.constants import CONFIG_FILENAME
if ARGS.config:
return ARGS.config
else:
return os.path.join(CONFIG_PATH, CONFIG_FILENAME)
CONFIG_PATH = _get_config_path()
| unlicense | Python |
57882e6754f6d5897690c97dfda2db371989206e | Fix test ModifiedHamiltonianExchange resuming | andrrizzi/yank,andrrizzi/yank,andrrizzi/yank,choderalab/yank,choderalab/yank | Yank/tests/test_sampling.py | Yank/tests/test_sampling.py | #!/usr/local/bin/env python
"""
Test sampling.py facility.
"""
# ==============================================================================
# GLOBAL IMPORTS
# ==============================================================================
from openmmtools import testsystems
from mdtraj.utils import enter_temp_directory
from yank.sampling import *
# ==============================================================================
# TESTS
# ==============================================================================
def test_resuming():
"""Test that sampling correctly resumes."""
# Prepare ModifiedHamiltonianExchange arguments
toluene_test = testsystems.TolueneImplicit()
ligand_atoms = range(15)
alchemical_factory = AbsoluteAlchemicalFactory(toluene_test.system,
ligand_atoms=ligand_atoms,
softcore_beta=0.0)
base_state = ThermodynamicState(temperature=300.0*unit.kelvin)
base_state.system = alchemical_factory.alchemically_modified_system
alchemical_state1 = AlchemicalState(lambda_electrostatics=1.0, lambda_sterics=1.0)
alchemical_state0 = AlchemicalState(lambda_electrostatics=0.0, lambda_sterics=0.0)
alchemical_states = [alchemical_state1, alchemical_state0]
positions = toluene_test.positions
# We pass as reference_LJ_state and reference_LJ_state the normal
# reference state as we just want to check that they are correctly
# set on resume
reference_state = ThermodynamicState(temperature=300.0*unit.kelvin)
reference_state.system = toluene_test.system
reference_LJ_state = copy.deepcopy(base_state)
reference_LJ_expanded_state = copy.deepcopy(base_state)
with enter_temp_directory():
store_file_name = 'simulation.nc'
simulation = ModifiedHamiltonianExchange(store_file_name)
simulation.create(base_state, alchemical_states, positions, mc_atoms=ligand_atoms,
reference_state=reference_state,
reference_LJ_state=reference_LJ_state,
reference_LJ_expanded_state=reference_LJ_expanded_state)
# Clean up simulation and resume
del simulation
simulation = ModifiedHamiltonianExchange(store_file_name)
simulation.resume()
assert simulation.reference_state is not None
assert simulation.reference_LJ_state is not None
assert simulation.reference_LJ_expanded_state is not None
| #!/usr/local/bin/env python
"""
Test sampling.py facility.
"""
# ==============================================================================
# GLOBAL IMPORTS
# ==============================================================================
from openmmtools import testsystems
from mdtraj.utils import enter_temp_directory
from yank.sampling import *
# ==============================================================================
# TESTS
# ==============================================================================
def test_resuming():
"""Test that sampling correctly resumes."""
# Prepare ModifiedHamiltonianExchange arguments
toluene_test = testsystems.TolueneImplicit()
ligand_atoms = range(15)
alchemical_factory = AbsoluteAlchemicalFactory(toluene_test.system,
ligand_atoms=ligand_atoms,
softcore_beta=0.0)
base_state = ThermodynamicState(temperature=300.0*unit.kelvin)
base_state.system = alchemical_factory.alchemically_modified_system
alchemical_state1 = AlchemicalState(lambda_electrostatics=1.0, lambda_sterics=1.0)
alchemical_state0 = AlchemicalState(lambda_electrostatics=0.0, lambda_sterics=0.0)
alchemical_states = [alchemical_state1, alchemical_state0]
positions = toluene_test.positions
# We pass as reference_LJ_state and reference_LJ_state the normal
# reference state as we just want to check that they are correctly
# set on resume
reference_state = ThermodynamicState(temperature=300.0*unit.kelvin)
reference_state.system = toluene_test.system
reference_LJ_state = copy.deepcopy(base_state)
reference_LJ_expanded_state = copy.deepcopy(base_state)
with enter_temp_directory():
store_file_name = 'simulation.nc'
simulation = ModifiedHamiltonianExchange(store_file_name)
simulation.create(base_state, alchemical_states, positions,
reference_state=reference_state,
reference_LJ_state=reference_LJ_state,
reference_LJ_expanded_state=reference_LJ_expanded_state)
# Clean up simulation and resume
del simulation
simulation = ModifiedHamiltonianExchange(store_file_name)
simulation.resume()
assert simulation.reference_state is not None
assert simulation.reference_LJ_state is not None
assert simulation.reference_LJ_expanded_state is not None
| mit | Python |
3ec0f7fa6ee03052118d7d7e6db257f903ce8748 | Fix capitalization to default style. | instana/python-sensor,instana/python-sensor | instana/http_propagator.py | instana/http_propagator.py | from __future__ import absolute_import
import opentracing as ot
from basictracer.context import SpanContext
from instana import util, log
prefix_tracer_state = 'X-Instana-'
field_name_trace_id = prefix_tracer_state + 'T'
field_name_span_id = prefix_tracer_state + 'S'
field_count = 2
class HTTPPropagator():
"""A Propagator for Format.HTTP_HEADERS. """
def inject(self, span_context, carrier):
try:
trace_id = util.id_to_header(span_context.trace_id)
span_id = util.id_to_header(span_context.span_id)
if type(carrier) is dict or hasattr(carrier, "__dict__"):
carrier[field_name_trace_id] = trace_id
carrier[field_name_span_id] = span_id
elif type(carrier) is list:
trace_header = (field_name_trace_id, trace_id)
carrier.append(trace_header)
span_header = (field_name_span_id, span_id)
carrier.append(span_header)
else:
raise Exception("Unsupported carrier type", type(carrier))
except Exception as e:
log.debug("inject error: ", str(e))
def extract(self, carrier): # noqa
try:
if type(carrier) is dict or hasattr(carrier, "__dict__"):
dc = carrier
elif type(carrier) is list:
dc = dict(carrier)
else:
raise ot.SpanContextCorruptedException()
if field_name_trace_id in dc and field_name_span_id in dc:
trace_id = util.header_to_id(dc[field_name_trace_id])
span_id = util.header_to_id(dc[field_name_span_id])
return SpanContext(span_id=span_id,
trace_id=trace_id,
baggage={},
sampled=True)
except Exception as e:
log.debug("extract error: ", str(e))
| from __future__ import absolute_import
import opentracing as ot
from basictracer.context import SpanContext
from instana import util, log
prefix_tracer_state = 'X-INSTANA-'
field_name_trace_id = prefix_tracer_state + 'T'
field_name_span_id = prefix_tracer_state + 'S'
field_count = 2
class HTTPPropagator():
"""A Propagator for Format.HTTP_HEADERS. """
def inject(self, span_context, carrier):
try:
trace_id = util.id_to_header(span_context.trace_id)
span_id = util.id_to_header(span_context.span_id)
if type(carrier) is dict or hasattr(carrier, "__dict__"):
carrier[field_name_trace_id] = trace_id
carrier[field_name_span_id] = span_id
elif type(carrier) is list:
trace_header = (field_name_trace_id, trace_id)
carrier.append(trace_header)
span_header = (field_name_span_id, span_id)
carrier.append(span_header)
else:
raise Exception("Unsupported carrier type", type(carrier))
except Exception as e:
log.debug("inject error: ", str(e))
def extract(self, carrier): # noqa
try:
if type(carrier) is dict or hasattr(carrier, "__dict__"):
dc = carrier
elif type(carrier) is list:
dc = dict(carrier)
else:
raise ot.SpanContextCorruptedException()
if field_name_trace_id in dc and field_name_span_id in dc:
trace_id = util.header_to_id(dc[field_name_trace_id])
span_id = util.header_to_id(dc[field_name_span_id])
return SpanContext(span_id=span_id,
trace_id=trace_id,
baggage={},
sampled=True)
except Exception as e:
log.debug("extract error: ", str(e))
| mit | Python |
53790af64ca601832872d3a21ab8264ce4c9be10 | Update the build version | vlegoff/cocomud | src/version.py | src/version.py | BUILD = 44
| BUILD = 43
| bsd-3-clause | Python |
c54223c1b4dd8701a49dd47d51d0947d858b7f79 | add __unicode__ methods (omg why) | hackerspace-silesia/jakniedojade,hackerspace-silesia/jakniedojade,hackerspace-silesia/jakniedojade | jakniedojade/app/models.py | jakniedojade/app/models.py | from django.db import models
from django_resized import ResizedImageField
class Core(models.Model):
id = models.AutoField(primary_key=True)
last_modified = models.DateTimeField(auto_now=True)
created = models.DateTimeField(auto_now_add=True)
class Meta:
abstract = True
class Image(Core):
name = models.CharField(max_length=80)
image = ResizedImageField(size=[300, 200], upload_to='connects', blank=True, null=True)
def __str__(self):
return self.name or '-'
def __unicode__(self):
return self.name or u'-'
class Connection(Core):
name = models.CharField(max_length=80)
image = models.ForeignKey(Image, blank=True, null=True)
iframe_url = models.TextField(blank=True, null=True)
description = models.TextField(blank=True)
def __str__(self):
return self.name or '-'
def __unicode__(self):
return self.name or u'-'
class Vote(Core):
connection = models.ForeignKey(Connection, related_name='votes')
ip = models.CharField(max_length=45)
user_agent = models.CharField(max_length=128, blank=True)
| from django.db import models
from django_resized import ResizedImageField
class Core(models.Model):
id = models.AutoField(primary_key=True)
last_modified = models.DateTimeField(auto_now=True)
created = models.DateTimeField(auto_now_add=True)
class Meta:
abstract = True
class Image(Core):
name = models.CharField(max_length=80)
image = ResizedImageField(size=[300, 200], upload_to='connects', blank=True, null=True)
def __str__(self):
return self.name or '-'
class Connection(Core):
name = models.CharField(max_length=80)
image = models.ForeignKey(Image, blank=True, null=True)
iframe_url = models.TextField(blank=True, null=True)
description = models.TextField(blank=True)
def __str__(self):
return self.name or '-'
class Vote(Core):
connection = models.ForeignKey(Connection, related_name='votes')
ip = models.CharField(max_length=45)
user_agent = models.CharField(max_length=128, blank=True)
| mit | Python |
6508db3283ec6d419064e679e1315ffa79765010 | Read config from envvar | devxoul/schoool | schoool/app.py | schoool/app.py | # -*- coding: utf-8 -*-
import json
import os
from bs4 import BeautifulSoup
from flask import Flask, request, Response
import requests
from werkzeug.exceptions import default_exceptions
from schoool import cache
from schoool.views import blueprints
def create_app(config=None):
app = Flask(__name__)
if config:
app.config.from_pyfile(os.path.abspath(config))
else:
app.config.from_envvar('CONFIG')
install_errorhandler(app)
register_blueprints(app)
cache.init_app(app)
monkey_patch_response_html()
monkey_patch_response_json()
return app
def register_blueprints(app):
for blueprint_name in blueprints:
path = 'schoool.views.%s' % blueprint_name
view = __import__(path, fromlist=[blueprint_name])
blueprint = getattr(view, 'view')
app.register_blueprint(blueprint)
def install_errorhandler(app):
def errorhandler(err):
accept = request.headers.get('Accept', '')
if 'application/json' in accept:
data = {
'status': err.code,
'name': err.name,
'description': err.description
}
res = json.dumps(data)
return Response(res, mimetype='application/json', status=err.code)
html = "<h1>{0}: {1}</h1><p>{2}</p>".format(err.code, err.name,
err.description)
return Response(html, status=err.code)
for code in default_exceptions.iterkeys():
app.error_handler_spec[None][code] = errorhandler
def monkey_patch_response_html():
@property
def _html(self):
return BeautifulSoup(self.text, 'html.parser')
requests.Response.html = _html
def monkey_patch_response_json():
@property
def _json(self):
return json.loads(self.text)
requests.Response.json = _json
| # -*- coding: utf-8 -*-
import json
import os
from bs4 import BeautifulSoup
from flask import Flask, request, Response
import requests
from werkzeug.exceptions import default_exceptions
from schoool import cache
from schoool.views import blueprints
def create_app(config=None):
app = Flask(__name__)
if config:
app.config.from_pyfile(os.path.abspath(config))
else:
raise Exception('config file is not given')
install_errorhandler(app)
register_blueprints(app)
cache.init_app(app)
monkey_patch_response_html()
monkey_patch_response_json()
return app
def register_blueprints(app):
for blueprint_name in blueprints:
path = 'schoool.views.%s' % blueprint_name
view = __import__(path, fromlist=[blueprint_name])
blueprint = getattr(view, 'view')
app.register_blueprint(blueprint)
def install_errorhandler(app):
def errorhandler(err):
accept = request.headers.get('Accept', '')
if 'application/json' in accept:
data = {
'status': err.code,
'name': err.name,
'description': err.description
}
res = json.dumps(data)
return Response(res, mimetype='application/json', status=err.code)
html = "<h1>{0}: {1}</h1><p>{2}</p>".format(err.code, err.name,
err.description)
return Response(html, status=err.code)
for code in default_exceptions.iterkeys():
app.error_handler_spec[None][code] = errorhandler
def monkey_patch_response_html():
@property
def _html(self):
return BeautifulSoup(self.text, 'html.parser')
requests.Response.html = _html
def monkey_patch_response_json():
@property
def _json(self):
return json.loads(self.text)
requests.Response.json = _json
| mit | Python |
c2f480cf1709b06cfe7b1373165efa968e4096e3 | bump version number | AcademicTorrents/python-r-api | academictorrents/version.py | academictorrents/version.py | __version__ = "2.0.12"
| __version__ = "2.0.11"
| mit | Python |
0823c5266caf97f05524b1ff3b1e7f99f95e1911 | make shell context | varnish/varnish-microservice-monitor,varnish/varnish-microservice-monitor,varnish/zipnish,varnish/varnish-microservice-monitor,varnish/zipnish,varnish/zipnish,varnish/varnish-microservice-monitor,varnish/varnish-microservice-monitor,varnish/zipnish,varnish/zipnish | ui/manage.py | ui/manage.py | #!/usr/bin/env python
import os
from app import create_app
from flask.ext.script import Shell, Manager
app = create_app(os.getenv('APP_CONFIG') or 'default')
manager = Manager(app)
def make_shell_context():
return dict(app=app)
if __name__ == '__main__':
manager.run()
| #!/usr/bin/env python
import os
from app import create_app
from flask.ext.script import Shell, Manager
app = create_app(os.getenv('APP_CONFIG') or 'default')
manager = Manager(app)
if __name__ == '__main__':
manager.run()
| bsd-2-clause | Python |
3b94cc59b444e166467e6cb81e2e07e80bdebf28 | disable cache-flushing again, leave caching for only 1 hour | total-impact/total-impact-core,Impactstory/total-impact-core,Impactstory/total-impact-core,total-impact/total-impact-core,total-impact/total-impact-core,Impactstory/total-impact-core,total-impact/total-impact-core,Impactstory/total-impact-core | totalimpact/cache.py | totalimpact/cache.py | import os
import pylibmc
import hashlib
import logging
import json
from cPickle import PicklingError
from totalimpact.utils import Retry
# set up logging
logger = logging.getLogger("ti.cache")
class CacheException(Exception):
pass
class Cache(object):
""" Maintains a cache of URL responses in memcached """
def _build_hash_key(self, key):
json_key = json.dumps(key)
hash_key = hashlib.md5(json_key.encode("utf-8")).hexdigest()
return hash_key
def _get_memcached_client(self):
mc = pylibmc.Client(
servers=[os.environ.get('MEMCACHE_SERVERS')],
username=os.environ.get('MEMCACHE_USERNAME'),
password=os.environ.get('MEMCACHE_PASSWORD'),
binary=True)
return mc
def __init__(self, max_cache_age=60*60):
self.max_cache_age = max_cache_age
# uncomment if you want to flush the cache
# mc = self._get_memcached_client()
# mc.flush_all()
@Retry(3, pylibmc.Error, 0.1)
def get_cache_entry(self, key):
""" Get an entry from the cache, returns None if not found """
mc = self._get_memcached_client()
hash_key = self._build_hash_key(key)
response = mc.get(hash_key)
return response
@Retry(3, pylibmc.Error, 0.1)
def set_cache_entry(self, key, data):
""" Store a cache entry """
mc = self._get_memcached_client()
hash_key = self._build_hash_key(key)
try:
set_response = mc.set(hash_key, data, time=self.max_cache_age)
if not set_response:
raise CacheException("Unable to store into Memcached. Make sure memcached server is running.")
except PicklingError:
# This happens when trying to cache a thread.lock object, for example. Just don't cache.
logger.debug("In set_cache_entry but got PicklingError")
set_response = None
return (set_response)
| import os
import pylibmc
import hashlib
import logging
import json
from cPickle import PicklingError
from totalimpact.utils import Retry
# set up logging
logger = logging.getLogger("ti.cache")
class CacheException(Exception):
pass
class Cache(object):
""" Maintains a cache of URL responses in memcached """
def _build_hash_key(self, key):
json_key = json.dumps(key)
hash_key = hashlib.md5(json_key.encode("utf-8")).hexdigest()
return hash_key
def _get_memcached_client(self):
mc = pylibmc.Client(
servers=[os.environ.get('MEMCACHE_SERVERS')],
username=os.environ.get('MEMCACHE_USERNAME'),
password=os.environ.get('MEMCACHE_PASSWORD'),
binary=True)
return mc
def __init__(self, max_cache_age=60*60):
self.max_cache_age = max_cache_age
# uncomment if you want to flush the cache
mc = self._get_memcached_client()
mc.flush_all()
@Retry(3, pylibmc.Error, 0.1)
def get_cache_entry(self, key):
""" Get an entry from the cache, returns None if not found """
mc = self._get_memcached_client()
hash_key = self._build_hash_key(key)
response = mc.get(hash_key)
return response
@Retry(3, pylibmc.Error, 0.1)
def set_cache_entry(self, key, data):
""" Store a cache entry """
mc = self._get_memcached_client()
hash_key = self._build_hash_key(key)
try:
set_response = mc.set(hash_key, data, time=self.max_cache_age)
if not set_response:
raise CacheException("Unable to store into Memcached. Make sure memcached server is running.")
except PicklingError:
# This happens when trying to cache a thread.lock object, for example. Just don't cache.
logger.debug("In set_cache_entry but got PicklingError")
set_response = None
return (set_response)
| mit | Python |
3163d016db3849c3c9e801c1cdb9e6e907afa313 | install python files to libxml2 prefix instead of python prefix and ignore non-python files when activating | TheTimmy/spack,skosukhin/spack,EmreAtes/spack,krafczyk/spack,tmerrick1/spack,iulian787/spack,matthiasdiener/spack,iulian787/spack,matthiasdiener/spack,iulian787/spack,TheTimmy/spack,EmreAtes/spack,lgarren/spack,LLNL/spack,EmreAtes/spack,skosukhin/spack,lgarren/spack,TheTimmy/spack,skosukhin/spack,lgarren/spack,krafczyk/spack,TheTimmy/spack,matthiasdiener/spack,EmreAtes/spack,LLNL/spack,lgarren/spack,mfherbst/spack,matthiasdiener/spack,tmerrick1/spack,LLNL/spack,TheTimmy/spack,mfherbst/spack,matthiasdiener/spack,mfherbst/spack,mfherbst/spack,krafczyk/spack,lgarren/spack,skosukhin/spack,iulian787/spack,EmreAtes/spack,tmerrick1/spack,LLNL/spack,krafczyk/spack,mfherbst/spack,tmerrick1/spack,krafczyk/spack,iulian787/spack,skosukhin/spack,tmerrick1/spack,LLNL/spack | var/spack/packages/libxml2/package.py | var/spack/packages/libxml2/package.py | from spack import *
import os
class Libxml2(Package):
"""Libxml2 is the XML C parser and toolkit developed for the Gnome
project (but usable outside of the Gnome platform), it is free
software available under the MIT License."""
homepage = "http://xmlsoft.org"
url = "http://xmlsoft.org/sources/libxml2-2.9.2.tar.gz"
version('2.9.2', '9e6a9aca9d155737868b3dc5fd82f788')
variant('python', default=False, description='Enable Python support')
extends('python', when='+python', ignore=r'(bin.*$)|(include.*$)|(share.*$)|(lib/libxml2.*$)|(lib/xml2.*$)|(lib/cmake.*$)')
depends_on('zlib')
depends_on('xz')
def install(self, spec, prefix):
if '+python' in spec:
site_packages_dir = os.path.join(prefix, 'lib/python%s.%s/site-packages' %(spec['python'].version[:2]))
python_args = ["--with-python=%s" % spec['python'].prefix, "--with-python-install-dir=%s" % site_packages_dir]
else:
python_args = ["--without-python"]
configure("--prefix=%s" % prefix,
*python_args)
make()
make("install")
| from spack import *
class Libxml2(Package):
"""Libxml2 is the XML C parser and toolkit developed for the Gnome
project (but usable outside of the Gnome platform), it is free
software available under the MIT License."""
homepage = "http://xmlsoft.org"
url = "http://xmlsoft.org/sources/libxml2-2.9.2.tar.gz"
version('2.9.2', '9e6a9aca9d155737868b3dc5fd82f788')
variant('python', default=False, description='Enable Python support')
extends('python', when='+python')
depends_on('zlib')
depends_on('xz')
def install(self, spec, prefix):
if '+python' in spec:
python_arg = "--with-python=%s" % spec['python'].prefix
else:
python_arg = "--without-python"
configure("--prefix=%s" % prefix,
python_arg)
make()
make("install")
| lgpl-2.1 | Python |
84929e01bfb9236fd0f51d82ee514d513d018408 | Sort dimensins to reduce code | CubicComet/exercism-python-solutions | triangle/triangle.py | triangle/triangle.py | class TriangleError(Exception):
pass
class Triangle(object):
def __init__(self, *dims):
if not self.is_valid(dims):
raise TriangleError("Invalid dimensions: {}".format(dims))
self.dims = sorted(dims)
def kind(self):
a, b, c = self.dims
if a == b and b == c: # implies a == c
return "equilateral"
elif a == b or b == c: # sorted, so a < c here unless a == c above
return "isosceles"
else:
return "scalene"
@staticmethod
def is_valid(dims):
if len(dims) != 3:
raise ValueError("Triangles have 3 sides")
a, b, c = sorted(dims)
return a > 0 and a + b > c
| class TriangleError(Exception):
pass
class Triangle(object):
def __init__(self, *dims):
if not self.is_valid(dims):
raise TriangleError("Invalid dimensions: {}".format(dims))
self.dims = dims
def kind(self):
a, b, c = self.dims
if a == b and b == c:
return "equilateral"
elif a == b or b == c or a == c:
return "isosceles"
else:
return "scalene"
@staticmethod
def is_valid(dims):
if len(dims) != 3:
return False
a, b, c = dims
return (a > 0 and b > 0 and c > 0) \
and (a + b > c and a + c > b and b + c > a)
| agpl-3.0 | Python |
45f253560e2c0da9472285fccc7f3cba652fde69 | Test fade to white | guglielmino/selfie-o-matic | tasks/task_countdown.py | tasks/task_countdown.py |
import sys, os
import time
try:
from picamera.array import PiRGBArray
from picamera import PiCamera
except:
pass
import cv2
from cv2 import VideoCapture
import numpy as np
import logging
import settings
from PIL import Image
from image_lib import overlay_image, overlay_np_image_pi, overlay_pil_image_pi
from task_common import TaskFrameProcessorBase
class CountdownTask(TaskFrameProcessorBase):
'''
Overlay del countdown
'''
start_time = None
_is_completed = False
_running_img = None
_overlay = None
counters = [
cv2.imread('res/images/3.png'),
cv2.imread('res/images/2.png'),
cv2.imread('res/images/1.png')
]
pil_img = [
Image.open('res/images/3.jpg'),
Image.open('res/images/2.jpg'),
Image.open('res/images/1.jpg')
]
def __init__(self, ctx):
TaskFrameProcessorBase.__init__(self, ctx)
self._is_completed = False
def process_frame(self, frame):
if self.start_time is None:
self.start_time = time.time()
diff_time = int(round(time.time() - self.start_time))
if diff_time < 3:
img = self.counters[diff_time]
if self.device_ctx.camera is None:
frame = overlay_image(frame, img)
else:
if self._running_img != self.pil_img[diff_time]:
if self._overlay is not None:
self.device_ctx.camera.remove_overlay(self._overlay)
self._running_img = self.pil_img[diff_time]
self._overlay = overlay_pil_image_pi(self.device_ctx.camera, self._running_img, (640, 480))
else is not None:
self.device_ctx.camera.remove_overlay(self._overlay)
else:
self._is_completed = True
return frame
def is_completed(self):
return self._is_completed
|
import sys, os
import time
try:
from picamera.array import PiRGBArray
from picamera import PiCamera
except:
pass
import cv2
from cv2 import VideoCapture
import numpy as np
import logging
import settings
from PIL import Image
from image_lib import overlay_image, overlay_np_image_pi, overlay_pil_image_pi
from task_common import TaskFrameProcessorBase
class CountdownTask(TaskFrameProcessorBase):
'''
Overlay del countdown
'''
start_time = None
_is_completed = False
_running_img = None
_overlay = None
counters = [
cv2.imread('res/images/3.png'),
cv2.imread('res/images/2.png'),
cv2.imread('res/images/1.png')
]
pil_img = [
Image.open('res/images/3.jpg'),
Image.open('res/images/2.jpg'),
Image.open('res/images/1.jpg')
]
def __init__(self, ctx):
TaskFrameProcessorBase.__init__(self, ctx)
self._is_completed = False
def process_frame(self, frame):
if self.start_time is None:
self.start_time = time.time()
diff_time = int(round(time.time() - self.start_time))
if diff_time < 3:
img = self.counters[diff_time]
if self.device_ctx.camera is None:
frame = overlay_image(frame, img)
else:
if self._running_img != self.pil_img[diff_time]:
if self._overlay is not None:
self.device_ctx.camera.remove_overlay(self._overlay)
self._running_img = self.pil_img[diff_time]
self._overlay = overlay_pil_image_pi(self.device_ctx.camera, self._running_img, (640, 480))
else:
self.device_ctx.camera.remove_overlay(self._overlay)
else:
self._is_completed = True
return frame
def is_completed(self):
return self._is_completed
| mit | Python |
41db16ee01600a14703e68ec9ec529150359c27e | Remove unused import | mesosphere-mergebot/mergebot-test-dcos,dcos/dcos,mesosphere-mergebot/dcos,mesosphere-mergebot/dcos,dcos/dcos,kensipe/dcos,dcos/dcos,kensipe/dcos,dcos/dcos,dcos/dcos,mesosphere-mergebot/dcos,GoelDeepak/dcos,mesosphere-mergebot/mergebot-test-dcos,GoelDeepak/dcos,mesosphere-mergebot/dcos,kensipe/dcos,GoelDeepak/dcos,mesosphere-mergebot/mergebot-test-dcos,GoelDeepak/dcos,mesosphere-mergebot/mergebot-test-dcos,kensipe/dcos | packages/dcos-integration-test/extra/test_metronome.py | packages/dcos-integration-test/extra/test_metronome.py | __maintainer__ = 'ichernetsky'
__contact__ = 'marathon-team@mesosphere.io'
def test_metronome(dcos_api_session):
job = {
'description': 'Test Metronome API regressions',
'id': 'test.metronome',
'run': {
'cmd': 'ls',
'docker': {'image': 'busybox:latest'},
'cpus': 1,
'mem': 512,
'disk': 0,
'user': 'nobody',
'restart': {'policy': 'ON_FAILURE'}
}
}
dcos_api_session.metronome_one_off(job)
| import pytest
__maintainer__ = 'ichernetsky'
__contact__ = 'marathon-team@mesosphere.io'
def test_metronome(dcos_api_session):
job = {
'description': 'Test Metronome API regressions',
'id': 'test.metronome',
'run': {
'cmd': 'ls',
'docker': {'image': 'busybox:latest'},
'cpus': 1,
'mem': 512,
'disk': 0,
'user': 'nobody',
'restart': {'policy': 'ON_FAILURE'}
}
}
dcos_api_session.metronome_one_off(job)
| apache-2.0 | Python |
4712e44b11cb7cb276c98691d6de05e21e25d118 | improve coverage | chfw/django-excel,fondelsur/todopinturas,chfw/django-excel,fondelsur/todopinturas,fondelsur/todopinturas,fondelsur/todopinturas,chfw/django-excel | django_excel/__init__.py | django_excel/__init__.py | from django.core.files.uploadhandler import MemoryFileUploadHandler, TemporaryFileUploadHandler
from django.core.files.uploadedfile import InMemoryUploadedFile, TemporaryUploadedFile
from django.http import HttpResponse
import pyexcel as pe
import pyexcel_webio as webio
class ExcelMemoryFile(webio.ExcelInput, InMemoryUploadedFile):
def _get_file_extension(self):
extension = self.name.split(".")[1]
return extension
def load_single_sheet(self, sheet_name=None, **keywords):
return pe.load_from_memory(self._get_file_extension(), self.file.read(), sheet_name, **keywords)
def load_book(self):
return pe.load_book_from_memory(self._get_file_extension(), self.file.read())
class ExcelFile(webio.ExcelInput, TemporaryUploadedFile):
def _get_file_extension(self):
extension = self.name.split(".")[1]
return extension
def load_single_sheet(self, sheet_name=None, **keywords):
return pe.load_from_memory(self._get_file_extension(), self.file.read(), sheet_name, **keywords)
def load_book(self):
return pe.load_book_from_memory(self._get_file_extension(), self.file.read())
class ExcelMemoryFileUploadHandler(MemoryFileUploadHandler):
def file_complete(self, file_size):
if not self.activated:
return
self.file.seek(0)
return ExcelMemoryFile(
file=self.file,
field_name=self.field_name,
name=self.file_name,
content_type=self.content_type,
size=file_size,
charset=self.charset,
content_type_extra=self.content_type_extra
)
class TemporaryExcelFileUploadHandler(TemporaryFileUploadHandler):
def new_file(self, file_name, *args, **kwargs):
"""
Create the file object to append to as data is coming in.
"""
super(TemporaryFileUploadHandler, self).new_file(file_name, *args, **kwargs)
self.file = ExcelFile(self.file_name, self.content_type, 0, self.charset, self.content_type_extra)
webio.ExcelResponse = HttpResponse
from pyexcel_webio import (
make_response,
make_response_from_array,
make_response_from_dict,
make_response_from_records,
make_response_from_book_dict
)
| from django.core.files.uploadhandler import MemoryFileUploadHandler, TemporaryFileUploadHandler
from django.core.files.uploadedfile import InMemoryUploadedFile, TemporaryUploadedFile
from django.http import HttpResponse
import pyexcel as pe
import pyexcel_webio as webio
class ExcelMemoryFile(webio.ExcelInput, InMemoryUploadedFile):
def _get_file_extension(self):
extension = self.name.split(".")[1]
return extension
def load_single_sheet(self, sheet_name=None, **keywords):
return pe.load_from_memory(self._get_file_extension(), self.file.read(), sheet_name, **keywords)
def load_book(self):
return pe.load_book_from_memory(self._get_file_extension(), self.file.read())
class ExcelFile(webio.ExcelInput, TemporaryUploadedFile):
def load_single_sheet(self, sheet_name=None, **keywords):
return pe.load(self.file.replace(".upload", ""), sheet_name, **keywords)
def load_book(self):
return pe.load_book(self.file.replace(".upload", ""))
class ExcelMemoryFileUploadHandler(MemoryFileUploadHandler):
def file_complete(self, file_size):
if not self.activated:
return
self.file.seek(0)
return ExcelMemoryFile(
file=self.file,
field_name=self.field_name,
name=self.file_name,
content_type=self.content_type,
size=file_size,
charset=self.charset,
content_type_extra=self.content_type_extra
)
class TemporaryExcelFileUploadHandler(TemporaryFileUploadHandler):
def file_complete(self, file_size):
self.file.seek(0)
return ExcelMemoryFile(
file=self.file,
field_name=self.field_name,
name=self.file_name,
content_type=self.content_type,
size=file_size,
charset=self.charset,
content_type_extra=self.content_type_extra
)
webio.ExcelResponse = HttpResponse
from pyexcel_webio import (
make_response,
make_response_from_array,
make_response_from_dict,
make_response_from_records,
make_response_from_book_dict
)
| bsd-3-clause | Python |
b143c7ae9bbfa7b67ad5111ffce417f380e7f0ea | Bump version to 1.0a1 for dev. | carljm/django-secure,bocman/django-secure,bocman/django-secure,carljm/django-secure | djangosecure/__init__.py | djangosecure/__init__.py | __version__ = "1.0.a1"
| __version__ = "0.1.3"
| bsd-3-clause | Python |
f085c1eb9fabf2266376b884b414b85575c2677a | update version | bird-house/twitcher,bird-house/pywps-proxy,bird-house/pywps-proxy | twitcher/__init__.py | twitcher/__init__.py | import logging
logger = logging.getLogger(__name__)
__version__ = '0.3.1'
def main(global_config, **settings):
"""
This function returns a Pyramid WSGI application.
"""
from pyramid.config import Configurator
config = Configurator(settings=settings)
# include twitcher components
config.include('twitcher.config')
config.include('twitcher.frontpage')
config.include('twitcher.rpcinterface')
config.include('twitcher.owsproxy')
config.include('twitcher.wps')
# tweens/middleware
# TODO: maybe add tween for exception handling or use unknown_failure view
config.include('twitcher.tweens')
config.scan()
return config.make_wsgi_app()
| import logging
logger = logging.getLogger(__name__)
__version__ = '0.3.0'
def main(global_config, **settings):
"""
This function returns a Pyramid WSGI application.
"""
from pyramid.config import Configurator
config = Configurator(settings=settings)
# include twitcher components
config.include('twitcher.config')
config.include('twitcher.frontpage')
config.include('twitcher.rpcinterface')
config.include('twitcher.owsproxy')
config.include('twitcher.wps')
# tweens/middleware
# TODO: maybe add tween for exception handling or use unknown_failure view
config.include('twitcher.tweens')
config.scan()
return config.make_wsgi_app()
| apache-2.0 | Python |
c6d285dd2a80ae713e1399ce1efaf9b514878755 | Fix test | github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql | python/ql/test/library-tests/frameworks/aiopg/test.py | python/ql/test/library-tests/frameworks/aiopg/test.py | import aiopg
# Only a cursor can execute sql.
async def test_cursor():
# Create connection directly
conn = await aiopg.connect()
cur = await conn.cursor()
await cur.execute("sql") # $ getSql="sql" constructedSql="sql"
# Create connection via pool
async with aiopg.create_pool() as pool:
# Create Cursor via Connection
async with pool.acquire() as conn:
async with conn.cursor() as cur:
await cur.execute("sql") # $ getSql="sql" constructedSql="sql"
# Create Cursor directly
async with pool.cursor() as cur:
await cur.execute("sql") # $ getSql="sql" constructedSql="sql"
# variants using as few `async with` as possible
pool = await aiopg.create_pool()
conn = await pool.acquire()
cur = await conn.cursor()
await cur.execute("sql") # $ getSql="sql" constructedSql="sql"
# Test SQLAlchemy integration
from aiopg.sa import create_engine
async def test_engine():
engine = await create_engine()
conn = await engine.acquire()
await conn.execute("sql") # $ getSql="sql" constructedSql="sql"
| import aiopg
# Only a cursor can execute sql.
async def test_cursor():
# Create connection directly
conn = await aiopg.connect()
cur = await conn.cursor()
await cur.execute("sql") # $ getSql="sql" constructedSql="sql"
# Create connection via pool
async with aiopg.create_pool() as pool:
# Create Cursor via Connection
async with pool.acquire() as conn:
async with conn.cursor() as cur:
await cur.execute("sql") # $ getSql="sql" constructedSql="sql"
# Create Cursor directly
async with pool.cursor() as cur:
await cur.execute("sql") # $ getSql="sql" constructedSql="sql"
# variants using as few `async with` as possible
pool = await aiopg.create_pool()
conn = pool.acquire()
cur = await conn.cursor()
await cur.execute("sql") # $ getSql="sql" constructedSql="sql"
# Test SQLAlchemy integration
from aiopg.sa import create_engine
async def test_engine():
engine = await create_engine()
conn = await engine.acquire()
await conn.execute("sql") # $ getSql="sql" constructedSql="sql"
| mit | Python |
eb34c605fc970a70b9f97a79094997757940c9e8 | Fix boilerplate. | whilp/statzlogger | statzlogger.py | statzlogger.py | import logging
try:
NullHandler = logging.NullHandler
except AttributeError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
logging.getLogger().addHandler(NullHandler())
class StatsLogger(logging.Logger):
"""A statistics logger.
Methods stolen from szl:
collection: A simple collection or concatenation of the data.
sample: A statistical sampling of N items.
sum: An arithmetic sum of the data.
top: Statistical samplings that record the `top N' data items.
maximum: A precise sample of the N highest-weighted data items.
minimum: A precise sample of the N lowest-weighted data items.
unique: Statistical estimators for the total number of unique data items.
set: A set of size at most N. Larger sets are discarded.
quantile: Approximate quantiles (actually N-tiles) for data items from an ordered domain.
distinctsample: A uniform sample of a given size from a set of all values seen.
inversehistogram: An approximate histogram of unique values.
weightedsample: A sample of a given size from a set of all values seen, biased towards values with higher weights.
recordio: An unindexed collection written directly to a binary file.
text: An unindexed collection written directly to a plain file.
mrcounter: An integer counter that can be used to provide an accumulated count (e.g. a progress indicator) to a C++ program invoking the Sawzall interpreter.
"""
def log(self, level, msg, *args, **kwargs):
pass
class CollectionLogger(logging.Logger):
def makeRecord(self, name, level, fn, lno, msg, args, exc_info, func=None, extra=None):
rv = LogRecord()
return rv
class CollectionHandler(logging.Handler):
def __init__(self, level=logging.NOTSET):
logging.Handler.__init__(self, level)
self.collections = {}
def emit(self, record):
for index in record.indexes:
self.collections.setdefault(index, []).append(record)
class SumHandler(logging.Handler):
def __init__(self, level=logging.NOTSET):
logging.Handler.__init__(self, level)
self.
import logging
try:
from statslogger import StatsLogger
except ImportError:
class StatsLogger(logging.Logger):
def stats(self, *args, **kwags):
pass
logging.setLoggerClass(StatsLogger)
log = logging.getLogger("fans")
log.stats(("britney", "a britney song"), index=fan_age/10)
import statzlogger as szl
szl.collection("fans", ("britney", "a britney song"),
index=fan_age/10)
szl.sum("divafans", 1, index="britney")
szl.sum("sales", 0.25, index=1)
szl.sum("sales.hourly", "
| import logging
try:
NullHandler = logging.NullHandler
except ImportError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
logging.getLogger().addHandler(NullHandler())
class StatsLogger(logging.Logger):
"""A statistics logger.
Methods stolen from szl:
collection: A simple collection or concatenation of the data.
sample: A statistical sampling of N items.
sum: An arithmetic sum of the data.
top: Statistical samplings that record the `top N' data items.
maximum: A precise sample of the N highest-weighted data items.
minimum: A precise sample of the N lowest-weighted data items.
unique: Statistical estimators for the total number of unique data items.
set: A set of size at most N. Larger sets are discarded.
quantile: Approximate quantiles (actually N-tiles) for data items from an ordered domain.
distinctsample: A uniform sample of a given size from a set of all values seen.
inversehistogram: An approximate histogram of unique values.
weightedsample: A sample of a given size from a set of all values seen, biased towards values with higher weights.
recordio: An unindexed collection written directly to a binary file.
text: An unindexed collection written directly to a plain file.
mrcounter: An integer counter that can be used to provide an accumulated count (e.g. a progress indicator) to a C++ program invoking the Sawzall interpreter.
"""
def log(self, level, msg, *args, **kwargs):
pass
class CollectionLogger(logging.Logger):
def makeRecord(self, name, level, fn, lno, msg, args, exc_info, func=None, extra=None):
rv = LogRecord()
return rv
class CollectionHandler(logging.Handler):
def __init__(self, level=logging.NOTSET):
logging.Handler.__init__(self, level)
self.collections = {}
def emit(self, record):
for index in record.indexes:
self.collections.setdefault(index, []).append(record)
class SumHandler(logging.Handler):
def __init__(self, level=logging.NOTSET):
logging.Handler.__init__(self, level)
self.
import logging
try:
from statslogger import StatsLogger
except ImportError:
class StatsLogger(logging.Logger):
def stats(self, *args, **kwags):
pass
logging.setLoggerClass(StatsLogger)
log = logging.getLogger("fans")
log.stats(("britney", "a britney song"), index=fan_age/10)
import statzlogger as szl
szl.collection("fans", ("britney", "a britney song"),
index=fan_age/10)
szl.sum("divafans", 1, index="britney")
szl.sum("sales", 0.25, index=1)
szl.sum("sales.hourly", "
| isc | Python |
426aba0b0fa278e721dfc663db1b60d15dba16d5 | Test staticfiles.json beginning string | bulv1ne/django-utils,bulv1ne/django-utils | utils/tests/test_pipeline.py | utils/tests/test_pipeline.py | import os
from io import StringIO
from django.conf import settings
from django.core.management import call_command
from django.core.management.base import CommandError
from django.test import TestCase
class PipelineTestCase(TestCase):
def setUp(self):
self.file_path = os.path.join(settings.STATIC_ROOT, 'staticfiles.json')
if os.path.isfile(self.file_path):
os.remove(self.file_path)
def test_success(self):
call_command('collectstatic', '--noinput', stdout=StringIO())
call_command('clean_staticfilesjson', stdout=StringIO())
with open(self.file_path) as f:
contents = f.read()
start_content = '{\n "paths": {\n'
self.assertTrue(
contents.startswith(start_content),
"staticfiles.json doesn't start with \"{}\"".format(contents[:len(start_content)])
)
def test_missing_staticfilesjson(self):
with self.assertRaises(CommandError):
call_command('clean_staticfilesjson', stdout=StringIO())
| import os
from io import StringIO
from django.conf import settings
from django.core.management import call_command
from django.core.management.base import CommandError
from django.test import TestCase
class PipelineTestCase(TestCase):
def setUp(self):
file_path = os.path.join(settings.STATIC_ROOT, 'staticfiles.json')
if os.path.isfile(file_path):
os.remove(file_path)
def test_success(self):
call_command('collectstatic', '--noinput', stdout=StringIO())
call_command('clean_staticfilesjson', stdout=StringIO())
def test_missing_staticfilesjson(self):
with self.assertRaises(CommandError):
call_command('clean_staticfilesjson', stdout=StringIO())
| mit | Python |
ea08b388e29c83fdbbcc6d35d88627d4afe5f859 | Clean unused cruft | thiderman/storm | storm/cloud.py | storm/cloud.py | import sys
import pyinotify as inf
import asyncore
import logbook
from storm import util
from storm import conf
from storm import bolt
class EventHandler(inf.ProcessEvent):
font = conf.CONFIG['font']['name']
separator_color = conf.CONFIG['colors']['sep']
width = util.get_screen_size()
def __init__(self, *args, **kwargs):
# TODO: get LoggedClass to roll with multiple inheritance.
name = self.__class__.__name__
self.log = logbook.Logger(name)
self.log.debug('Loaded logger for {0}', name)
def setup(self):
self.left = bolt.BoltLine()
self.left.register_bolts(*conf.CONFIG['items']['left'])
self.right = bolt.BoltLine()
self.right.register_bolts(*conf.CONFIG['items']['right'])
def handle(self, event):
if event and 'debug' in conf.CONFIG:
self.log.debug('{0} on: {1}', event.maskname, event.pathname)
spacer = "^pa(%d)" % (self.width - self.right.width() - 90)
line = "%s%s%s" % (self.left.compile(), spacer, self.right.compile())
assert '\n' not in line, 'Line break in output'
sys.stdout.write('\n' + line)
sys.stdout.flush()
process_IN_DELETE = handle
process_IN_CLOSE_WRITE = handle
process_IN_MODIFY = handle
class Cloud():
def start(self):
mask = inf.IN_DELETE | inf.IN_CREATE | inf.IN_MODIFY
wm = inf.WatchManager()
eh = EventHandler()
eh.setup()
inf.AsyncNotifier(wm, eh)
wm.add_watch(conf.ROOT, mask, rec=True)
# Run the initial grab of data
eh.handle(None)
asyncore.loop()
def main():
cloud = Cloud()
cloud.start()
if __name__ == '__main__':
main()
| import sys
import pyinotify as inf
import asyncore
import logbook
from storm import util
from storm import conf
from storm import bolt
class EventHandler(inf.ProcessEvent):
# Setup some static vars that should really be in a conf file.
# TODO: Conf file plz.
font = conf.CONFIG['font']['name']
separator_color = conf.CONFIG['colors']['sep']
width = util.get_screen_size()
descriptors = {}
def __init__(self, *args, **kwargs):
# TODO: get LoggedClass to roll with multiple inheritance.
name = self.__class__.__name__
self.log = logbook.Logger(name)
self.log.debug('Loaded logger for {0}', name)
def setup(self):
self.left = bolt.BoltLine()
self.left.register_bolts(*conf.CONFIG['items']['left'])
self.right = bolt.BoltLine()
self.right.register_bolts(*conf.CONFIG['items']['right'])
def filename(self, path):
return path.split('/')[-1]
def handle(self, event):
if event and 'debug' in conf.CONFIG:
self.log.debug('{0} on: {1}', event.maskname, event.pathname)
spacer = "^pa(%d)" % (self.width - self.right.width() - 90)
line = "%s%s%s" % (self.left.compile(), spacer, self.right.compile())
assert '\n' not in line, 'Line break in output'
sys.stdout.write('\n' + line)
sys.stdout.flush()
def process_IN_CREATE(self, event):
name = self.filename(event.pathname)
if name not in self.descriptors:
self.load_descriptor(event.pathname)
self.handle(event)
process_IN_DELETE = handle
process_IN_CLOSE_WRITE = handle
process_IN_MODIFY = handle
class Cloud():
def start(self):
mask = inf.IN_DELETE | inf.IN_CREATE | inf.IN_MODIFY
wm = inf.WatchManager()
eh = EventHandler()
eh.setup()
inf.AsyncNotifier(wm, eh)
wm.add_watch(conf.ROOT, mask, rec=True)
# Run the initial grab of data
eh.handle(None)
asyncore.loop()
def main():
cloud = Cloud()
cloud.start()
if __name__ == '__main__':
main()
| mit | Python |
7954487cdf7607497e62322690eb5c497798a401 | Fix pep8 issue | alfredhq/alfred-collector | alfred_collector/process.py | alfred_collector/process.py | import msgpack
import multiprocessing
import zmq
from alfred_db.models import Report, Fix
from datetime import datetime
from markdown import markdown
from sqlalchemy import create_engine
class CollectorProcess(multiprocessing.Process):
def __init__(self, database_uri, socket_address):
super().__init__()
self.database_uri = database_uri
self.socket_address = socket_address
def run(self):
self.engine = create_engine(self.database_uri)
context = zmq.Context()
socket = context.socket(zmq.PULL)
socket.bind(self.socket_address)
while True:
msg = socket.recv()
self.handle_msg(msg)
socket.close()
context.term()
def handle_msg(self, msg):
report_id, msg_type, msg_data = msgpack.unpackb(msg, encoding='utf-8')
handlers = {
'fix': self.handle_fix,
'finish': self.handle_finish,
}
handler = handlers.get(msg_type)
if handler is not None:
handler(report_id, msg_data)
def handle_fix(self, report_id, data):
Fix.__table__.insert(bind=self.engine).execute(
description=data['description'],
description_html=markdown(data['description']),
path=data['path'],
line=data['line'],
source=data['source'],
solution=data['solution'],
report_id=report_id,
)
def handle_finish(self, report_id, data):
(Report.__table__
.update(bind=self.engine)
.where(Report.id == report_id)
.execute(error=data, finished_on=datetime.utcnow()))
| import msgpack
import multiprocessing
import zmq
from alfred_db.models import Report, Fix
from datetime import datetime
from markdown import markdown
from sqlalchemy import create_engine
class CollectorProcess(multiprocessing.Process):
def __init__(self, database_uri, socket_address):
super().__init__()
self.database_uri = database_uri
self.socket_address = socket_address
def run(self):
self.engine = create_engine(self.database_uri)
context = zmq.Context()
socket = context.socket(zmq.PULL)
socket.bind(self.socket_address)
while True:
msg = socket.recv()
self.handle_msg(msg)
socket.close()
context.term()
def handle_msg(self, msg):
report_id, msg_type, msg_data = msgpack.unpackb(msg, encoding='utf-8')
handlers = {
'fix': self.handle_fix,
'finish': self.handle_finish,
}
handler = handlers.get(msg_type)
if handler is not None:
handler(report_id, msg_data)
def handle_fix(self, report_id, data):
Fix.__table__.insert(bind=self.engine).execute(
description=data['description'],
description_html=markdown(data['description']),
path=data['path'],
line=data['line'],
source=data['source'],
solution=data['solution'],
report_id=report_id,
)
def handle_finish(self, report_id, data):
(Report.__table__
.update(bind=self.engine)
.where(Report.id==report_id)
.execute(error=data, finished_on=datetime.utcnow()))
| isc | Python |
95a25b401d5430fd8cbfcfcb3bc6c691bf2c40ad | Remove unnecessary import | adangtran87/gbf-weap | summon_list.py | summon_list.py | class SummonList:
def __init__(self, my_summons, helper_summons):
self.my_summons = my_summons
self.helper_summons = helper_summons
# Pair your summon with each friend list summon
# @return List of summon pairs
@property
def summon_pairs(self):
summon_pair_list = []
for mine in self.my_summons:
for helper in self.helper_summons:
summon_pair_list.append((mine, helper))
return summon_pair_list
| from wep_types import SummonType, Summon
class SummonList:
def __init__(self, my_summons, helper_summons):
self.my_summons = my_summons
self.helper_summons = helper_summons
# Pair your summon with each friend list summon
# @return List of summon pairs
@property
def summon_pairs(self):
summon_pair_list = []
for mine in self.my_summons:
for helper in self.helper_summons:
summon_pair_list.append((mine, helper))
return summon_pair_list
| mit | Python |
52f30bb037241ddb4b12fd5f6e3d72c6de49c0dc | make survey closed url matching a bit more restrictive | mysociety/manchester-survey,mysociety/manchester-survey,mysociety/manchester-survey,mysociety/manchester-survey,mysociety/manchester-survey | survey/urls.py | survey/urls.py | from django.conf.urls import patterns, url, include
from survey.views import *
urlpatterns = patterns('',
url(r'^about', 'survey.views.about', name='about'),
url(r'^management', 'survey.views.management', name='management'),
url(r'^contact', 'survey.views.contact', name='contact'),
url(r'^survey2/(?P<id>[0-9A-Za-z]+)-(?P<token>.+)/$', 'survey.views.survey2', name='survey2'),
url(r'^[Ss]/.*$', 'survey.views.closed', name='survey'),
url(r'^record$', 'survey.views.record', name='record'),
url(r'^record2$', 'survey.views.record2', name='record2'),
url(r'^export$', 'survey.views.export', name='export'),
)
| from django.conf.urls import patterns, url, include
from survey.views import *
urlpatterns = patterns('',
url(r'^about', 'survey.views.about', name='about'),
url(r'^management', 'survey.views.management', name='management'),
url(r'^contact', 'survey.views.contact', name='contact'),
url(r'^survey2/(?P<id>[0-9A-Za-z]+)-(?P<token>.+)/$', 'survey.views.survey2', name='survey2'),
url(r'^[Ss].*$', 'survey.views.closed', name='survey'),
url(r'^record$', 'survey.views.record', name='record'),
url(r'^record2$', 'survey.views.record2', name='record2'),
url(r'^export$', 'survey.views.export', name='export'),
)
| agpl-3.0 | Python |
8ae94fbc42d1999d12f4dd765ce2b2b7c6ddf3ad | Update test_tasks.py | lafranceinsoumise/api-django,lafranceinsoumise/api-django,lafranceinsoumise/api-django,lafranceinsoumise/api-django | agir/people/tests/test_tasks.py | agir/people/tests/test_tasks.py | from django.test import TestCase
from django.core import mail
from agir.people.models import Person
from agir.people import tasks
class PeopleTasksTestCase(TestCase):
def setUp(self):
self.person = Person.objects.create_insoumise("me@me.org", create_role=True)
def test_welcome_mail(self):
tasks.send_welcome_mail(self.person.pk, type="LFI")
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].recipients(), [self.person.email])
def test_unsubscribe_mail(self):
tasks.send_unsubscribe_email(self.person.pk)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].recipients(), [self.person.email])
def test_inactive_user_dont_receive_mail(self):
person = Person.objects.create_insoumise(
"inactiverole@me.org", create_role=True
)
person.role.is_active = False
person.role.save()
tasks.send_welcome_mail(person.pk, type="LFI")
self.assertEqual(len(mail.outbox), 0)
def test_no_role_user_receive_mail(self):
person = Person.objects.create_insoumise(
"inactiverole@me.org", create_role=False
)
person.save()
tasks.send_welcome_mail(person.pk, type="LFI")
self.assertEqual(len(mail.outbox), 1)
| from django.test import TestCase
from django.core import mail
from agir.people.models import Person
from agir.people import tasks
class PeopleTasksTestCase(TestCase):
def setUp(self):
self.person = Person.objects.create_insoumise("me@me.org", create_role=True)
def test_welcome_mail(self):
tasks.send_welcome_mail(self.person.pk, type="LFI")
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].recipients(), [self.person.email])
def test_unsubscribe_mail(self):
tasks.send_unsubscribe_email(self.person.pk)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].recipients(), [self.person.email])
def test_inactive_user_dont_receive_mail(self):
person = Person.objects.create_insoumise(
"inactiverole@me.org", create_role=True
)
person.role.is_active = False
person.role.save()
tasks.send_welcome_mail(person.pk, type="LFI")
self.assertEqual(len(mail.outbox), 0)
def test_no_role_user_receive_mail(self):
person = Person.objects.create_insoumise(
"inactiverole@me.org", create_role=False
)
person.save()
tasks.send_welcome_mail(person.pk, type="LFI")
self.assertEqual(len(mail.outbox), 1) | agpl-3.0 | Python |
29ef0c329425c0dcdc89b496a27f7c2e98134074 | update chgcar example | gkc1000/pyscf,gkc1000/pyscf,sunqm/pyscf,gkc1000/pyscf,gkc1000/pyscf,sunqm/pyscf,gkc1000/pyscf,sunqm/pyscf,sunqm/pyscf | examples/tools/06-chgcar.py | examples/tools/06-chgcar.py | #!/usr/bin/env python
'''
Write orbitals, electron density in VASP CHGCAR format.
'''
import numpy as np
from pyscf.pbc import gto, scf
from pyscf.tools import chgcar
#
# Regular CHGCAR file for crystal cell
#
cell = gto.M(atom='H 0 0 0; H 0 0 1', a=np.eye(3)*3)
mf = scf.RHF(cell).run()
# electron density
chgcar.density(cell, 'cell_h2.CHGCAR', mf.make_rdm1())
# 1st MO
chgcar.orbital(cell, 'cell_h2_mo1.CHGCAR', mf.mo_coeff[:,0])
#
# Extended mode to support molecular system. In this mode, a lattic was
# generated and the molecule was placed in the center of the unit cell.
#
from pyscf import gto, scf
mol = gto.M(atom='H 0 0 0; H 0 0 1')
mf = scf.RHF(mol).run()
# electron density
chgcar.density(mol, 'mole_h2.CHGCAR', mf.make_rdm1())
# 2nd MO
chgcar.orbital(mol, 'mole_h2_mo2.CHGCAR', mf.mo_coeff[:,1])
| #!/usr/bin/env python
'''
Write orbitals, electron density, molecular electrostatic potential in
Gaussian cube file format.
'''
import numpy as np
from pyscf.pbc import gto, scf
from pyscf.tools import chgcar
#
# Regular CHGCAR file for crystal cell
#
cell = gto.M(atom='H 0 0 0; H 0 0 1', a=np.eye(3)*3)
mf = scf.RHF(cell).run()
# electron density
chgcar.density(cell, 'cell_h2.CHGCAR', mf.make_rdm1())
# 1st MO
chgcar.orbital(cell, 'cell_h2_mo1.CHGCAR', mf.mo_coeff[:,0])
#
# Extended mode to support molecular system. In this mode, a lattic was
# generated and the molecule was placed in the center of the unit cell.
#
from pyscf import gto, scf
mol = gto.M(atom='H 0 0 0; H 0 0 1')
mf = scf.RHF(mol).run()
# electron density
chgcar.density(mol, 'mole_h2.CHGCAR', mf.make_rdm1())
# 2nd MO
chgcar.orbital(mol, 'mole_h2_mo2.CHGCAR', mf.mo_coeff[:,1])
| apache-2.0 | Python |
eeebc0d51e7d46af82fc2e27d975f540c5e56a4f | Replace old manage.py script used for testing with the default one created by django-admin | rockneurotiko/wirecloud,rockneurotiko/wirecloud,rockneurotiko/wirecloud,jpajuelo/wirecloud,jpajuelo/wirecloud,rockneurotiko/wirecloud,jpajuelo/wirecloud,jpajuelo/wirecloud | src/manage.py | src/manage.py | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| #!/usr/bin/env python
#...............................licence...........................................
#
# (C) Copyright 2008 Telefonica Investigacion y Desarrollo
# S.A.Unipersonal (Telefonica I+D)
#
# This file is part of Morfeo EzWeb Platform.
#
# Morfeo EzWeb Platform is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Morfeo EzWeb Platform is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Morfeo EzWeb Platform. If not, see <http://www.gnu.org/licenses/>.
#
# Info about members and contributors of the MORFEO project
# is available at
#
# http://morfeo-project.org
#
#...............................licence...........................................#
import sys
import django
from django.core.management import ManagementUtility, LaxOptionParser, setup_environ
from django.core.management.base import BaseCommand, handle_default_options
from django.utils.translation import string_concat, gettext_lazy as _
class EzwebManagementUtility(ManagementUtility):
def execute(self):
"""
Given the command-line arguments, this figures out which subcommand is
being run, creates a parser appropriate to that command, and runs it.
"""
# Preprocess options to extract --settings and --pythonpath.
# These options could affect the commands that are available, so they
# must be processed early.
parser = LaxOptionParser(usage="%prog subcommand [options] [args]",
version=django.get_version(),
option_list=BaseCommand.option_list)
try:
options, args = parser.parse_args(self.argv)
handle_default_options(options)
except:
pass # Ignore any option errors at this point.
try:
subcommand = self.argv[1]
except IndexError:
sys.stderr.write("Type '%s help' for usage.\n" % self.prog_name)
sys.exit(1)
if subcommand == 'help':
if len(args) > 2:
self.fetch_command(args[2]).print_help(self.prog_name, args[2])
else:
parser.print_lax_help()
sys.stderr.write(self.main_help_text() + '\n')
sys.exit(1)
# Special-cases: We want 'manage.py --version' and
# 'manage.py --help' to work, for backwards compatibility.
elif self.argv[1:] == ['--version']:
# LaxOptionParser already takes care of printing the version.
pass
elif self.argv[1:] == ['--help']:
parser.print_lax_help()
sys.stderr.write(self.main_help_text() + '\n')
else:
from django.db import connections, DEFAULT_DB_ALIAS
db = options.__dict__.get('database', DEFAULT_DB_ALIAS)
mysql_workaround = subcommand == "syncdb" and settings.DATABASES[db]['ENGINE'] == "mysql"
if mysql_workaround:
connection = connections[db]
cursor = connection.cursor()
cursor.execute("SET FOREIGN_KEY_CHECKS = 0")
self.fetch_command(subcommand).run_from_argv(self.argv)
if mysql_workaround:
cursor = connection.cursor()
cursor.execute("SET FOREIGN_KEY_CHECKS = 1")
try:
import settings # Assumed to be in the same directory.
except ImportError:
message1 = _("Error: cannot find the file 'settings.py' in the directory containing %(file)r.\n") % {'file': __file__}
message1 = string_concat(message1, _("It seems you have customized things.\n"))
message1 = string_concat(message1, _("You will have to run django-admin.py, passing it your settings module.\n"))
message1 = string_concat(message1, _("(If the file settings.py does indeed exist, it is causing an ImportError somehow.)\n"))
sys.stderr.write(message1)
sys.exit(1)
if __name__ == "__main__":
setup_environ(settings)
utility = EzwebManagementUtility()
utility.execute()
| agpl-3.0 | Python |
b323ad25f62ae82468ef3ca7f4e6a4b72e550dbc | remove default ordering in apireport | sunlightlabs/django-locksmith,sunlightlabs/django-locksmith,sunlightlabs/django-locksmith | locksmith/auth/management/commands/apireport.py | locksmith/auth/management/commands/apireport.py | import datetime
from urlparse import urljoin
from django.core.management.base import BaseCommand, CommandError
from django.conf import settings
from django.db.models import get_model, Count
from locksmith.common import apicall
APP = getattr(settings, 'LOCKSMITH_STATS_APP', 'api')
MODEL = getattr(settings, 'LOCKSMITH_STATS_MODEL', 'LogEntry')
DATE_FIELD = getattr(settings, 'LOCKSMITH_STATS_DATE_FIELD', 'timestamp')
ENDPOINT_FIELD = getattr(settings, 'LOCKSMITH_STATS_ENDPOINT_FIELD', 'method')
USER_FIELD = getattr(settings, 'LOCKSMITH_STATS_USER_FIELD', 'caller_key')
LogModel = get_model(APP, MODEL)
class Command(BaseCommand):
help = "Push a given day's logs up to the analytics hub"
args = '[date]'
requires_model_validation = False
def handle(self, date=None, *args, **options):
# calculate begin & end
if date:
begin = datetime.datetime.strptime(date, '%Y-%m-%d')
else:
begin = datetime.datetime.now() - datetime.timedelta(days=1)
date = begin.strftime('%Y-%m-%d')
end = begin + datetime.timedelta(days=1)
print 'pushing logs for %s' % date
# construct database query
timestamp_fieldname = '%s__range' % DATE_FIELD
qs = LogModel.objects.filter(**{timestamp_fieldname : (begin, end)}).order_by()
results = qs.values(ENDPOINT_FIELD, USER_FIELD).annotate(calls=Count('id'))
endpoint = urljoin(settings.LOCKSMITH_HUB_URL, 'report_calls/')
# report results
for item in results:
apicall(endpoint, settings.LOCKSMITH_SIGNING_KEY,
api=settings.LOCKSMITH_API_NAME, date=date,
endpoint=item[ENDPOINT_FIELD],
key=item[USER_FIELD], calls=item['calls'])
| import datetime
from urlparse import urljoin
from django.core.management.base import BaseCommand, CommandError
from django.conf import settings
from django.db.models import get_model, Count
from locksmith.common import apicall
APP = getattr(settings, 'LOCKSMITH_STATS_APP', 'api')
MODEL = getattr(settings, 'LOCKSMITH_STATS_MODEL', 'LogEntry')
DATE_FIELD = getattr(settings, 'LOCKSMITH_STATS_DATE_FIELD', 'timestamp')
ENDPOINT_FIELD = getattr(settings, 'LOCKSMITH_STATS_ENDPOINT_FIELD', 'method')
USER_FIELD = getattr(settings, 'LOCKSMITH_STATS_USER_FIELD', 'caller_key')
LogModel = get_model(APP, MODEL)
class Command(BaseCommand):
help = "Push a given day's logs up to the analytics hub"
args = '[date]'
requires_model_validation = False
def handle(self, date=None, *args, **options):
# calculate begin & end
if date:
begin = datetime.datetime.strptime(date, '%Y-%m-%d')
else:
begin = datetime.datetime.now() - datetime.timedelta(days=1)
date = begin.strftime('%Y-%m-%d')
end = begin + datetime.timedelta(days=1)
print 'pushing logs for %s' % date
# construct database query
timestamp_fieldname = '%s__range' % DATE_FIELD
qs = LogModel.objects.filter(**{timestamp_fieldname : (begin, end)})
results = qs.values(ENDPOINT_FIELD, USER_FIELD).annotate(calls=Count('id'))
endpoint = urljoin(settings.LOCKSMITH_HUB_URL, 'report_calls/')
# report results
for item in results:
apicall(endpoint, settings.LOCKSMITH_SIGNING_KEY,
api=settings.LOCKSMITH_API_NAME, date=date,
endpoint=item[ENDPOINT_FIELD],
key=item[USER_FIELD], calls=item['calls'])
| bsd-3-clause | Python |
c7d30ed5e5b22aeb24e378c8f74dec64100d1dd1 | remove marks for testing transposability with X = None | theislab/anndata | anndata/tests/test_transpose.py | anndata/tests/test_transpose.py | from scipy import sparse
import pytest
from anndata.tests.helpers import gen_adata, assert_equal
def test_transpose_orig():
"""
Original test for transpose, should be covered by more thorough tests below, but
keeping around just in case.
"""
adata = gen_adata((5, 3))
adata.varp = {f"varp_{k}": v for k, v in adata.varp.items()}
adata1 = adata.T
adata1.uns["test123"] = 1
assert "test123" in adata.uns
assert_equal(adata1.X.shape, (3, 5))
assert_equal(adata1.obsp.keys(), adata.varp.keys())
def _add_raw(adata, *, var_subset=slice(None)):
new = adata[:, var_subset].copy()
new.raw = adata
return new
# TODO: Cases to add:
# * Views
# * X is None should have the xfail marker removed
# * Backed
@pytest.fixture(
params=[
pytest.param(gen_adata((50, 20)), id="csr_X"),
pytest.param(gen_adata((50, 20), sparse.csc_matrix), id="csc_X"),
pytest.param(_add_raw(gen_adata((50, 20))), id="with_raw"),
pytest.param(gen_adata((20, 10), X_type=None), id="None_X"),
]
)
def adata(request):
return request.param
def test_transpose_removes_raw(adata):
"""
Since Raw must have the same `obs_names` as AnnData, but does not have the same
`var_names`, transpose doesn't really make sense for Raw. So it should just get
deleted.
"""
assert adata.T.raw is None
def test_transposed_contents(adata):
t = adata.T
if adata.X is not None:
assert_equal(adata.X.T, t.X)
else:
assert adata.X is t.X is None
assert_equal(
{k: v.T for k, v in adata.layers.items()}, {k: v for k, v in t.layers.items()}
)
assert_equal(adata.obs, t.var)
assert_equal(adata.var, t.obs)
assert_equal(dict(adata.obsm), dict(t.varm))
assert_equal(dict(adata.varm), dict(t.obsm))
assert_equal(dict(adata.obsp), dict(t.varp))
assert_equal(dict(adata.varp), dict(t.obsp))
assert_equal(adata.uns, t.uns)
def test_transpose_roundtrip(adata):
del adata.raw
assert_equal(adata, adata.T.T)
| from scipy import sparse
import pytest
from anndata.tests.helpers import gen_adata, assert_equal
def test_transpose_orig():
"""
Original test for transpose, should be covered by more thorough tests below, but
keeping around just in case.
"""
adata = gen_adata((5, 3))
adata.varp = {f"varp_{k}": v for k, v in adata.varp.items()}
adata1 = adata.T
adata1.uns["test123"] = 1
assert "test123" in adata.uns
assert_equal(adata1.X.shape, (3, 5))
assert_equal(adata1.obsp.keys(), adata.varp.keys())
def _add_raw(adata, *, var_subset=slice(None)):
new = adata[:, var_subset].copy()
new.raw = adata
return new
# TODO: Cases to add:
# * Views
# * X is None should have the xfail marker removed
# * Backed
@pytest.fixture(
params=[
pytest.param(gen_adata((50, 20)), id="csr_X"),
pytest.param(gen_adata((50, 20), sparse.csc_matrix), id="csc_X"),
pytest.param(_add_raw(gen_adata((50, 20))), id="with_raw"),
pytest.param(
gen_adata((20, 10), X_type=None), id="None_X", marks=pytest.mark.xfail
),
]
)
def adata(request):
return request.param
def test_transpose_removes_raw(adata):
"""
Since Raw must have the same `obs_names` as AnnData, but does not have the same
`var_names`, transpose doesn't really make sense for Raw. So it should just get
deleted.
"""
assert adata.T.raw is None
def test_transposed_contents(adata):
t = adata.T
if adata.X is not None:
assert_equal(adata.X.T, t.X)
else:
assert adata.X is t.X is None
assert_equal(
{k: v.T for k, v in adata.layers.items()}, {k: v for k, v in t.layers.items()}
)
assert_equal(adata.obs, t.var)
assert_equal(adata.var, t.obs)
assert_equal(dict(adata.obsm), dict(t.varm))
assert_equal(dict(adata.varm), dict(t.obsm))
assert_equal(dict(adata.obsp), dict(t.varp))
assert_equal(dict(adata.varp), dict(t.obsp))
assert_equal(adata.uns, t.uns)
def test_transpose_roundtrip(adata):
del adata.raw
assert_equal(adata, adata.T.T)
| bsd-3-clause | Python |
581220d46d6568c33148298feb1c21d4184720f5 | Fix year import. | fi-ksi/web-backend,fi-ksi/web-backend | util/year.py | util/year.py | # -*- coding: utf-8 -*-
from db import session
import model
from util import config
import util
def to_json(year, sum_points=None):
if sum_points is None: sum_points = util.task.max_points_year_dict()[year.id]
print sum_points
return {
'id': year.id,
'index': year.id,
'year': year.year,
'sum_points': sum_points[0],
'tasks_cnt': int(sum_points[1])
}
| # -*- coding: utf-8 -*-
from db import session
import model
from util import config
def to_json(year, sum_points=None):
if sum_points is None: sum_points = util.task.max_points_year_dict()[year.id]
print sum_points
return {
'id': year.id,
'index': year.id,
'year': year.year,
'sum_points': sum_points[0],
'tasks_cnt': int(sum_points[1])
}
| mit | Python |
0fe82c97db166953d821513c12b0c3662e8faa96 | Fix PyLint warnings. | ymoch/apyori | test/test_apriori.py | test/test_apriori.py | """
Tests for apyori.apriori.
"""
from nose.tools import eq_
from mock import Mock
from apyori import TransactionManager
from apyori import SupportRecord
from apyori import RelationRecord
from apyori import OrderedStatistic
from apyori import apriori
def test_empty():
"""
Test for empty data.
"""
transaction_manager = Mock(spec=TransactionManager)
def gen_support_records(*_):
""" Mock for apyori.gen_support_records. """
return iter([])
def gen_ordered_statistics(*_):
""" Mock for apyori.gen_ordered_statistics. """
yield OrderedStatistic(
frozenset(['A']), frozenset(['B']), 0.1, 0.7)
result = list(apriori(
transaction_manager,
_gen_support_records=gen_support_records,
_gen_ordered_statistics=gen_ordered_statistics,
))
eq_(result, [])
def test_normal():
"""
Test for normal data.
"""
transaction_manager = Mock(spec=TransactionManager)
min_support = 0.1
min_confidence = 0.2
max_length = 2
support_record = SupportRecord(frozenset(['A', 'B']), 0.5)
ordered_statistic1 = OrderedStatistic(
frozenset(['A']), frozenset(['B']), 0.1, 0.7)
ordered_statistic2 = OrderedStatistic(
frozenset(['A']), frozenset(['B']), 0.3, 0.5)
def gen_support_records(*args):
""" Mock for apyori.gen_support_records. """
eq_(args[1], min_support)
eq_(args[2], max_length)
yield support_record
def gen_ordered_statistics(*_):
""" Mock for apyori.gen_ordered_statistics. """
yield ordered_statistic1
yield ordered_statistic2
result = list(apriori(
transaction_manager,
min_support=min_support,
min_confidence=min_confidence,
max_length=max_length,
_gen_support_records=gen_support_records,
_gen_ordered_statistics=gen_ordered_statistics,
))
eq_(result, [RelationRecord(
support_record.items, support_record.support, [ordered_statistic2]
)])
| """
Tests for apyori.apriori.
"""
from nose.tools import eq_
from mock import Mock
from apyori import TransactionManager
from apyori import SupportRecord
from apyori import RelationRecord
from apyori import OrderedStatistic
from apyori import apriori
def test_empty():
"""
Test for empty data.
"""
transaction_manager = Mock(spec=TransactionManager)
def gen_support_records(*args):
return iter([])
def gen_ordered_statistics(*args):
yield OrderedStatistic(
frozenset(['A']), frozenset(['B']), 0.1, 0.7)
result = list(apriori(
transaction_manager,
_gen_support_records=gen_support_records,
_gen_ordered_statistics=gen_ordered_statistics,
))
eq_(result, [])
def test_normal():
"""
Test for normal data.
"""
transaction_manager = Mock(spec=TransactionManager)
min_support = 0.1
min_confidence = 0.2
max_length = 2
support_record = SupportRecord(frozenset(['A', 'B']), 0.5)
ordered_statistic1 = OrderedStatistic(
frozenset(['A']), frozenset(['B']), 0.1, 0.7)
ordered_statistic2 = OrderedStatistic(
frozenset(['A']), frozenset(['B']), 0.3, 0.5)
def gen_support_records(*args):
eq_(args[1], min_support)
eq_(args[2], max_length)
yield support_record
def gen_ordered_statistics(*args):
yield ordered_statistic1
yield ordered_statistic2
result = list(apriori(
transaction_manager,
min_support=min_support,
min_confidence=min_confidence,
max_length=max_length,
_gen_support_records=gen_support_records,
_gen_ordered_statistics=gen_ordered_statistics,
))
eq_(result, [RelationRecord(
support_record.items, support_record.support, [ordered_statistic2]
)])
| mit | Python |
1ba16f0bbfa203ee4a0d2fa0d15318912757f840 | set an analog and a digital pin | MrYsLab/PyMata | examples/capability_test.py | examples/capability_test.py | #!/usr/bin/python
"""
Copyright (c) 2013-2015 Alan Yorinks All rights reserved.
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public
License as published by the Free Software Foundation; either
version 3 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
This file demonstrates how to retrieve capability and configuration data.
The servo_config is set to illustrate the digital_response_table being marked
a servo device. Pin 12 is set to digital output and pin A0 is set to analog
imput.
Your output should like this:
PyMata Digital Response Table
[[0, 0, None], [0, 0, None], [0, 0, None], [0, 0, None], [0, 0, None],
[4, 0, None], [0, 0, None], [0, 0, None], [0, 0, None], [0, 0, None],
[0, 0, None], [0, 0, None], [1, 0, None], [0, 0, None], [0, 0, None],
[0, 0, None], [0, 0, None], [0, 0, None], [0, 0, None], [0, 0, None]]
PyMata Analog Response Table
[[0, 216, None], [0, 0, None], [0, 0, None], [0, 0, None], [0, 0, None],
[0, 0, None]]
"""
import time
import sys
import signal
from PyMata.pymata import PyMata
def signal_handler(sig, frame):
print('You pressed Ctrl+C')
if board is not None:
board.reset()
sys.exit(0)
signal.signal(signal.SIGINT, signal_handler)
# Create a PyMata instance
board = PyMata("/dev/ttyACM0")
# Set the pin mode
board.servo_config(5)
board.set_pin_mode(12, board.OUTPUT, board.DIGITAL)
board.set_pin_mode(0, board.INPUT, board.ANALOG)
# Send query request to Arduino
board.capability_query()
# Some boards take a long time to respond - adjust as needed
time.sleep(5)
print("Pin Capability Report")
print(board.get_capability_query_results())
print("PyMata Digital Response Table")
print(board.get_digital_response_table())
print("PyMata Analog Response Table")
print(board.get_analog_response_table())
| #!/usr/bin/python
__author__ = 'Copyright (c) 2015 Alan Yorinks All rights reserved.'
"""
Copyright (c) 2013 Alan Yorinks All rights reserved.
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public
License as published by the Free Software Foundation; either
version 3 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
This file demonstrates how to retrieve capability and configuration data.
The servo_config is set to illustrate the digital_response_table being marked a servo device
"""
import time
import sys
import signal
from PyMata.pymata import PyMata
def signal_handler(sig, frame):
print('You pressed Ctrl+C!!!!')
if board is not None:
board.reset()
sys.exit(0)
signal.signal(signal.SIGINT, signal_handler)
# create a PyMata instance
board = PyMata("/dev/ttyACM0")
board.servo_config(5)
# send query request to Arduino
board.capability_query()
# some boards take a long time to respond - adjust as needed
time.sleep(5)
print("Pin Capability Report")
print(board.get_capability_query_results())
print("PyMata Digital Response Table")
print(board.get_digital_response_table())
print("PyMata Analog Response Table")
print(board.get_analog_response_table())
| agpl-3.0 | Python |
40d01669d64bbdfb06f73b28db019a1806a544e8 | Fix call to train_test_split in plot_latent_crf example. Scikit-learn's train_test_split changed its behaviour for nd samples in version 0.15. | amueller/pystruct,pystruct/pystruct,pystruct/pystruct,d-mittal/pystruct,d-mittal/pystruct,massmutual/pystruct,massmutual/pystruct,wattlebird/pystruct,wattlebird/pystruct,amueller/pystruct | examples/plot_latent_crf.py | examples/plot_latent_crf.py | """
===================
Latent Dynamics CRF
===================
Solving a 2d grid problem by introducing latent variable interactions.
The input data is the same as in plot_grid_crf, a cross pattern.
But now, the center is not given an extra state. That makes the problem
much harder to solve for a pairwise model.
We can still solve it by introducing latent dynamics. In essence we allow
an additional state with different interactions, that maps to the same
state (the cross) in the ground truth.
"""
import numpy as np
import matplotlib.pyplot as plt
from sklearn.cross_validation import train_test_split
from pystruct.models import LatentGridCRF
from pystruct.learners import LatentSSVM, OneSlackSSVM
from pystruct.datasets import generate_crosses
X, Y = generate_crosses(n_samples=20, noise=5, n_crosses=1, total_size=8)
X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size=.5,
allow_nd=True)
crf = LatentGridCRF(n_states_per_label=[1, 2])
base_ssvm = OneSlackSSVM(model=crf, C=10., n_jobs=-1, inference_cache=20,
tol=.1)
clf = LatentSSVM(base_ssvm=base_ssvm)
clf.fit(X_train, Y_train)
print("loss training set: %f" % clf.score(X_train, Y_train))
print("loss test set: %f" % clf.score(X_test, Y_test))
Y_pred = clf.predict(X_test)
x, y, y_pred = X_test[1], Y_test[1], Y_pred[1]
fig, ax = plt.subplots(3, 2)
ax[0, 0].matshow(y, vmin=0, vmax=crf.n_labels - 1)
ax[0, 0].set_title("ground truth")
ax[0, 1].matshow(np.argmax(x, axis=-1),
vmin=0, vmax=crf.n_labels - 1)
ax[0, 1].set_title("unaries only")
ax[1, 0].set_visible(False)
ax[1, 1].matshow(crf.latent(x, y, clf.w),
vmin=0, vmax=crf.n_states - 1)
ax[1, 1].set_title("latent final")
ax[2, 0].matshow(crf.inference(x, clf.w),
vmin=0, vmax=crf.n_states - 1)
ax[2, 0].set_title("prediction latent")
ax[2, 1].matshow(y_pred,
vmin=0, vmax=crf.n_labels - 1)
ax[2, 1].set_title("prediction")
for a in ax.ravel():
a.set_xticks(())
a.set_yticks(())
plt.show()
| """
===================
Latent Dynamics CRF
===================
Solving a 2d grid problem by introducing latent variable interactions.
The input data is the same as in plot_grid_crf, a cross pattern.
But now, the center is not given an extra state. That makes the problem
much harder to solve for a pairwise model.
We can still solve it by introducing latent dynamics. In essence we allow
an additional state with different interactions, that maps to the same
state (the cross) in the ground truth.
"""
import numpy as np
import matplotlib.pyplot as plt
from sklearn.cross_validation import train_test_split
from pystruct.models import LatentGridCRF
from pystruct.learners import LatentSSVM, OneSlackSSVM
from pystruct.datasets import generate_crosses
X, Y = generate_crosses(n_samples=20, noise=5, n_crosses=1, total_size=8)
X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size=.5)
crf = LatentGridCRF(n_states_per_label=[1, 2])
base_ssvm = OneSlackSSVM(model=crf, C=10., n_jobs=-1, inference_cache=20,
tol=.1)
clf = LatentSSVM(base_ssvm=base_ssvm)
clf.fit(X_train, Y_train)
print("loss training set: %f" % clf.score(X_train, Y_train))
print("loss test set: %f" % clf.score(X_test, Y_test))
Y_pred = clf.predict(X_test)
x, y, y_pred = X_test[1], Y_test[1], Y_pred[1]
fig, ax = plt.subplots(3, 2)
ax[0, 0].matshow(y, vmin=0, vmax=crf.n_labels - 1)
ax[0, 0].set_title("ground truth")
ax[0, 1].matshow(np.argmax(x, axis=-1),
vmin=0, vmax=crf.n_labels - 1)
ax[0, 1].set_title("unaries only")
ax[1, 0].set_visible(False)
ax[1, 1].matshow(crf.latent(x, y, clf.w),
vmin=0, vmax=crf.n_states - 1)
ax[1, 1].set_title("latent final")
ax[2, 0].matshow(crf.inference(x, clf.w),
vmin=0, vmax=crf.n_states - 1)
ax[2, 0].set_title("prediction latent")
ax[2, 1].matshow(y_pred,
vmin=0, vmax=crf.n_labels - 1)
ax[2, 1].set_title("prediction")
for a in ax.ravel():
a.set_xticks(())
a.set_yticks(())
plt.show()
| bsd-2-clause | Python |
6502243f05a69a2cc1ec4ef2f4e36af1f2e9797f | Update version.py | CGATOxford/UMI-tools | umi_tools/version.py | umi_tools/version.py | __version__ = "0.2.5"
| __version__ = "0.2.4"
| mit | Python |
f3bc6b072aee3bcf194733dc2e06a7f199def59a | Fix DNS information expiration | guillaume-philippon/aquilon,quattor/aquilon,stdweird/aquilon,quattor/aquilon,guillaume-philippon/aquilon,quattor/aquilon,guillaume-philippon/aquilon,stdweird/aquilon,stdweird/aquilon | lib/python2.6/aquilon/server/dbwrappers/dns.py | lib/python2.6/aquilon/server/dbwrappers/dns.py | # ex: set expandtab softtabstop=4 shiftwidth=4: -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
#
# Copyright (C) 2008,2009,2010,2011 Contributor
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the EU DataGrid Software License. You should
# have received a copy of the license with this program, and the
# license is published at
# http://eu-datagrid.web.cern.ch/eu-datagrid/license.html.
#
# THE FOLLOWING DISCLAIMER APPLIES TO ALL SOFTWARE CODE AND OTHER
# MATERIALS CONTRIBUTED IN CONNECTION WITH THIS PROGRAM.
#
# THIS SOFTWARE IS LICENSED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE AND ANY WARRANTY OF NON-INFRINGEMENT, ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
# BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
# OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
# BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. THIS
# SOFTWARE MAY BE REDISTRIBUTED TO OTHERS ONLY BY EFFECTIVELY USING
# THIS OR ANOTHER EQUIVALENT DISCLAIMER AS WELL AS ANY OTHER LICENSE
# TERMS THAT MAY APPLY.
""" Helpers for managing DNS-related objects """
from sqlalchemy.orm import object_session
from aquilon.aqdb.model import Fqdn, DnsRecord
def delete_dns_record(dbdns_rec):
"""
Delete a DNS record
Deleting a DNS record is a bit tricky because we do not want to keep
orphaned FQDN entries.
"""
session = object_session(dbdns_rec)
# Lock the FQDN
q = session.query(Fqdn)
q = q.filter_by(id=dbdns_rec.fqdn_id)
q = q.with_lockmode('update')
dbfqdn = q.one()
# Delete the DNS record
session.delete(dbdns_rec)
session.flush()
# Delete the FQDN if it is orphaned
q = session.query(DnsRecord)
q = q.filter_by(fqdn_id=dbfqdn.id)
if q.count() == 0:
session.delete(dbfqdn)
else:
session.expire(dbfqdn, ['dns_records'])
| # ex: set expandtab softtabstop=4 shiftwidth=4: -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
#
# Copyright (C) 2008,2009,2010,2011 Contributor
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the EU DataGrid Software License. You should
# have received a copy of the license with this program, and the
# license is published at
# http://eu-datagrid.web.cern.ch/eu-datagrid/license.html.
#
# THE FOLLOWING DISCLAIMER APPLIES TO ALL SOFTWARE CODE AND OTHER
# MATERIALS CONTRIBUTED IN CONNECTION WITH THIS PROGRAM.
#
# THIS SOFTWARE IS LICENSED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE AND ANY WARRANTY OF NON-INFRINGEMENT, ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
# BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
# OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
# BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. THIS
# SOFTWARE MAY BE REDISTRIBUTED TO OTHERS ONLY BY EFFECTIVELY USING
# THIS OR ANOTHER EQUIVALENT DISCLAIMER AS WELL AS ANY OTHER LICENSE
# TERMS THAT MAY APPLY.
""" Helpers for managing DNS-related objects """
from sqlalchemy.orm import object_session
from aquilon.aqdb.model import Fqdn, DnsRecord
def delete_dns_record(dbdns_rec):
"""
Delete a DNS record
Deleting a DNS record is a bit tricky because we do not want to keep
orphaned FQDN entries.
"""
session = object_session(dbdns_rec)
# Lock the FQDN
q = session.query(Fqdn)
q = q.filter_by(id=dbdns_rec.fqdn_id)
q = q.with_lockmode('update')
dbfqdn = q.one()
# Delete the DNS record
session.delete(dbdns_rec)
session.flush()
# Delete the FQDN if it is orphaned
q = session.query(DnsRecord)
q = q.filter_by(fqdn_id=dbfqdn.id)
if q.count() == 0:
session.delete(dbfqdn)
else:
session.expire(dbfqdn, 'dns_records')
| apache-2.0 | Python |
9a3f69964d405def11454175937204f1b20b9d81 | update inidb.py, considering if it's still necessary | Jailman/RaspberryPiRobot,Jailman/RaspberryPiRobot,Jailman/RaspberryPiRobot,Jailman/RaspberryPiRobot | Web-Terminal/Modules/initDB.py | Web-Terminal/Modules/initDB.py | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""
Init database
Create tables
Insert data
Query data
"""
from sqlalchemy import *
from sqlalchemy.orm import *
#define egine
engine = create_engine('sqlite:///../DB/raspberrypi.db')
#bind metadata
metadata = MetaData(engine)
#create tables, init database
# pilot = Table('pilot', metadata,
# Column('id', Integer, primary_key = True),
# Column('name', String(255), unique = True),
# Column('fullname', String(255)),
# Column('email', String(255), unique = True),
# Column('password', String(255))
# )
#
# air = Table('air', metadata,
# Column('id', Integer, primary_key = True),
# Column('date', DateTime, nullable=False),
# Column('temperature', Float),
# Column('humidity', Float),
# Column('infaredetector', Boolean)
# )
#
# joystick = Table('joystick', metadata,
# Column('id', Integer, primary_key = True),
# Column('gpio', Integer),
# Column('function', String(255)),
# Column('device', String(255))
# )
#create tables, if tables exist, pass
metadata.create_all(engine)
#get db connected
conn = engine.connect()
def insert_data(table, data):
i = table.insert()
conn.execute(i, data)
# print r1.inserted_primary_key
#func usage
# data = dict(name='jailman', fullname='jailman lobo', email='jailman@sina.com', password='damnyou')
# insert_data(pilot, data)
def query(flag, table, *columns):
if flag:
s1 = select([table])
r1 = conn.execute(s1)
return r1.fetchall()
else:
qry = ''
for i in columns:
qry = qry + 'table.c.' + i + ','
qry = 'select([' + qry + '])'
s2 = eval(qry)
r2 = conn.execute(s2)
return r2.fetchall()
#func usage
# print query(True, pilot, "name", "fullname") | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""
Init database
Create tables
Insert data
Query data
"""
from sqlalchemy import *
from sqlalchemy.orm import *
#define egine
engine = create_engine('sqlite:///../DB/raspberrypirobot.db')
#bind metadata
metadata = MetaData(engine)
#create tables, init database
pilot = Table('pilot', metadata,
Column('id', Integer, primary_key = True),
Column('name', String(255), unique = True),
Column('fullname', String(255)),
Column('email', String(255), unique = True),
Column('password', String(255))
)
air = Table('air', metadata,
Column('id', Integer, primary_key = True),
Column('date', DateTime, nullable=False),
Column('temperature', Float),
Column('humidity', Float),
Column('infaredetector', Boolean)
)
joystick = Table('joystick', metadata,
Column('id', Integer, primary_key = True),
Column('gpio', Integer),
Column('function', String(255)),
Column('device', String(255))
)
#create tables, if tables exist, pass
metadata.create_all(engine)
#get db connected
conn = engine.connect()
def insert_data(table, data):
i = table.insert()
r1 = conn.execute(i, data)
# print r1.inserted_primary_key
#func usage
# data = dict(name='jailman', fullname='jailman lobo', email='jailman@sina.com', password='damnyou')
# insert_data(pilot, data)
def query(flag, table, *columns):
if flag:
s1 = select([table])
r1 = conn.execute(s1)
return r1.fetchall()
else:
qry = ''
for i in columns:
qry = qry + 'table.c.' + i + ','
qry = 'select([' + qry + '])'
s2 = eval(qry)
r2 = conn.execute(s2)
return r2.fetchall()
#func usage
# print query(True, pilot, "name", "fullname") | apache-2.0 | Python |
ddc5a7f3868c5ba311e307a4d84777a3c0a5c087 | add utf8 note | vtmp/restro-tracer | rtracer/util/__init__.py | rtracer/util/__init__.py | # -*- coding: utf-8 -*-
from .data_handling import *
| from .data_handling import *
| mit | Python |
dde43f917bc12d269c3320174527a934bdfb4e70 | add geocoords | clld/glottolog3,clld/glottolog3 | migrations/versions/20b0b763f2f9_iso_updates.py | migrations/versions/20b0b763f2f9_iso_updates.py | # coding=utf-8
"""iso updates
Revision ID: 20b0b763f2f9
Revises: 53f4e74ce460
Create Date: 2014-10-09 12:51:21.612004
"""
# revision identifiers, used by Alembic.
revision = '20b0b763f2f9'
down_revision = '53f4e74ce460'
import datetime
from alembic import op
import sqlalchemy as sa
def upgrade():
"""
gmo 6.57645 37.1381
stc -10.6771 165.835
lto 0.2975 34.6961
lks 0.1529 34.5708
lsm 0.3652 34.0335
lrt -8.4746 122.7619
aqz -12.8322 -60.9716
"""
pass
def downgrade():
pass
| # coding=utf-8
"""iso updates
Revision ID: 20b0b763f2f9
Revises: 53f4e74ce460
Create Date: 2014-10-09 12:51:21.612004
"""
# revision identifiers, used by Alembic.
revision = '20b0b763f2f9'
down_revision = '53f4e74ce460'
import datetime
from alembic import op
import sqlalchemy as sa
def upgrade():
pass
def downgrade():
pass
| mit | Python |
7b85834a7c3e8b56fbaf2c3f347714e0bed08e13 | Bump app version to 2019.5.2 | kernelci/kernelci-backend,kernelci/kernelci-backend | app/handlers/__init__.py | app/handlers/__init__.py | __version__ = "2019.5.2"
__versionfull__ = __version__
| __version__ = "2019.5.1"
__versionfull__ = __version__
| lgpl-2.1 | Python |
d331a36abbedc144fcffa75acf0f601209b8e2fb | Improve the handling of ssl._create_unverified_context() | William-Yeh/ansible-monit | files/check-latest-monit.py | files/check-latest-monit.py | #!/usr/bin/env python
#
# Check the version of latest binary distribution of monit.
#
# USAGE:
# progname
#
#
# @see https://mmonit.com/monit/dist/binary/
#
import sys
import re
import urllib
import ssl
MONIT_DIST_LINK = "http://mmonit.com/monit/dist/binary/"
REGEX_MONIT_LIST = re.compile('\salt="\[DIR\]"></td><td><a href="([^"]+)/"')
monit_versions = []
def enumerate_versions():
global monit_versions
f = None
try:
f = urllib.urlopen(MONIT_DIST_LINK)
except IOError:
# ignore [SSL: CERTIFICATE_VERIFY_FAILED] error;
# @see http://stackoverflow.com/a/28052583/714426
context = ssl._create_unverified_context()
f = urllib.urlopen(MONIT_DIST_LINK, context=context)
content = f.read()
for line in content.splitlines():
m = REGEX_MONIT_LIST.search(line)
if m:
monit_versions.append(m.group(1))
def report_latest_version():
print '{ "ok": true, "version": "%s" }' % monit_versions[-1]
sys.exit(0)
def report_none():
print '{ "ok": false, "version": "0" }'
sys.exit(0)
try:
enumerate_versions()
if len(monit_versions) > 0:
report_latest_version()
else:
report_none()
except IOError:
report_none()
| #!/usr/bin/env python
#
# Check the version of latest binary distribution of monit.
#
# USAGE:
# progname
#
#
# @see https://mmonit.com/monit/dist/binary/
#
import sys
import re
import urllib
import ssl
MONIT_DIST_LINK = "http://mmonit.com/monit/dist/binary/"
REGEX_MONIT_LIST = re.compile('\salt="\[DIR\]"></td><td><a href="([^"]+)/"')
monit_versions = []
def enumerate_versions():
global monit_versions
# ignore [SSL: CERTIFICATE_VERIFY_FAILED] error;
# @see http://stackoverflow.com/a/28052583/714426
f = urllib.urlopen(MONIT_DIST_LINK, context = ssl._create_unverified_context())
content = f.read()
for line in content.splitlines():
m = REGEX_MONIT_LIST.search(line)
if m:
monit_versions.append(m.group(1))
def report_latest_version():
print '{ "ok": true, "version": "%s" }' % monit_versions[-1]
sys.exit(0)
def report_none():
print '{ "ok": false, "version": "0" }'
sys.exit(0)
try:
enumerate_versions()
if len(monit_versions) > 0:
report_latest_version()
else:
report_none()
except IOError:
report_none()
| mit | Python |
3dd23df07d7d1f84e361c87345aafcfefeff636a | Order agonistic options to control vacuum gripper | pazeshun/jsk_apc,pazeshun/jsk_apc,pazeshun/jsk_apc,pazeshun/jsk_apc,pazeshun/jsk_apc | jsk_2016_01_baxter_apc/node_scripts/control_vacuum_gripper.py | jsk_2016_01_baxter_apc/node_scripts/control_vacuum_gripper.py | #!/usr/bin/env python
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import rospy
from std_msgs.msg import Bool
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-l', '--left', action='store_true',
help='Control left gripper')
parser.add_argument('-r', '--right', action='store_true',
help='Control right gripper')
parser.add_argument('-t', '--start', action='store_true',
help='Start vacuum gripper')
parser.add_argument('-p', '--stop', action='store_true',
help='Stop vacuum gripper')
args = parser.parse_args()
if args.start and not args.stop:
action = 'start'
elif args.stop:
action = 'stop'
else:
print('Please specify one of start or stop action.')
parser.print_help()
quit(1)
if args.left and not args.right:
limbs = ['left']
elif args.right:
limbs = ['right']
else:
limbs = ['left', 'right']
rospy.init_node('control_vacuum_gripper')
pubs = []
for limb in limbs:
pub = rospy.Publisher(
'/vacuum_gripper/limb/{}'.format(limb), Bool, queue_size=1)
pubs.append(pub)
# this sleep is necessary to register publisher in actual
rospy.sleep(1)
for limb, pub in zip(limbs, pubs):
print('{action}-ing {limb} hand vacuum gripper'
.format(action=action, limb=limb))
pub.publish(Bool(data=action == 'start'))
if __name__ == '__main__':
main()
| #!/usr/bin/env python
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import rospy
from std_msgs.msg import Bool
def main():
parser = argparse.ArgumentParser()
parser.add_argument('action', type=str, choices=['start', 'stop'])
limbs = ['left', 'right']
parser.add_argument('limb', type=str, choices=limbs, nargs='?')
args = parser.parse_args()
action = args.action
limbs = ['left', 'right'] if args.limb is None else [args.limb]
rospy.init_node('control_vacuum_gripper')
pubs = []
for limb in limbs:
pub = rospy.Publisher(
'/vacuum_gripper/limb/{}'.format(limb), Bool, queue_size=1)
pubs.append(pub)
# this sleep is necessary to register publisher in actual
rospy.sleep(1)
for limb, pub in zip(limbs, pubs):
print('{action}-ing {limb} hand vacuum gripper'
.format(action=action, limb=limb))
pub.publish(Bool(data=action == 'start'))
if __name__ == '__main__':
main()
| bsd-3-clause | Python |
74e9f4f4f64ed0c9501d2527b3948dbcde423cc6 | Increase log verbosity on exception | tdickman/jetcom-crawl | jetcomcrawl/modes/items.py | jetcomcrawl/modes/items.py | from bs4 import BeautifulSoup
import logging
from jetcomcrawl import browser
import jetcomcrawl.libs.queue
class Worker(object):
def __init__(self):
self.queue_categories = jetcomcrawl.libs.queue.Queue('queue_categories')
self.queue_items = jetcomcrawl.libs.queue.Queue('queue_items')
def work(self):
'''Keeps running indefinitely, retrieving jobs from sqs'''
while True:
# TODO: Handle no items left in queue
data = self.queue_categories.retrieve()
cid = data['cid']
page = data['page']
logging.info('Finding products for category {}, page {}'.format(cid, page))
html = browser.get('https://jet.com/search/results?category={}&page={}'.format(cid, page))
try:
soup = BeautifulSoup(html.text, 'html.parser')
results = []
for item in soup.find('div', {'class': 'products'}).findAll('div', {'class': 'product mobile'}):
url = item.a['href']
uid = url.split('/')[-1]
results.append({'uid': uid, 'url': url})
except:
logging.info(html.text)
raise
logging.info('{} products found for category {}, page {}, inserting into sqs'.format(len(results), cid, page))
self.queue_items.insert_bulk(results)
self.queue_categories.remove_processed()
| from bs4 import BeautifulSoup
import logging
from jetcomcrawl import browser
import jetcomcrawl.libs.queue
class Worker(object):
def __init__(self):
self.queue_categories = jetcomcrawl.libs.queue.Queue('queue_categories')
self.queue_items = jetcomcrawl.libs.queue.Queue('queue_items')
def work(self):
'''Keeps running indefinitely, retrieving jobs from sqs'''
while True:
# TODO: Handle no items left in queue
data = self.queue_categories.retrieve()
cid = data['cid']
page = data['page']
logging.info('Finding products for category {}, page {}'.format(cid, page))
html = browser.get('https://jet.com/search/results?category={}&page={}'.format(cid, page))
soup = BeautifulSoup(html.text, 'html.parser')
results = []
for item in soup.find('div', {'class': 'products'}).findAll('div', {'class': 'product mobile'}):
url = item.a['href']
uid = url.split('/')[-1]
results.append({'uid': uid, 'url': url})
logging.info('{} products found for category {}, page {}, inserting into sqs'.format(len(results), cid, page))
self.queue_items.insert_bulk(results)
self.queue_categories.remove_processed()
| mit | Python |
e6720d0ccab5377195d469c158192087ca591fae | fix example | openafs-contrib/afspy,openafs-contrib/afspy | examples/showVolumeGroup.py | examples/showVolumeGroup.py | #!/usr/bin/env python
import sys, os
sys.path.append("..")
from afs.util.AfsConfig import AfsConfig, setupDefaultConfig
from afs.util.options import define, options
from afs.service.VolService import VolService
import afs
setupDefaultConfig()
afs.defaultConfig.AFSCell="desy.de"
volMng = VolService()
VolName="root.cell"
VolG=volMng.getVolGroup(VolName)
print VolG
for v in VolG["RO"] :
vol=volMng.getVolume(v["id"],v["serv"],v["part"])
print vol
| #!/usr/bin/env python
import sys, os
sys.path.append("..")
from afs.util.AfsConfig import AfsConfig, setupDefaultConfig
from afs.util.options import define, options
from afs.service.VolService import VolService
import afs
setupDefaultConfig()
afs.defaultConfig.AFSCell="desy.de"
volMng = VolService()
VolName="root.cell"
VolG=volMng.getVolGroup(VolName)
print VolG
for v in VolG["RO"] :
vol = volMng.getVolume('root.cell',v["serv"],v["part"])
print vol
| bsd-2-clause | Python |
eb575e511369b9ece61bb3543496f47ff37fdc3d | Fix socket transport | polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon | polyaxon_client/transport/socket_transport.py | polyaxon_client/transport/socket_transport.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
import json
import websocket
from polyaxon_client.logger import logger
class SocketTransportMixin(object):
"""Socket operations transport."""
def socket(self, url, message_handler, headers=None):
webs = websocket.WebSocketApp(
url,
on_message=lambda ws, message: self._on_message(message_handler, message),
on_error=self._on_error,
on_close=self._on_close,
header=self._get_headers(headers)
)
return webs
def stream(self, url, message_handler, headers=None):
webs = self.socket(url=url, message_handler=message_handler, headers=headers)
webs.run_forever(ping_interval=30, ping_timeout=10)
def _on_message(self, message_handler, message):
if message_handler and message:
message_handler(json.loads(message).decode('utf-8'))
@staticmethod
def _on_error(ws, error):
if isinstance(error, (KeyboardInterrupt, SystemExit)):
logger.info('Quitting... The session will be running in the background.')
else:
logger.debug('Termination cause: %s', error)
logger.debug('Session disconnected.')
@staticmethod
def _on_close(ws):
logger.info('Session ended')
| # -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
import json
import threading
import websocket
from polyaxon_client.logger import logger
from polyaxon_client.workers.socket_worker import SocketWorker
class SocketTransportMixin(object):
"""Socket operations transport."""
def socket(self, url, message_handler, headers=None):
webs = websocket.WebSocketApp(
url,
on_message=lambda ws, message: self._on_message(message_handler, message),
on_error=self._on_error,
on_close=self._on_close,
header=self._get_headers(headers)
)
return webs
def stream(self, url, message_handler, headers=None):
webs = self.socket(url=url, message_handler=message_handler, headers=headers)
webs.run_forever(ping_interval=30, ping_timeout=10)
def _on_message(self, message_handler, message):
if message_handler and message:
message_handler(json.loads(message).decode('utf-8'))
@staticmethod
def _on_error(ws, error):
if isinstance(error, (KeyboardInterrupt, SystemExit)):
logger.info('Quitting... The session will be running in the background.')
else:
logger.debug('Termination cause: %s', error)
logger.debug('Session disconnected.')
@staticmethod
def _on_close(ws):
logger.info('Session ended')
| apache-2.0 | Python |
c8a9ef23668582098fe5c8beff0cf83530108c93 | implement test for different unit comparison | morgenst/pyfluka | tests/Calculators.py | tests/Calculators.py | import unittest
import utils.PhysicsQuantities as PQ
from plugins.SimpleCalculator import AoverLECalculator, SpecificActivityCalculator
from utils import ureg
class TestCalculator(unittest.TestCase):
def setUp(self):
self.dataIsotopeSpecificAct = {"det1": {'Isotope': [PQ.Isotope(3, 1)], 'SpecificActivity': [PQ.SpecificActivity(10.)]}}
self.dataIsotopeAct = {"det1": {'Mass' : PQ.Mass(10., ureg.kg), 'Isotope': [PQ.Isotope(3, 1)], 'Activity': [PQ.Activity(10.)]}}
self.AoverLECalculator = AoverLECalculator()
def testAoverLESimple(self):
self.AoverLECalculator.invoke(self.dataIsotopeSpecificAct)
self.assertEqual(self.dataIsotopeSpecificAct["det1"]["AoverLE"], [PQ.AoverLE(10. / 2.00E+005)])
def testAoverLEWrongInput(self):
self.assertRaises(ValueError, self.AoverLECalculator.invoke, {"det1": {'Isotope': [], 'Activity': []}})
def testSpecificActivitySimple(self):
calculator = SpecificActivityCalculator()
calculator.invoke(self.dataIsotopeAct)
self.assertEqual(self.dataIsotopeAct["det1"]["SpecificActivity"], [PQ.SpecificActivity(1.)])
def testSpecificActivitySimpleDiffMassUnit(self):
data = self.dataIsotopeAct
data["det1"]["Mass"] = PQ.Mass(10., ureg.g)
calculator = SpecificActivityCalculator()
calculator.invoke(data)
tmp = PQ.SpecificActivity(0.001)
print self.dataIsotopeAct["det1"]["SpecificActivity"][0].val
print tmp.val
self.assertEqual(self.dataIsotopeAct["det1"]["SpecificActivity"], [PQ.SpecificActivity(1000.)]) | import unittest
import utils.PhysicsQuantities as PQ
from plugins.SimpleCalculator import AoverLECalculator, SpecificActivityCalculator
from utils import ureg
class TestCalculator(unittest.TestCase):
def setUp(self):
self.dataIsotopeSpecificAct = {"det1": {'Isotope': [PQ.Isotope(3, 1)], 'SpecificActivity': [PQ.SpecificActivity(10.)]}}
self.dataIsotopeAct = {"det1": {'Mass' : PQ.Mass(10., ureg.kg), 'Isotope': [PQ.Isotope(3, 1)], 'Activity': [PQ.Activity(10.)]}}
self.AoverLECalculator = AoverLECalculator()
def testAoverLESimple(self):
self.AoverLECalculator.invoke(self.dataIsotopeSpecificAct)
self.assertEqual(self.dataIsotopeSpecificAct["det1"]["AoverLE"], [PQ.AoverLE(10. / 2.00E+005)])
def testAoverLEWrongInput(self):
self.assertRaises(ValueError, self.AoverLECalculator.invoke, {"det1": {'Isotope': [], 'Activity': []}})
def testSpecificActivitySimple(self):
calculator = SpecificActivityCalculator()
calculator.invoke(self.dataIsotopeAct)
tmp = PQ.SpecificActivity(1.)
self.assertEqual(self.dataIsotopeAct["det1"]["SpecificActivity"], [PQ.SpecificActivity(1.)]) | mit | Python |
5dbb552bc54904b1e3f4f4ac45d7192fe6107c3c | Read stopwords.txt when initialize, to avoid reading it in every update. | crista/exercises-in-programming-style,jw0201/exercises-in-programming-style,mathkann/exercises-in-programming-style,jim-thisplace/exercises-in-programming-style,Drooids/exercises-in-programming-style,aaron-goshine/exercises-in-programming-style,aaron-goshine/exercises-in-programming-style,placrosse/exercises-in-programming-style,halagoing/exercises-in-programming-style,panesofglass/exercises-in-programming-style,wolfhesse/exercises-in-programming-style,matk86/exercises-in-programming-style,aaron-goshine/exercises-in-programming-style,mathkann/exercises-in-programming-style,rajanvenkataguru/exercises-in-programming-style,GabrielNicolasAvellaneda/exercises-in-programming-style,matk86/exercises-in-programming-style,emil-mi/exercises-in-programming-style,rajanvenkataguru/exercises-in-programming-style,emil-mi/exercises-in-programming-style,wolfhesse/exercises-in-programming-style,folpindo/exercises-in-programming-style,bgamwell/exercises-in-programming-style,crista/exercises-in-programming-style,halagoing/exercises-in-programming-style,matk86/exercises-in-programming-style,mathkann/exercises-in-programming-style,GabrielNicolasAvellaneda/exercises-in-programming-style,halagoing/exercises-in-programming-style,folpindo/exercises-in-programming-style,wolfhesse/exercises-in-programming-style,jw0201/exercises-in-programming-style,matk86/exercises-in-programming-style,placrosse/exercises-in-programming-style,halagoing/exercises-in-programming-style,jw0201/exercises-in-programming-style,halagoing/exercises-in-programming-style,aaron-goshine/exercises-in-programming-style,placrosse/exercises-in-programming-style,jim-thisplace/exercises-in-programming-style,Drooids/exercises-in-programming-style,wolfhesse/exercises-in-programming-style,GabrielNicolasAvellaneda/exercises-in-programming-style,panesofglass/exercises-in-programming-style,jw0201/exercises-in-programming-style,Drooids/exercises-in-programming-style,crista/exercises-in-programming-style,mathkann/exercises-in-programming-style,panesofglass/exercises-in-programming-style,jim-thisplace/exercises-in-programming-style,bgamwell/exercises-in-programming-style,kranthikumar/exercises-in-programming-style,jim-thisplace/exercises-in-programming-style,jw0201/exercises-in-programming-style,folpindo/exercises-in-programming-style,aaron-goshine/exercises-in-programming-style,bgamwell/exercises-in-programming-style,emil-mi/exercises-in-programming-style,kranthikumar/exercises-in-programming-style,kranthikumar/exercises-in-programming-style,placrosse/exercises-in-programming-style,kranthikumar/exercises-in-programming-style,bgamwell/exercises-in-programming-style,Drooids/exercises-in-programming-style,crista/exercises-in-programming-style,GabrielNicolasAvellaneda/exercises-in-programming-style,mathkann/exercises-in-programming-style,folpindo/exercises-in-programming-style,GabrielNicolasAvellaneda/exercises-in-programming-style,Drooids/exercises-in-programming-style,panesofglass/exercises-in-programming-style,matk86/exercises-in-programming-style,folpindo/exercises-in-programming-style,jim-thisplace/exercises-in-programming-style,placrosse/exercises-in-programming-style,rajanvenkataguru/exercises-in-programming-style,wolfhesse/exercises-in-programming-style,crista/exercises-in-programming-style,emil-mi/exercises-in-programming-style,rajanvenkataguru/exercises-in-programming-style,bgamwell/exercises-in-programming-style,kranthikumar/exercises-in-programming-style,rajanvenkataguru/exercises-in-programming-style,emil-mi/exercises-in-programming-style,panesofglass/exercises-in-programming-style | 32-trinity/tf-32.py | 32-trinity/tf-32.py | #!/usr/bin/env python
import sys, re, operator, collections
class WordFrequenciesModel:
""" Models the data. In this case, we're only interested
in words and their frequencies as an end result """
freqs = {}
stopwords = set(open('../stop_words.txt').read().split(','))
def __init__(self, path_to_file):
self.update(path_to_file)
def update(self, path_to_file):
try:
words = re.findall('[a-z]{2,}', open(path_to_file).read().lower())
self.freqs = collections.Counter(w for w in words if w not in self.stopwords)
except IOError:
print "File not found"
self.freqs = {}
class WordFrequenciesView:
def __init__(self, model):
self._model = model
def render(self):
sorted_freqs = sorted(self._model.freqs.iteritems(), key=operator.itemgetter(1), reverse=True)
for (w, c) in sorted_freqs[0:25]:
print w, '-', c
class WordFrequencyController:
def __init__(self, model, view):
self._model, self._view = model, view
view.render()
def run(self):
while True:
print "Next file: "
sys.stdout.flush()
filename = sys.stdin.readline().strip()
self._model.update(filename)
self._view.render()
m = WordFrequenciesModel(sys.argv[1])
v = WordFrequenciesView(m)
c = WordFrequencyController(m, v)
c.run()
| #!/usr/bin/env python
import sys, re, operator, collections
class WordFrequenciesModel:
""" Models the data. In this case, we're only interested
in words and their frequencies as an end result """
freqs = {}
def __init__(self, path_to_file):
self.update(path_to_file)
def update(self, path_to_file):
try:
stopwords = set(open('../stop_words.txt').read().split(','))
words = re.findall('[a-z]{2,}', open(path_to_file).read().lower())
self.freqs = collections.Counter(w for w in words if w not in stopwords)
except IOError:
print "File not found"
self.freqs = {}
class WordFrequenciesView:
def __init__(self, model):
self._model = model
def render(self):
sorted_freqs = sorted(self._model.freqs.iteritems(), key=operator.itemgetter(1), reverse=True)
for (w, c) in sorted_freqs[:25]:
print w, '-', c
class WordFrequencyController:
def __init__(self, model, view):
self._model, self._view = model, view
view.render()
def run(self):
while True:
print "Next file: "
sys.stdout.flush()
filename = sys.stdin.readline().strip()
self._model.update(filename)
self._view.render()
m = WordFrequenciesModel(sys.argv[1])
v = WordFrequenciesView(m)
c = WordFrequencyController(m, v)
c.run()
| mit | Python |
4578de68e7486d33fff6b1117293777af91679c4 | Redact data from zwave_js diagnostics (#68348) | nkgilley/home-assistant,w1ll1am23/home-assistant,toddeye/home-assistant,toddeye/home-assistant,mezz64/home-assistant,mezz64/home-assistant,w1ll1am23/home-assistant,nkgilley/home-assistant | homeassistant/components/zwave_js/diagnostics.py | homeassistant/components/zwave_js/diagnostics.py | """Provides diagnostics for Z-Wave JS."""
from __future__ import annotations
from zwave_js_server.client import Client
from zwave_js_server.dump import dump_msgs
from zwave_js_server.model.node import NodeDataType
from homeassistant.components.diagnostics.util import async_redact_data
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_URL
from homeassistant.core import HomeAssistant
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import DATA_CLIENT, DOMAIN
from .helpers import get_home_and_node_id_from_device_entry
TO_REDACT = ("homeId", "location")
async def async_get_config_entry_diagnostics(
hass: HomeAssistant, config_entry: ConfigEntry
) -> list[dict]:
"""Return diagnostics for a config entry."""
msgs: list[dict] = await dump_msgs(
config_entry.data[CONF_URL], async_get_clientsession(hass)
)
return async_redact_data(msgs, TO_REDACT)
async def async_get_device_diagnostics(
hass: HomeAssistant, config_entry: ConfigEntry, device: dr.DeviceEntry
) -> NodeDataType:
"""Return diagnostics for a device."""
client: Client = hass.data[DOMAIN][config_entry.entry_id][DATA_CLIENT]
identifiers = get_home_and_node_id_from_device_entry(device)
node_id = identifiers[1] if identifiers else None
if node_id is None or node_id not in client.driver.controller.nodes:
raise ValueError(f"Node for device {device.id} can't be found")
node = client.driver.controller.nodes[node_id]
return {
"versionInfo": {
"driverVersion": client.version.driver_version,
"serverVersion": client.version.server_version,
"minSchemaVersion": client.version.min_schema_version,
"maxSchemaVersion": client.version.max_schema_version,
},
"state": async_redact_data(node.data, TO_REDACT),
}
| """Provides diagnostics for Z-Wave JS."""
from __future__ import annotations
from zwave_js_server.client import Client
from zwave_js_server.dump import dump_msgs
from zwave_js_server.model.node import NodeDataType
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_URL
from homeassistant.core import HomeAssistant
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import DATA_CLIENT, DOMAIN
from .helpers import get_home_and_node_id_from_device_entry
async def async_get_config_entry_diagnostics(
hass: HomeAssistant, config_entry: ConfigEntry
) -> list[dict]:
"""Return diagnostics for a config entry."""
msgs: list[dict] = await dump_msgs(
config_entry.data[CONF_URL], async_get_clientsession(hass)
)
return msgs
async def async_get_device_diagnostics(
hass: HomeAssistant, config_entry: ConfigEntry, device: dr.DeviceEntry
) -> NodeDataType:
"""Return diagnostics for a device."""
client: Client = hass.data[DOMAIN][config_entry.entry_id][DATA_CLIENT]
identifiers = get_home_and_node_id_from_device_entry(device)
node_id = identifiers[1] if identifiers else None
if node_id is None or node_id not in client.driver.controller.nodes:
raise ValueError(f"Node for device {device.id} can't be found")
node = client.driver.controller.nodes[node_id]
return {
"versionInfo": {
"driverVersion": client.version.driver_version,
"serverVersion": client.version.server_version,
"minSchemaVersion": client.version.min_schema_version,
"maxSchemaVersion": client.version.max_schema_version,
},
"state": node.data,
}
| apache-2.0 | Python |
734b8c99ebbbc03fd0c0f4323b110cfe785d7340 | add length to table_field() | olebole/astrometry.net,olebole/astrometry.net,olebole/astrometry.net,olebole/astrometry.net,olebole/astrometry.net,olebole/astrometry.net,olebole/astrometry.net,olebole/astrometry.net | util/pyfits_utils.py | util/pyfits_utils.py | import pyfits
class tabledata(object):
def __init__(self):
self._length = 0
def __setattr__(self, name, val):
object.__setattr__(self, name, val)
def set(self, name,val):
self.__setattr__(name, val)
def getcolumn(self, name):
return self.__dict__[name.lower()]
def __len__(self):
return self._length
def table_fields(dataorfn):
pf = None
if isinstance(dataorfn, str):
pf = pyfits.open(dataorfn)
data = pf[1].data
else:
data = dataorfn
colnames = data.dtype.names
fields = tabledata()
for c in colnames:
fields.set(c.lower(), data.field(c))
fields._length = len(data)
if pf:
pf.close()
return fields
| import pyfits
class tabledata(object):
def __setattr__(self, name, val):
object.__setattr__(self, name, val)
def set(self, name,val):
self.__setattr__(name, val)
def getcolumn(self, name):
return self.__dict__[name.lower()]
def table_fields(dataorfn):
pf = None
if isinstance(dataorfn, str):
pf = pyfits.open(dataorfn)
data = pf[1].data
else:
data = dataorfn
colnames = data.dtype.names
fields = tabledata()
for c in colnames:
fields.set(c.lower(), data.field(c))
if pf:
pf.close()
return fields
| bsd-3-clause | Python |
10245467b393dcf6d8ee2733365afb0d8257c9e4 | Test commit | kurtwood/sw_assignment3,kurtwood/sw_assignment3,kurtwood/sw_assignment3 | facebook_calculate_likes.py | facebook_calculate_likes.py | # This is a test by Timo
# First, let's query for all of the likes in your social
# network and store them in a slightly more convenient
# data structure as a dictionary keyed on each friend's
# name.
import facebook
from prettytable import PrettyTable
from collections import Counter
# Create a connection to the Graph API with your access token
g = facebook.GraphAPI('CAACEdEose0cBAK3UIzHv6Du13RD7AZCFV0UDPPfBZBPMvk8uKNni8iKbPT9rGmVZCZAx3C3ZBLIW1lZBEkTjDu8gZBaEb6orDi7etAavuHbc7ZAJYw6dpasSjFVwzfHgdvEPPFosNZBwVg7LPep1kx5XRhiszKYplOwsmXQdocZBa1EBbeXCdcZC6zsyQjqYl74pKgZD')
friends = g.get_connections("me", "friends")['data']
likes = { friend['name'] : g.get_connections(friend['id'], "likes")['data']
for friend in friends }
#print likes
friends_likes = Counter([like['name']
for friend in likes
for like in likes[friend]
if like.get('name')])
pt = PrettyTable(field_names=['Name', 'Freq'])
pt.align['Name'], pt.align['Freq'] = 'l', 'r'
[ pt.add_row(fl) for fl in friends_likes.most_common(10) ]
print 'Top 10 likes amongst friends'
print pt
# Analyze all like categories by frequency
friends_likes_categories = Counter([like['category']
for friend in likes
for like in likes[friend]])
pt2 = PrettyTable(field_names=['Category', 'Freq'])
pt2.align['Category'], pt2.align['Freq'] = 'l', 'r'
[ pt2.add_row(flc) for flc in friends_likes_categories.most_common(10) ]
print "Top 10 like categories for friends"
print pt2 | # First, let's query for all of the likes in your social
# network and store them in a slightly more convenient
# data structure as a dictionary keyed on each friend's
# name.
import facebook
from prettytable import PrettyTable
from collections import Counter
# Create a connection to the Graph API with your access token
g = facebook.GraphAPI('CAACEdEose0cBAK3UIzHv6Du13RD7AZCFV0UDPPfBZBPMvk8uKNni8iKbPT9rGmVZCZAx3C3ZBLIW1lZBEkTjDu8gZBaEb6orDi7etAavuHbc7ZAJYw6dpasSjFVwzfHgdvEPPFosNZBwVg7LPep1kx5XRhiszKYplOwsmXQdocZBa1EBbeXCdcZC6zsyQjqYl74pKgZD')
friends = g.get_connections("me", "friends")['data']
likes = { friend['name'] : g.get_connections(friend['id'], "likes")['data']
for friend in friends }
#print likes
friends_likes = Counter([like['name']
for friend in likes
for like in likes[friend]
if like.get('name')])
pt = PrettyTable(field_names=['Name', 'Freq'])
pt.align['Name'], pt.align['Freq'] = 'l', 'r'
[ pt.add_row(fl) for fl in friends_likes.most_common(10) ]
print 'Top 10 likes amongst friends'
print pt
# Analyze all like categories by frequency
friends_likes_categories = Counter([like['category']
for friend in likes
for like in likes[friend]])
pt2 = PrettyTable(field_names=['Category', 'Freq'])
pt2.align['Category'], pt2.align['Freq'] = 'l', 'r'
[ pt2.add_row(flc) for flc in friends_likes_categories.most_common(10) ]
print "Top 10 like categories for friends"
print pt2 | bsd-3-clause | Python |
bf0e192b190efbde1b594cdf85c6552b343c2f0c | Use new API correctly, v2... | janmedlock/HIV-95-vaccine | run_samples.py | run_samples.py | #!/usr/bin/python3
'''
Run simulations with parameter samples.
'''
import model
countries = model.datasheet.get_country_list()
# Move these to the front.
countries_to_plot = ['United States of America',
'South Africa',
'Uganda',
'Nigeria',
'India',
'Rwanda']
for c in countries_to_plot:
countries.remove(c)
countries = countries_to_plot + countries
def _run_country(country, target):
print('Running {}, {!s}.'.format(country, target))
parametersamples = model.parameters.Samples(country)
multisim = model.simulation.MultiSim(parametersamples, target)
return multisim
def _main():
for country in countries:
for target in model.target.all_:
if not model.results.exists(country, target):
results = _run_country(country, target)
model.results.dump(results)
if __name__ == '__main__':
_main()
| #!/usr/bin/python3
'''
Run simulations with parameter samples.
'''
import model
countries = model.datasheet.get_country_list()
# Move these to the front.
countries_to_plot = ['United States of America',
'South Africa',
'Uganda',
'Nigeria',
'India',
'Rwanda']
for c in countries_to_plot:
countries.remove(c)
countries = countries_to_plot + countries
def _run_country(country, target):
print('Running {}, {!s}.'.format(country, target))
parametersamples = model.parameters.Samples(country)
multisim = model.multisim.MultiSim(parametersamples, target)
return multisim
def _main():
for country in countries:
for target in model.target.all_:
if not model.results.exists(country, target):
results = _run_country(country, target)
model.results.dump(results)
if __name__ == '__main__':
_main()
| agpl-3.0 | Python |
29cc8951e4485d0edeade28c5e6711ccaefe9551 | fix concatenation while editing files | sbhs-forkbombers/sbhs-timetable-python,sbhs-forkbombers/sbhs-timetable-python,sbhs-forkbombers/sbhs-timetable-python | sbhstimetable/jsconcat.py | sbhstimetable/jsconcat.py | # sbhs-timetable-python
# Copyright (C) 2015 Simon Shields, James Ye
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
def concat_js(dir,out='static/belltimes.concat.js'):
out = open(out, mode='w')
for i in sorted(os.listdir(dir)):
i = os.path.join(dir, i)
if os.path.isfile(i) and i.endswith('.js') and not i.endswith('.concat.js'):
with open(i) as f:
for l in f:
out.write(l)
out.write(';')
out.close()
if __name__ == "__main__":
import yaml
print("Concatenating script/* to static/belltimes.concat.js...")
concat_js('script')
print("Done!")
with open('config.yml') as c:
cfg = yaml.load(c)
if cfg['comp']['java_exe']:
import closure, subprocess
cl_path = closure.get_jar_filename()
print("Minifying javascript using closure compiler at " + cl_path + "...")
subprocess.call([cfg['comp']['java_exe'], '-jar', cl_path, '--js', 'static/belltimes.concat.js',
'--js_output_file=static/belltimes.concat.js', '--compilation_level', 'SIMPLE_OPTIMIZATIONS',
'--language_in', 'ECMASCRIPT5_STRICT'])
print("Done!")
| # sbhs-timetable-python
# Copyright (C) 2015 Simon Shields, James Ye
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
def concat_js(dir,out='static/belltimes.concat.js'):
out = open(out, mode='w')
for i in sorted(os.listdir(dir)):
i = os.path.join(dir, i)
if os.path.isfile(i):
with open(i) as f:
for l in f:
out.write(l)
out.write(';')
out.close()
if __name__ == "__main__":
import yaml
print("Concatenating script/* to static/belltimes.concat.js...")
concat_js('script')
print("Done!")
with open('config.yml') as c:
cfg = yaml.load(c)
if cfg['comp']['java_exe']:
import closure, subprocess
cl_path = closure.get_jar_filename()
print("Minifying javascript using closure compiler at " + cl_path + "...")
subprocess.call([cfg['comp']['java_exe'], '-jar', cl_path, '--js', 'static/belltimes.concat.js',
'--js_output_file=static/belltimes.concat.js', '--compilation_level', 'SIMPLE_OPTIMIZATIONS',
'--language_in', 'ECMASCRIPT5_STRICT'])
print("Done!")
| agpl-3.0 | Python |
f6b02f3959172e2f196dbc45cbdb8df46898f6ec | add call_back to run_tardis (#860) | kaushik94/tardis,kaushik94/tardis,kaushik94/tardis,kaushik94/tardis | tardis/base.py | tardis/base.py | # functions that are important for the general usage of TARDIS
def run_tardis(config, atom_data=None, simulation_callbacks=[]):
"""
This function is one of the core functions to run TARDIS from a given
config object.
It will return a model object containing
Parameters
----------
config: ~str or ~dict
filename of configuration yaml file or dictionary
atom_data: ~str or ~tardis.atomic.AtomData
if atom_data is a string it is interpreted as a path to a file storing
the atomic data. Atomic data to use for this TARDIS simulation. If set to None, the
atomic data will be loaded according to keywords set in the configuration
[default=None]
"""
from tardis.io.config_reader import Configuration
from tardis.io.atomic import AtomData
from tardis.simulation import Simulation
if atom_data is not None:
try:
atom_data = AtomData.from_hdf(atom_data)
except TypeError:
atom_data = atom_data
try:
tardis_config = Configuration.from_yaml(config)
except TypeError:
tardis_config = Configuration.from_config_dict(config)
simulation = Simulation.from_config(tardis_config, atom_data=atom_data)
for cb in simulation_callbacks:
simulation.add_callback(cb)
simulation.run()
return simulation
| # functions that are important for the general usage of TARDIS
def run_tardis(config, atom_data=None):
"""
This function is one of the core functions to run TARDIS from a given
config object.
It will return a model object containing
Parameters
----------
config: ~str or ~dict
filename of configuration yaml file or dictionary
atom_data: ~str or ~tardis.atomic.AtomData
if atom_data is a string it is interpreted as a path to a file storing
the atomic data. Atomic data to use for this TARDIS simulation. If set to None, the
atomic data will be loaded according to keywords set in the configuration
[default=None]
"""
from tardis.io.config_reader import Configuration
from tardis.io.atomic import AtomData
from tardis.simulation import Simulation
if atom_data is not None:
try:
atom_data = AtomData.from_hdf(atom_data)
except TypeError:
atom_data = atom_data
try:
tardis_config = Configuration.from_yaml(config)
except TypeError:
tardis_config = Configuration.from_config_dict(config)
simulation = Simulation.from_config(tardis_config, atom_data=atom_data)
simulation.run()
return simulation
| bsd-3-clause | Python |
9576a3077f8fff0d68267dbf5bb2e821ef92c46c | remove storage use in download | FederatedAI/FATE,FederatedAI/FATE,FederatedAI/FATE | fate_flow/utils/download.py | fate_flow/utils/download.py | #
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
from arch.api import session
from arch.api.utils import log_utils, dtable_utils
LOGGER = log_utils.getLogger()
class Download(object):
def __init__(self):
self.taskid = ''
self.tracker = None
self.parameters = {}
def run(self, component_parameters=None, args=None):
self.parameters = component_parameters["DownloadParam"]
self.parameters["role"] = component_parameters["role"]
self.parameters["local"] = component_parameters["local"]
table_name, namespace = dtable_utils.get_table_info(config=self.parameters,
create=False)
job_id = "_".join(self.taskid.split("_")[:2])
session.init(job_id, self.parameters["work_mode"])
with open(os.path.abspath(self.parameters["output_path"]), "w") as fout:
data_table = session.get_data_table(name=table_name, namespace=namespace)
print('===== begin to export data =====')
lines = 0
for key, value in data_table.collect():
if not value:
fout.write(key + "\n")
else:
fout.write(key + self.parameters.get("delimitor", ",") + str(value) + "\n")
lines += 1
if lines % 2000 == 0:
print("===== export {} lines =====".format(lines))
print("===== export {} lines totally =====".format(lines))
print('===== export data finish =====')
print('===== export data file path:{} ====='.format(os.path.abspath(self.parameters["output_path"])))
def set_taskid(self, taskid):
self.taskid = taskid
def set_tracker(self, tracker):
self.tracker = tracker
| #
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
from arch.api import session,storage
from arch.api.utils import log_utils, dtable_utils
LOGGER = log_utils.getLogger()
class Download(object):
def __init__(self):
self.taskid = ''
self.tracker = None
self.parameters = {}
def run(self, component_parameters=None, args=None):
self.parameters = component_parameters["DownloadParam"]
self.parameters["role"] = component_parameters["role"]
self.parameters["local"] = component_parameters["local"]
table_name, namespace = dtable_utils.get_table_info(config=self.parameters,
create=False)
job_id = "_".join(self.taskid.split("_")[:2])
session.init(job_id, self.parameters["work_mode"])
with open(os.path.abspath(self.parameters["output_path"]), "w") as fout:
data_table = storage.get_data_table(name=table_name, namespace=namespace)
print('===== begin to export data =====')
lines = 0
for key, value in data_table.collect():
if not value:
fout.write(key + "\n")
else:
fout.write(key + self.parameters.get("delimitor", ",") + str(value) + "\n")
lines += 1
if lines % 2000 == 0:
print("===== export {} lines =====".format(lines))
print("===== export {} lines totally =====".format(lines))
print('===== export data finish =====')
print('===== export data file path:{} ====='.format(os.path.abspath(self.parameters["output_path"])))
def set_taskid(self, taskid):
self.taskid = taskid
def set_tracker(self, tracker):
self.tracker = tracker
| apache-2.0 | Python |
d063aab1277fff1ec711bb85f6a925d95df58e15 | Add test_set_insert_customer_text_msg | chenyang14/electronic-blackboard,SWLBot/electronic-blackboard,SWLBot/electronic-blackboard,Billy4195/electronic-blackboard,stvreumi/electronic-blackboard,stvreumi/electronic-blackboard,Billy4195/electronic-blackboard,chenyang14/electronic-blackboard,Billy4195/electronic-blackboard,SWLBot/electronic-blackboard,chenyang14/electronic-blackboard,Billy4195/electronic-blackboard,stvreumi/electronic-blackboard,stvreumi/electronic-blackboard,SWLBot/electronic-blackboard | test/test_server_api.py | test/test_server_api.py | import unittest
import os,sys
cur_dir = os.path.dirname(__file__)
par_dir = os.path.dirname(cur_dir)
sys.path.append(par_dir)
from server_api import *
from mysql import *
class Server_api(unittest.TestCase):
def test_find_now_schedule(self):
with mysql() as db:
db.connect()
self.assertNotEqual(find_now_schedule(db),-1)
def test_check_bluetooth_mode_available(self):
ret = check_bluetooth_mode_available()
self.assertNotEqual(ret,-1)
def test_get_user_birthday(self):
try:
user_id = 1
get_user_birthday(user_id)
except:
self.fail("Failed with %s" % traceback.format_exc())
def test_set_insert_customer_text_msg(self):
ret = set_insert_customer_text_msg()
self.assertEqual(ret['result'],'success')
def suite():
cases = ['test_find_now_schedule','test_check_bluetooth_mode_available','test_get_user_birthday',
'test_set_insert_customer_text_msg']
suite = unittest.TestSuite()
for case in cases:
suite.addTest(Server_api(case))
return suite
if __name__ == "__main__":
unittest.main()
| import unittest
import os,sys
cur_dir = os.path.dirname(__file__)
par_dir = os.path.dirname(cur_dir)
sys.path.append(par_dir)
from server_api import *
from mysql import *
class Server_api(unittest.TestCase):
def test_find_now_schedule(self):
with mysql() as db:
db.connect()
self.assertNotEqual(find_now_schedule(db),-1)
def test_check_bluetooth_mode_available(self):
ret = check_bluetooth_mode_available()
self.assertNotEqual(ret,-1)
def test_get_user_birthday(self):
try:
user_id = 1
get_user_birthday(user_id)
except:
self.fail("Failed with %s" % traceback.format_exc())
def suite():
cases = ['test_find_now_schedule','test_check_bluetooth_mode_available','test_get_user_birthday']
suite = unittest.TestSuite()
for case in cases:
suite.addTest(Server_api(case))
return suite
if __name__ == "__main__":
unittest.main()
| apache-2.0 | Python |
6f462b45d7dd6bc5a0d49a3329c592d32c610b9f | Update archivebox/core/forms.py | pirate/bookmark-archiver,pirate/bookmark-archiver,pirate/bookmark-archiver | archivebox/core/forms.py | archivebox/core/forms.py | __package__ = 'archivebox.core'
from django import forms
from ..util import URL_REGEX
from .utils_taggit import edit_string_for_tags, parse_tags
CHOICES = (
('0', 'depth = 0 (archive just these URLs)'),
('1', 'depth = 1 (archive these URLs and all URLs one hop away)'),
)
from ..extractors import get_default_archive_methods
ARCHIVE_METHODS = [
(name, name)
for name, _, _ in get_default_archive_methods()
]
class AddLinkForm(forms.Form):
url = forms.RegexField(label="URLs (one per line)", regex=URL_REGEX, min_length='6', strip=True, widget=forms.Textarea, required=True)
depth = forms.ChoiceField(label="Archive depth", choices=CHOICES, widget=forms.RadioSelect, initial='0')
archive_methods = forms.MultipleChoiceField(
required=False,
widget=forms.SelectMultiple,
choices=ARCHIVE_METHODS,
)
class TagWidgetMixin:
def format_value(self, value):
if value is not None and not isinstance(value, str):
value = edit_string_for_tags(value)
return super().format_value(value)
class TagWidget(TagWidgetMixin, forms.TextInput):
pass
class TagField(forms.CharField):
widget = TagWidget
def clean(self, value):
value = super().clean(value)
try:
return parse_tags(value)
except ValueError:
raise forms.ValidationError(
"Please provide a comma-separated list of tags."
)
def has_changed(self, initial_value, data_value):
# Always return False if the field is disabled since self.bound_data
# always uses the initial value in this case.
if self.disabled:
return False
try:
data_value = self.clean(data_value)
except forms.ValidationError:
pass
if initial_value is None:
initial_value = []
initial_value = [tag.name for tag in initial_value]
initial_value.sort()
return initial_value != data_value
| __package__ = 'archivebox.core'
from django import forms
from ..util import URL_REGEX
from .utils_taggit import edit_string_for_tags, parse_tags
CHOICES = (
('0', 'depth = 0 (archive just these URLs)'),
('1', 'depth = 1 (archive these URLs and all URLs one hop away)'),
)
ARCHIVE_METHODS = [
('title', 'title'),
('favicon', 'favicon'),
('wget', 'wget'),
('warc', 'warc'),
('pdf', 'pdf'),
('screenshot', 'screenshot'),
('dom', 'dom'),
('singlefile', 'singlefile'),
('git', 'git'),
('media', 'media'),
('archive_org', 'archive_org'),
]
class AddLinkForm(forms.Form):
url = forms.RegexField(label="URLs (one per line)", regex=URL_REGEX, min_length='6', strip=True, widget=forms.Textarea, required=True)
depth = forms.ChoiceField(label="Archive depth", choices=CHOICES, widget=forms.RadioSelect, initial='0')
archive_methods = forms.MultipleChoiceField(
required=False,
widget=forms.SelectMultiple,
choices=ARCHIVE_METHODS,
)
class TagWidgetMixin:
def format_value(self, value):
if value is not None and not isinstance(value, str):
value = edit_string_for_tags(value)
return super().format_value(value)
class TagWidget(TagWidgetMixin, forms.TextInput):
pass
class TagField(forms.CharField):
widget = TagWidget
def clean(self, value):
value = super().clean(value)
try:
return parse_tags(value)
except ValueError:
raise forms.ValidationError(
"Please provide a comma-separated list of tags."
)
def has_changed(self, initial_value, data_value):
# Always return False if the field is disabled since self.bound_data
# always uses the initial value in this case.
if self.disabled:
return False
try:
data_value = self.clean(data_value)
except forms.ValidationError:
pass
if initial_value is None:
initial_value = []
initial_value = [tag.name for tag in initial_value]
initial_value.sort()
return initial_value != data_value
| mit | Python |
75f9eba8a20d93132a888c4f01d65e8962bd3d7d | add id field to MetaDataSerializer | spatialdev/onadata,awemulya/fieldsight-kobocat,piqoni/onadata,GeoODK/onadata,spatialdev/onadata,smn/onadata,hnjamba/onaclone,kobotoolbox/kobocat,hnjamba/onaclone,sounay/flaminggo-test,jomolinare/kobocat,spatialdev/onadata,mainakibui/kobocat,awemulya/fieldsight-kobocat,piqoni/onadata,jomolinare/kobocat,qlands/onadata,kobotoolbox/kobocat,hnjamba/onaclone,spatialdev/onadata,kobotoolbox/kobocat,sounay/flaminggo-test,qlands/onadata,smn/onadata,piqoni/onadata,hnjamba/onaclone,sounay/flaminggo-test,smn/onadata,jomolinare/kobocat,qlands/onadata,awemulya/fieldsight-kobocat,qlands/onadata,kobotoolbox/kobocat,jomolinare/kobocat,mainakibui/kobocat,piqoni/onadata,GeoODK/onadata,mainakibui/kobocat,smn/onadata,GeoODK/onadata,mainakibui/kobocat,GeoODK/onadata,awemulya/fieldsight-kobocat,sounay/flaminggo-test | onadata/libs/serializers/metadata_serializer.py | onadata/libs/serializers/metadata_serializer.py | from django.utils.translation import ugettext as _
from rest_framework import serializers
from onadata.apps.main.models.meta_data import MetaData
METADATA_TYPES = (
('data_license', _(u"Data License")),
('form_license', _(u"Form License")),
('mapbox_layer', _(u"Mapbox Layer")),
('media', _(u"Media")),
('public_link', _(u"Public Link")),
('source', _(u"Source")),
('supporting_doc', _(u"Supporting Document"))
)
class MetaDataSerializer(serializers.HyperlinkedModelSerializer):
id = serializers.IntegerField(source='pk', read_only=True)
xform = serializers.PrimaryKeyRelatedField()
data_value = serializers.CharField(max_length=255,
required=False)
data_type = serializers.ChoiceField(choices=METADATA_TYPES)
data_file = serializers.FileField(required=False)
data_file_type = serializers.CharField(max_length=255, required=False)
class Meta:
model = MetaData
def restore_object(self, attrs, instance=None):
data_type = attrs.get('data_type')
data_file = attrs.get('data_file')
xform = attrs.get('xform')
data_value = data_file.name if data_file else attrs.get('data_value')
data_file_type = data_file.content_type if data_file else None
if instance:
return super(MetaDataSerializer, self).restore_object(
attrs, instance)
return MetaData(
data_type=data_type,
xform=xform,
data_value=data_value,
data_file=data_file,
data_file_type=data_file_type
)
| from django.utils.translation import ugettext as _
from rest_framework import serializers
from onadata.apps.main.models.meta_data import MetaData
METADATA_TYPES = (
('data_license', _(u"Data License")),
('form_license', _(u"Form License")),
('mapbox_layer', _(u"Mapbox Layer")),
('media', _(u"Media")),
('public_link', _(u"Public Link")),
('source', _(u"Source")),
('supporting_doc', _(u"Supporting Document"))
)
class MetaDataSerializer(serializers.HyperlinkedModelSerializer):
xform = serializers.PrimaryKeyRelatedField()
data_value = serializers.CharField(max_length=255,
required=False)
data_type = serializers.ChoiceField(choices=METADATA_TYPES)
data_file = serializers.FileField(required=False)
data_file_type = serializers.CharField(max_length=255, required=False)
class Meta:
model = MetaData
def restore_object(self, attrs, instance=None):
data_type = attrs.get('data_type')
data_file = attrs.get('data_file')
xform = attrs.get('xform')
data_value = data_file.name if data_file else attrs.get('data_value')
data_file_type = data_file.content_type if data_file else None
if instance:
return super(MetaDataSerializer, self).restore_object(
attrs, instance)
return MetaData(
data_type=data_type,
xform=xform,
data_value=data_value,
data_file=data_file,
data_file_type=data_file_type
)
| bsd-2-clause | Python |
320fe2d32ed58f15c841f604f32f2a1e3eb2aba6 | Update test-install | brookehus/msmbuilder,rafwiewiora/msmbuilder,cxhernandez/msmbuilder,dr-nate/msmbuilder,Eigenstate/msmbuilder,peastman/msmbuilder,dr-nate/msmbuilder,rafwiewiora/msmbuilder,mpharrigan/mixtape,dr-nate/msmbuilder,cxhernandez/msmbuilder,dr-nate/msmbuilder,mpharrigan/mixtape,msultan/msmbuilder,mpharrigan/mixtape,msultan/msmbuilder,msmbuilder/msmbuilder,mpharrigan/mixtape,peastman/msmbuilder,msultan/msmbuilder,Eigenstate/msmbuilder,brookehus/msmbuilder,msmbuilder/msmbuilder,msmbuilder/msmbuilder,peastman/msmbuilder,msmbuilder/msmbuilder,cxhernandez/msmbuilder,mpharrigan/mixtape,Eigenstate/msmbuilder,peastman/msmbuilder,cxhernandez/msmbuilder,msultan/msmbuilder,cxhernandez/msmbuilder,brookehus/msmbuilder,msmbuilder/msmbuilder,peastman/msmbuilder,brookehus/msmbuilder,dr-nate/msmbuilder,rafwiewiora/msmbuilder,msultan/msmbuilder,Eigenstate/msmbuilder,rafwiewiora/msmbuilder,Eigenstate/msmbuilder,rafwiewiora/msmbuilder,brookehus/msmbuilder | msmbuilder/project_templates/0-test-install.py | msmbuilder/project_templates/0-test-install.py | """This script tests your python installation as it pertains to running project templates.
MSMBuilder supports Python 2.7 and 3.3+ and has some necessary dependencies
like numpy, scipy, and scikit-learn. This templated project enforces
some more stringent requirements to make sure all the users are more-or-less
on the same page and to allow developers to exploit more helper libraries.
You can modify the template scripts to work for your particular set-up,
but it's probably easier to install `conda` and get the packages we
recommend.
{{header}}
"""
import textwrap
# Show intro text
paragraphs = __doc__.split('\n\n')
for p in paragraphs:
print(textwrap.fill(p))
print()
warnings = 0
## Test for python 3.5
import sys
if sys.version_info < (3, 5):
print(textwrap.fill(
"These scripts were all developed on Python 3.5, "
"which is the current, stable release of Python. "
"In particular, we use subprocess.run "
"(and probably some other new features). "
"You can easily modify the scripts to work on older versions "
"of Python, but why not just upgrade? We like Continuum's "
"Anaconda Python distribution for a simple install (without root)."
))
print()
warnings += 1
## Test for matplotlib
try:
import matplotlib as plt
except ImportError:
print(textwrap.fill(
"These scripts try to make some mildly intesting plots. "
"That requires `matplotlib`."
))
print()
warnings += 1
## Test for seaborn
try:
import seaborn as sns
except ImportError:
print(textwrap.fill(
"The default matplotlib styling is a little ugly. "
"By default, these scripts try to use `seaborn` to make prettier "
"plots. You can remove all the seaborn imports if you don't want "
"to install this library, but why not just install it? Try "
"`conda install seaborn`"
))
print()
warnings += 1
## Test for xdg-open
try:
import subprocess
subprocess.check_call(['xdg-open', '--version'])
except:
print(textwrap.fill(
"For convenience, the plotting scripts can try to use `xdg-open` "
"to pop up the result of the plot. Use the --display flag on "
"msmb TemplateProject to enable this behavior."
))
warnings += 1
## Report results
if warnings == 0:
print("I didn't find any problems with your installation! Good job.")
print()
else:
print("I found {} warnings, see above. Good luck!".format(warnings))
print()
| """This script tests your python installation as it pertains to running project templates.
MSMBuilder supports Python 2.7 and 3.3+ and has some necessary dependencies
like numpy, scipy, and scikit-learn. This templated project enforces
some more stringent requirements to make sure all the users are more-or-less
on the same page and to allow developers to exploit more helper libraries.
You can modify the template scripts to work for your particular set-up,
but it's probably easier to install `conda` and get the packages we
recommend.
{{header}}
"""
import textwrap
# Show intro text
paragraphs = __doc__.split('\n\n')
for p in paragraphs:
print(textwrap.fill(p))
print()
warnings = 0
## Test for python 3.5
import sys
if sys.version_info < (3, 5):
print(textwrap.fill(
"These scripts were all developed on Python 3.5, "
"which is the current, stable release of Python. "
"In particular, we use subprocess.run "
"(and probably some other new features). "
"You can easily modify the scripts to work on older versions "
"of Python, but why not just upgrade? We like Continuum's "
"Anaconda Python distribution for a simple install (without root)."
))
print()
warnings += 1
## Test for matplotlib
try:
import matplotlib as plt
except ImportError:
print(textwrap.fill(
"These scripts try to make some mildly intesting plots. "
"That requires `matplotlib`."
))
print()
warnings += 1
## Test for seaborn
try:
import seaborn as sns
except ImportError:
print(textwrap.fill(
"The default matplotlib styling is a little ugly. "
"By default, these scripts try to use `seaborn` to make prettier "
"plots. You can remove all the seaborn imports if you don't want "
"to install this library, but why not just install it? Try "
"`conda install seaborn`"
))
print()
warnings += 1
## Test for xdg-open
try:
import subprocess
subprocess.check_call(['xdg-open', '--version'])
except:
print(textwrap.fill(
"For convenience, the plotting scripts try to use `xdg-open` "
"to pop up the result of the plot. You can remove these calls. "
"They're typically the last line of the plotting scripts."
))
warnings += 1
## Report results
if warnings == 0:
print("I didn't find any problems with your installation! Good job.")
print()
else:
print("I found {} warnings, see above. Good luck!".format(warnings))
print()
| lgpl-2.1 | Python |
a61433f4de497d517c41226477d2d96885ad92ea | Add the current site domain to the email template context | mferenca/HMS-ecommerce,edx/ecommerce,eduNEXT/edunext-ecommerce,eduNEXT/edunext-ecommerce,eduNEXT/edunext-ecommerce,mferenca/HMS-ecommerce,eduNEXT/edunext-ecommerce,edx/ecommerce,edx/ecommerce,edx/ecommerce,mferenca/HMS-ecommerce | ecommerce/notifications/notifications.py | ecommerce/notifications/notifications.py | import logging
from oscar.core.loading import get_model, get_class
from premailer import transform
from ecommerce.extensions.analytics.utils import parse_tracking_context
log = logging.getLogger(__name__)
CommunicationEventType = get_model('customer', 'CommunicationEventType')
Dispatcher = get_class('customer.utils', 'Dispatcher')
def send_notification(user, commtype_code, context, site):
"""Send different notification mail to the user based on the triggering event.
Args:
user(obj): 'User' object to whom email is to send
commtype_code(str): Communication type code
context(dict): context to be used in the mail
"""
tracking_id, client_id, ip = parse_tracking_context(user)
tracking_pixel = 'https://www.google-analytics.com/collect?v=1&t=event&ec=email&ea=open&tid={tracking_id}' \
'&cid={client_id}&uip={ip}'.format(tracking_id=tracking_id, client_id=client_id, ip=ip)
full_name = user.get_full_name()
context.update({
'full_name': full_name,
'site_domain': site.domain,
'platform_name': site.name,
'tracking_pixel': tracking_pixel,
})
try:
event_type = CommunicationEventType.objects.get(code=commtype_code)
except CommunicationEventType.DoesNotExist:
try:
messages = CommunicationEventType.objects.get_and_render(commtype_code, context)
except Exception: # pylint: disable=broad-except
log.error('Unable to locate a DB entry or templates for communication type [%s]. '
'No notification has been sent.', commtype_code)
return
else:
messages = event_type.get_messages(context)
if messages and (messages['body'] or messages['html']):
messages['html'] = transform(messages['html'])
Dispatcher().dispatch_user_messages(user, messages, site)
| import logging
from oscar.core.loading import get_model, get_class
from premailer import transform
from ecommerce.extensions.analytics.utils import parse_tracking_context
log = logging.getLogger(__name__)
CommunicationEventType = get_model('customer', 'CommunicationEventType')
Dispatcher = get_class('customer.utils', 'Dispatcher')
def send_notification(user, commtype_code, context, site):
"""Send different notification mail to the user based on the triggering event.
Args:
user(obj): 'User' object to whom email is to send
commtype_code(str): Communication type code
context(dict): context to be used in the mail
"""
tracking_id, client_id, ip = parse_tracking_context(user)
tracking_pixel = 'https://www.google-analytics.com/collect?v=1&t=event&ec=email&ea=open&tid={tracking_id}' \
'&cid={client_id}&uip={ip}'.format(tracking_id=tracking_id, client_id=client_id, ip=ip)
full_name = user.get_full_name()
context.update({
'full_name': full_name,
'platform_name': site.name,
'tracking_pixel': tracking_pixel,
})
try:
event_type = CommunicationEventType.objects.get(code=commtype_code)
except CommunicationEventType.DoesNotExist:
try:
messages = CommunicationEventType.objects.get_and_render(commtype_code, context)
except Exception: # pylint: disable=broad-except
log.error('Unable to locate a DB entry or templates for communication type [%s]. '
'No notification has been sent.', commtype_code)
return
else:
messages = event_type.get_messages(context)
if messages and (messages['body'] or messages['html']):
messages['html'] = transform(messages['html'])
Dispatcher().dispatch_user_messages(user, messages, site)
| agpl-3.0 | Python |
9a4f02b3716a3a775ad6e9e0b0a09fb777cd1b4c | make the engines pass the start function | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | salt/engines/__init__.py | salt/engines/__init__.py | '''
Initialize the engines system. This plugin system allows for
complex services to be encapsulated within the salt plugin environment
'''
# Import python libs
import multiprocessing
# Import salt libs
import salt
class Engine(multiprocessing.Process):
'''
Execute the given engine in a new process
'''
def __init__(self, opts, service):
'''
Set up the process executor
'''
super(Engine, self).__init__()
self.opts = opts
self.service = service
def run(self):
'''
Run the master service!
'''
self.msrvc = salt.loader.msrvc(self.opts)
fun = '{0}.start'.format(self.service)
self.msrvc[fun]()
| '''
Initialize the engines system. This plugin system allows for
complex services to be encapsulated within the salt plugin environment
'''
# Import python libs
import multiprocessing
# Import salt libs
import salt
class Engine(multiprocessing.Process):
'''
Execute the given engine in a new process
'''
def __init__(self, opts, service):
'''
Set up the process executor
'''
super(Engine, self).__init__()
self.opts = opts
self.service = service
def run(self):
'''
Run the master service!
'''
self.msrvc = salt.loader.msrvc(self.opts)
self.msrvc[self.service]()
| apache-2.0 | Python |
cb29a43639989825d465199be9ae79d46a3f1458 | Remove --no-db/-db option from AIM CLI | noironetworks/aci-integration-module,noironetworks/aci-integration-module | aim/tools/cli/groups/aimcli.py | aim/tools/cli/groups/aimcli.py | # Copyright (c) 2016 Cisco Systems
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from aim import config
from aim.db import api
import click
from click import exceptions as exc
import logging
db_opts = [
config.cfg.StrOpt('connection',
deprecated_name='sql_connection',
default='',
secret=True,
help='URL to database'),
config.cfg.StrOpt('engine', default='',
help='Database engine for which script will be '
'generated when using offline migration.'),
]
@click.group()
@click.option('--config-file', '-c', multiple=True,
help='AIM static configuration file')
@click.option('--debug/--no-debug', default=True)
@click.pass_context
def aim(ctx, config_file, debug):
"""Group for AIM cli."""
if debug:
logging.getLogger().setLevel(logging.DEBUG)
else:
logging.getLogger().setLevel(logging.INFO)
if ctx.obj is None:
ctx.obj = {}
args = []
if config_file:
for file in config_file:
args += ['--config-file', file]
config.CONF(project='aim', args=args)
if not config.CONF.config_file:
raise exc.UsageError(
"Unable to find configuration file via the default "
"search paths (~/.aim/, ~/, /etc/aim/, /etc/) and "
"the '--config-file' option %s!" % config_file)
ctx.obj['conf'] = config.CONF
api._create_facade_lazily()
| # Copyright (c) 2016 Cisco Systems
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from aim import config
import click
from click import exceptions as exc
import logging
db_opts = [
config.cfg.StrOpt('connection',
deprecated_name='sql_connection',
default='',
secret=True,
help='URL to database'),
config.cfg.StrOpt('engine', default='',
help='Database engine for which script will be '
'generated when using offline migration.'),
]
@click.group()
@click.option('--config-file', '-c', multiple=True,
help='AIM static configuration file')
@click.option('--db/--no-db', default=True)
@click.option('--debug/--no-debug', default=True)
@click.pass_context
def aim(ctx, config_file, db, debug):
"""Group for AIM cli."""
if debug:
logging.getLogger().setLevel(logging.DEBUG)
else:
logging.getLogger().setLevel(logging.INFO)
if db:
try:
config.CONF.register_opts(db_opts, 'database')
except Exception:
pass
if ctx.obj is None:
ctx.obj = {}
args = []
if config_file:
for file in config_file:
args += ['--config-file', file]
config.CONF(project='aim', args=args)
if not config.CONF.config_file:
raise exc.UsageError(
"Unable to find configuration file via the default "
"search paths (~/.aim/, ~/, /etc/aim/, /etc/) and "
"the '--config-file' option %s!" % config_file)
ctx.obj['conf'] = config.CONF
| apache-2.0 | Python |
27b0a5b95e188a5bd77ae662bbb43e06dfde4749 | Use the id of the channel and unquote all of the text first. | DuaneGarber/slack-meme,joeynebula/slack-meme,tezzutezzu/slack-meme,nicolewhite/slack-meme | slack/views.py | slack/views.py | from flask import Flask, request
import requests
from urllib import unquote
app = Flask(__name__)
@app.route("/")
def meme():
domain = request.args["team_domain"]
slackbot = request.args["slackbot"]
text = request.args["text"]
channel = request.args["channel_id"]
text = unquote(text)
text = text[:-1] if text[-1] == ";" else text
params = text.split(";")
params = [x.strip().replace(" ", "-") for x in params]
if not len(params) == 3:
response = "Your syntax should be in the form: /meme template; top; bottom;"
else:
template = params[0]
top = params[1]
bottom = params[2]
response = "http://memegen.link/{0}/{1}/{2}.jpg".format(template, top, bottom)
url = "https://{0}.slack.com/services/hooks/slackbot?token={1}&channel={2}".format(domain, slackbot, channel)
requests.post(url, data=response)
return "ok", 200 | from flask import Flask, request
import requests
from urllib import unquote
app = Flask(__name__)
@app.route("/")
def meme():
domain = request.args["team_domain"]
slackbot = request.args["slackbot"]
text = request.args["text"]
channel = request.args["channel_name"]
text = text[:-1] if text[-1] == ";" else text
params = text.split(";")
params = [x.strip().replace(" ", "-") for x in params]
params = [unquote(x) for x in params]
if not len(params) == 3:
response = "Your syntax should be in the form: /meme template; top; bottom;"
else:
template = params[0]
top = params[1]
bottom = params[2]
response = "http://memegen.link/{0}/{1}/{2}.jpg".format(template, top, bottom)
url = "https://{0}.slack.com/services/hooks/slackbot?token={1}&channel=%23{2}".format(domain, slackbot, channel)
requests.post(url, data=response)
return "ok", 200 | mit | Python |
732124f5c5d75243682a4603fe5f30bfcd018c98 | use symbolic name for default route | stackforge/akanda-appliance,openstack/akanda-appliance,stackforge/akanda-appliance,dreamhost/akanda-appliance,dreamhost/akanda-appliance,openstack/akanda-appliance | akanda/router/drivers/route.py | akanda/router/drivers/route.py | import logging
from akanda.router.drivers import base
LOG = logging.getLogger(__name__)
class RouteManager(base.Manager):
EXECUTABLE = '/sbin/route'
def __init__(self, root_helper='sudo'):
super(RouteManager, self).__init__(root_helper)
def update_default(self, config):
for net in config.networks:
if not net.is_external_network:
continue
for subnet in net.subnets:
if subnet.gateway_ip:
self._set_default_gateway(subnet.gateway_ip)
def _set_default_gateway(self, gateway_ip):
version = '-inet'
if gateway_ip.version == 6:
version += '6'
try:
current = self.sudo('get', version, 'default')
except:
current = None
if current and 'no such process' not in current.lower():
return self.sudo('change', version, 'default', str(gateway_ip))
else:
return self.sudo('add', version, 'default', str(gateway_ip))
| import logging
from akanda.router.drivers import base
LOG = logging.getLogger(__name__)
class RouteManager(base.Manager):
EXECUTABLE = '/sbin/route'
def __init__(self, root_helper='sudo'):
super(RouteManager, self).__init__(root_helper)
def update_default(self, config):
for net in config.networks:
if not net.is_external_network:
continue
for subnet in net.subnets:
if subnet.cidr.version == 4 and subnet.gateway_ip:
self._set_default_gateway(subnet.gateway_ip, '0.0.0.0/0')
elif subnet.cidr.version == 6 and subnet.gateway_ip:
self._set_default_v6_gateway(subnet.gateway_ip, '::')
def _set_default_gateway(self, gateway_ip, prefix):
net = '-inet'
if ':' in prefix:
net += '6'
try:
current = self.sudo('get', net, prefix)
except:
current = None
if current and 'no such process' not in current.lower():
return self.sudo('change', net, prefix, str(gateway_ip))
else:
return self.sudo('add', net, prefix, str(gateway_ip))
| apache-2.0 | Python |
8df06e8ba6b06fee4155fee34ec9dc5e5d407b78 | bump version | Lionardo/aldryn-stripe-shop,Lionardo/aldryn-stripe-shop,Lionardo/aldryn-stripe-shop | aldryn_stripe_shop/__init__.py | aldryn_stripe_shop/__init__.py | # -*- coding: utf-8 -*-
__version__ = '0.0.3'
| # -*- coding: utf-8 -*-
__version__ = '0.0.2'
| bsd-3-clause | Python |
20595878e127db9ca10d124d412dd29242b51c4e | add mock | osallou/codetesting-with-python-training | tests/sample_test.py | tests/sample_test.py | from nose.tools import *
from nose.plugins.attrib import attr
from mysamplecode.samplecode import SampleCode
import mock
from mock import patch
import logging
import unittest
class MockLdapConn(object):
ldap_user = 'sampleuser'
ldap_user_email = 'ldap@no-reply.org'
STRATEGY_SYNC = 0
AUTH_SIMPLE = 0
STRATEGY_SYNC = 0
STRATEGY_ASYNC_THREADED = 0
SEARCH_SCOPE_WHOLE_SUBTREE = 0
GET_ALL_INFO = 0
@staticmethod
def Server(ldap_host, port, get_info):
return None
@staticmethod
def Connection(ldap_server, auto_bind=True, read_only=True, client_strategy=0, user=None, password=None, authentication=0,check_names=True):
if user is not None and password is not None:
if password == 'notest':
#raise ldap3.core.exceptions.LDAPBindError('no bind')
return None
return MockLdapConn(ldap_server)
def __init__(self, url=None):
pass
def search(self, base_dn, filter, scope, attributes=[]):
if MockLdapConn.ldap_user in filter:
self.response = [{'dn': MockLdapConn.ldap_user, 'attributes': {'mail': [MockLdapConn.ldap_user_email]}}]
return [(MockLdapConn.ldap_user, {'mail': [MockLdapConn.ldap_user_email]})]
else:
raise Exception('no match')
def unbind(self):
pass
class Test(unittest.TestCase):
def setUp(self):
self.counter = 0
def tearDown(self):
pass
@attr('count')
def testAdd(self):
sample = SampleCode()
result = sample.add(1,1)
self.assertEqual(2, result)
@patch('ldap3.Connection')
def testAuthenticate(self, initialize_mock):
mockldap = MockLdapConn()
initialize_mock.return_value = MockLdapConn.Connection(None, None, None, None)
sample = SampleCode()
is_auth = sample.authenticate('sampleuser', 'yy')
| from nose.tools import *
from nose.plugins.attrib import attr
from mysamplecode.samplecode import SampleCode
import mock
from mock import patch
import logging
import unittest
class Test(unittest.TestCase):
def setUp(self):
self.counter = 0
def tearDown(self):
pass
@attr('count')
def testAdd(self):
sample = SampleCode()
result = sample.add(1,1)
self.assertEqual(2, result)
def testAuthenticate(self, initialize_mock):
sample = SampleCode()
is_auth = sample.authenticate('sampleuser', 'yy')
| apache-2.0 | Python |
8583e5d4cd0e19c5177547833e590a6253a4cbc9 | Fix import name | econ-ark/HARK,econ-ark/HARK | examples/Calibration/SCF_distributions.py | examples/Calibration/SCF_distributions.py | # -*- coding: utf-8 -*-
"""
Created on Mon Jan 18 13:57:50 2021
@author: Mateo
"""
from HARK.datasets.SCF.WealthIncomeDist.SCFDistTools import income_wealth_dists_from_scf
import seaborn as sns
from itertools import product, starmap
import pandas as pd
# List the education levels and years
educ_lvls = ["NoHS", "HS", "College"]
years = list(range(1995, 2022, 3))
age = 25
base_year = 1992
# %% Get the distribution of aNrm and pLvl at each year x education
params = list(product([base_year], [age], educ_lvls, years))
base_year, age, education, year = list(zip(*params))
frame = pd.DataFrame(
{"base_year": base_year, "age": age, "education": education, "wave": year}
)
results = list(starmap(income_wealth_dists_from_scf, params))
frame = pd.concat([frame, pd.DataFrame(results)], axis=1)
# %% Plot time trends at different education levels.
# Formatting
frame = frame.melt(id_vars=["base_year", "age", "education", "wave"])
aux = frame["variable"].str.split("(Mean|Std)", n=1, expand=True)
frame["variable"] = aux[0]
frame["stat"] = aux[1]
# Plot
g = sns.FacetGrid(frame, col="stat", row="variable", hue="education", sharey=True)
g.map(sns.scatterplot, "wave", "value", alpha=0.7)
g.add_legend()
| # -*- coding: utf-8 -*-
"""
Created on Mon Jan 18 13:57:50 2021
@author: Mateo
"""
from HARK.datasets.SCF.WealthIncomeDist.parser import income_wealth_dists_from_scf
import seaborn as sns
from itertools import product, starmap
import pandas as pd
# List the education levels and years
educ_lvls = ["NoHS", "HS", "College"]
years = list(range(1995, 2022, 3))
age = 25
base_year = 1992
# %% Get the distribution of aNrm and pLvl at each year x education
params = list(product([base_year], [age], educ_lvls, years))
base_year, age, education, year = list(zip(*params))
frame = pd.DataFrame(
{"base_year": base_year, "age": age, "education": education, "wave": year}
)
results = list(starmap(income_wealth_dists_from_scf, params))
frame = pd.concat([frame, pd.DataFrame(results)], axis=1)
# %% Plot time trends at different education levels.
# Formatting
frame = frame.melt(id_vars=["base_year", "age", "education", "wave"])
aux = frame["variable"].str.split("(Mean|Std)", n=1, expand=True)
frame["variable"] = aux[0]
frame["stat"] = aux[1]
# Plot
g = sns.FacetGrid(frame, col="stat", row="variable", hue="education", sharey=True)
g.map(sns.scatterplot, "wave", "value", alpha=0.7)
g.add_legend()
| apache-2.0 | Python |
71a7d3197719ee64bd488704a0ba990a140a2971 | fix event filter in sms | sunlightlabs/tcamp,sunlightlabs/tcamp,sunlightlabs/tcamp,sunlightlabs/tcamp | tcamp/sms/views.py | tcamp/sms/views.py | from django_twilio.views import sms
from django_twilio.decorators import twilio_view
from django.utils import timezone
from dateutil.parser import parse as dateparse
from sked.models import Event, Session
@twilio_view
def coming_up(request, message, to=None, sender=None, action=None, method=None,
status_callback=None):
try:
sessions = Session.objects.filter(event=Event.objects.current(), is_public=True)
inmsg = request.REQUEST.get('Body').strip() or 'next'
if inmsg.lower() == 'next':
message = _as_sms(Session.objects.next())
if inmsg.lower() == 'now':
message = _as_sms(Session.objects.current())
else:
try:
ts = dateparse('%s').replace(tzinfo=timezone.get_current_timezone())
message = _as_sms(sessions.filter(start_time=ts))
except:
message = 'Unable to parse that time. Try something like "4:30", or "next"'
except Exception as e:
message = e
return sms(request, message, to, sender, action, method, status_callback)
def _as_sms(qset):
msg = 'No events.'
if not qset.count():
return msg
tm = qset[0].start_time.astimezone(timezone.get_current_timezone())
msg = u'At %s:\n' % tm.strftime('%-I:%M')
msg += u'\n\n'.join(['%s (%s)' % (s.title, s.location.name) for s in qset])
return msg
| from django_twilio.views import sms
from django_twilio.decorators import twilio_view
from django.utils import timezone
from dateutil.parser import parse as dateparse
from sked.models import Event, Session
@twilio_view
def coming_up(request, message, to=None, sender=None, action=None, method=None,
status_callback=None):
try:
sessions = Session.objects.filter(event_id=Event.objects.current(), is_public=True)
inmsg = request.REQUEST.get('Body').strip() or 'next'
if inmsg.lower() == 'next':
message = _as_sms(Session.objects.next())
if inmsg.lower() == 'now':
message = _as_sms(Session.objects.current())
else:
try:
ts = dateparse('%s').replace(tzinfo=timezone.get_current_timezone())
message = _as_sms(sessions.filter(start_time=ts))
except:
message = 'Unable to parse that time. Try something like "4:30", or "next"'
except Exception as e:
message = e
return sms(request, message, to, sender, action, method, status_callback)
def _as_sms(qset):
msg = 'No events.'
if not qset.count():
return msg
tm = qset[0].start_time.astimezone(timezone.get_current_timezone())
msg = u'At %s:\n' % tm.strftime('%-I:%M')
msg += u'\n\n'.join(['%s (%s)' % (s.title, s.location.name) for s in qset])
return msg
| bsd-3-clause | Python |
0d0ac33dcb17692555a673883d42505f4716fcbd | Fix whitespace | glasnost/kremlin,glasnost/kremlin,glasnost/kremlin | kremlin/config_defaults.py | kremlin/config_defaults.py | """
# # #### ##### # # ##### # # # #
# # # # # ## ## # # # ## # #
### #### #### # # # # # # # # #####
# # # # # # # # ## # # #
# # # ##### # # # # # # # #
Kremlin Magical Everything System
Glasnost Image Board and Boredom Inhibitor
"""
""" Configuration Defaults """
DEBUG = False # Debug mode
TESTING = False # Testing mode (unit testing)
SECRET_KEY = None # Session secret key
USE_X_SENDFILE = False # Enable web server sendfile support
LOGGER_NAME = "kremlin" # Main logger name
#SERVER_NAME = "localhost" # Default server name
MAX_CONTENT_LENGTH = 32 * 1024 * 1024 # 32 Megabytes
CSRF_ENABLED = True # WTForms Cross Site Request Forgery Prevention
UPLOADED_IMAGES_DEST = ""
#UPLOAD_DEFAULT_URL="..."
| """
# # #### ##### # # ##### # # # #
# # # # # ## ## # # # ## # #
### #### #### # # # # # # # # #####
# # # # # # # # ## # # #
# # # ##### # # # # # # # #
Kremlin Magical Everything System
Glasnost Image Board and Boredom Inhibitor
"""
""" Configuration Defaults """
DEBUG = False # Debug mode
TESTING = False # Testing mode (unit testing)
SECRET_KEY = None # Session secret key
USE_X_SENDFILE = False # Enable web server sendfile support
LOGGER_NAME = "kremlin" # Main logger name
#SERVER_NAME = "localhost" # Default server name
MAX_CONTENT_LENGTH = 32 * 1024 * 1024 # 32 Megabytes
CSRF_ENABLED = True # WTForms Cross Site Request Forgery Prevention
UPLOADED_IMAGES_DEST=""
#UPLOAD_DEFAULT_URL="..."
| bsd-2-clause | Python |
ddd45afa0708682bb11d606e03e38aed111d7b9c | Implement Big Banana, Deviate Banana, Rotten Banana | liujimj/fireplace,Ragowit/fireplace,butozerca/fireplace,butozerca/fireplace,smallnamespace/fireplace,amw2104/fireplace,smallnamespace/fireplace,beheh/fireplace,NightKev/fireplace,Meerkov/fireplace,Meerkov/fireplace,liujimj/fireplace,oftc-ftw/fireplace,Ragowit/fireplace,amw2104/fireplace,jleclanche/fireplace,oftc-ftw/fireplace | fireplace/cards/game/all.py | fireplace/cards/game/all.py | """
GAME set and other special cards
"""
from ..utils import *
# The Coin
class GAME_005:
play = ManaThisTurn(CONTROLLER, 1)
# Big Banana
class TB_006:
play = Buff(TARGET, "TB_006e")
# Deviate Banana
class TB_007:
play = Buff(TARGET, "TB_007e")
# Rotten Banana
class TB_008:
play = Hit(TARGET, 1)
| """
GAME set and other special cards
"""
from ..utils import *
# The Coin
class GAME_005:
play = ManaThisTurn(CONTROLLER, 1)
| agpl-3.0 | Python |
cef855415f447f802c3777468f34e19e85ca7238 | undo doc test change for linux/mac | simpeg/discretize,simpeg/discretize,simpeg/discretize | tests/docs/test_docs.py | tests/docs/test_docs.py | import subprocess
import unittest
import os
import platform
class Doc_Test(unittest.TestCase):
@property
def path_to_docs(self):
dirname, file_name = os.path.split(os.path.abspath(__file__))
return dirname.split(os.path.sep)[:-2] + ["docs"]
def test_html(self):
wd = os.getcwd()
os.chdir(os.path.sep.join(self.path_to_docs))
if platform.system() != "Windows":
response = subprocess.run(["make", "html"])
self.assertTrue(response.returncode == 0)
else:
response = subprocess.call(["make", "html"], shell=True)
self.assertTrue(response == 0)
os.chdir(wd)
def test_linkcheck(self):
wd = os.getcwd()
os.chdir(os.path.sep.join(self.path_to_docs))
if platform.system() != "Windows":
response = subprocess.run(["make", "linkcheck"])
self.assertTrue(response.returncode == 0)
else:
response = subprocess.call(["make", "linkcheck"], shell=True)
self.assertTrue(response == 0)
os.chdir(wd)
if __name__ == "__main__":
unittest.main()
| import subprocess
import unittest
import os
import platform
class Doc_Test(unittest.TestCase):
@property
def path_to_docs(self):
dirname, file_name = os.path.split(os.path.abspath(__file__))
return dirname.split(os.path.sep)[:-2] + ["docs"]
def test_html(self):
wd = os.getcwd()
os.chdir(os.path.sep.join(self.path_to_docs))
if platform.system() != "Windows":
response = subprocess.run(["make", "html-noplot"])
self.assertTrue(response.returncode == 0)
else:
response = subprocess.call(["make", "html"], shell=True)
self.assertTrue(response == 0)
os.chdir(wd)
def test_linkcheck(self):
wd = os.getcwd()
os.chdir(os.path.sep.join(self.path_to_docs))
if platform.system() != "Windows":
response = subprocess.run(["make", "linkcheck-noplot"])
self.assertTrue(response.returncode == 0)
else:
response = subprocess.call(["make", "linkcheck"], shell=True)
self.assertTrue(response == 0)
os.chdir(wd)
if __name__ == "__main__":
unittest.main()
| mit | Python |
ee7f9316246246a02e1cf91ac1f41b431a592a38 | Solve for ini3 | DanZBishop/Rosalind | ini3/ini3.py | ini3/ini3.py | #!/usr/bin/python
import argparse
parser = argparse.ArgumentParser(description="Slices a given string between given indices")
parser.add_argument("input_string", type=str, nargs=1, help="String to slice")
parser.add_argument("slice_ranges", type=int, nargs=4, help="Indices to slice")
args = parser.parse_args()
string = args.input_string[0]
print string[args.slice_ranges[0]:args.slice_ranges[1]+1] + " " + string[args.slice_ranges[2]:args.slice_ranges[3]+1] | #!/usr/bin/python
| apache-2.0 | Python |
4198429c7049b156561a6cb5fe0e7dbc27fb8648 | add tests for some properties with predefined interger types | ClusterHQ/pyzfs | tests/test_nvlist.py | tests/test_nvlist.py | import json
from libzfs_core.nvlist import *
from libzfs_core.nvlist import _lib
props_in = {
"key1": "str",
"key2": 10,
"key3": {
"skey1": True,
"skey2": None,
"skey3": [
True,
False,
True
]
},
"key4": [
"ab",
"bc"
],
"key5": [
2 ** 64 - 1,
1,
2,
3
],
"key6": [
uint32_t(10),
uint32_t(11)
],
"key7": [
{
"skey71": "a",
"skey72": "b",
},
{
"skey71": "c",
"skey72": "d",
},
{
"skey71": "e",
"skey72": "f",
}
],
"type": 2 ** 32 - 1,
"pool_context": -(2 ** 31)
}
props_out = {}
with nvlist_in(props_in) as x:
print "Dumping a C nvlist_t produced from a python dictionary:"
print "(ignore 'bad config type 24' message)"
_lib.dump_nvlist(x, 2)
with nvlist_out(props_out) as y:
_lib.nvlist_dup(x, y, 0)
print "\n\n"
print "Dumping a dictionary reconstructed from the nvlist_t:"
print json.dumps(props_out, sort_keys=True, indent=4)
| import json
from libzfs_core.nvlist import *
from libzfs_core.nvlist import _lib
props_in = {
"key1": "str",
"key2": 10,
"key3": {
"skey1": True,
"skey2": None,
"skey3": [
True,
False,
True
]
},
"key4": [
"ab",
"bc"
],
"key5": [
2 ** 64 - 1,
1,
2,
3
],
"key6": [
uint32_t(10),
uint32_t(11)
],
"key7": [
{
"skey71": "a",
"skey72": "b",
},
{
"skey71": "c",
"skey72": "d",
},
{
"skey71": "e",
"skey72": "f",
}
]
}
props_out = {}
with nvlist_in(props_in) as x:
print "Dumping a C nvlist_t produced from a python dictionary:"
print "(ignore 'bad config type 24' message)"
_lib.dump_nvlist(x, 2)
with nvlist_out(props_out) as y:
_lib.nvlist_dup(x, y, 0)
print "\n\n"
print "Dumping a dictionary reconstructed from the nvlist_t:"
print json.dumps(props_out, sort_keys=True, indent=4)
| apache-2.0 | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.