blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 281 | content_id stringlengths 40 40 | detected_licenses listlengths 0 57 | license_type stringclasses 2
values | repo_name stringlengths 6 116 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 313
values | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 18.2k 668M ⌀ | star_events_count int64 0 102k | fork_events_count int64 0 38.2k | gha_license_id stringclasses 17
values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 107
values | src_encoding stringclasses 20
values | language stringclasses 1
value | is_vendor bool 2
classes | is_generated bool 2
classes | length_bytes int64 4 6.02M | extension stringclasses 78
values | content stringlengths 2 6.02M | authors listlengths 1 1 | author stringlengths 0 175 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
4c32a495bfbdac69877d7c9e4948d4c7ad7fb658 | e2b003488a88a671868291ed5889d208bc07c416 | /GasPlatform/mygas/models.py | dff8372f26663a382c2d3b3d0caf4d13ea48f9c3 | [] | no_license | Kcrystalwll/FeijuProject | 4409ed334670e231e71fa47f2e1dc740f1734e9a | 89def3887adc9c833bcf8c0181a51313275ce2e0 | refs/heads/master | 2020-06-05T03:19:18.336770 | 2019-06-17T07:27:41 | 2019-06-17T07:27:41 | 192,295,513 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 26,834 | py | from django.db import models
from rbac.models import User_info
from django.contrib.auth.models import User
# Create your models here.
#管理所有厂商
class Manufacture(models.Model):
    """Gas-meter manufacturer master record (管理所有厂商).

    Referenced via ForeignKey('Manufacture') by GS_MeterTypeInfo and the
    Meter_* test-tracking tables.
    """
    # Display name of the manufacturer (生产厂商).
    ManufactureName = models.CharField(max_length=30,verbose_name="生产厂商")
    # Two-character manufacturer code (生产厂商代码); per the MeterId comment
    # below, this code is embedded in meter serial numbers.
    code = models.CharField(max_length=2,verbose_name="生产厂商代码")

    class Meta:
        verbose_name_plural = "生产厂商表"
        verbose_name = "生产厂商"

    def __str__(self):
        # Show the manufacturer name in admin and select widgets.
        return self.ManufactureName
class GS_MeterTypeInfo(models.Model):
    """Master record of one produced meter and its submit/review/allocate/test workflow."""
    # on_delete=models.CASCADE reproduces the implicit default of Django < 2.0;
    # the argument is mandatory from Django 2.0 onwards.
    user = models.ForeignKey(User_info, on_delete=models.CASCADE)  # 和登录用户相连(录入人员) data-entry user
    check_user = models.CharField(max_length=50, null=True)  # 审核人员 reviewer name
    allo_user = models.CharField(max_length=50, null=True)  # 分配人员 allocator name
    # 表号: 设备类型+厂商代码+年月日+生产流水号 (device type + manufacturer code + date + serial)
    MeterId = models.CharField(unique=True, max_length=12)
    # 燃气表表类型编号 00膜式表 01修正仪 03超声波 20IC卡-膜式表 21IC卡-修正仪 23IC卡-超声波
    MeterType = models.CharField(max_length=10)
    TimeOfProduce = models.DateField()  # 燃气表生产日期 production date
    ManufactureName = models.ForeignKey('Manufacture', on_delete=models.CASCADE)  # 燃气表生产厂商 Manufacture_id
    Subtime = models.DateTimeField(null=True)  # 提交时间 submit time
    CheckTime = models.DateTimeField(null=True)  # 审核时间 review time
    # Workflow flags — default=False is the idiomatic spelling of the former
    # default=0 (identical stored value).
    IsSubmit = models.BooleanField(default=False)  # 是否提交 1提交 0未提交
    IsDataChecked = models.BooleanField(default=False)  # 数据是否审核 1已审核 0待审核
    DataCheckedResult = models.BooleanField(default=False)  # 审核结果 1通过 0不通过
    IsAllocated = models.BooleanField(default=False)  # 是否分配 1已分配 0未分配
    # 分配结果 — per-stage test assignment results
    CommandTest = models.CharField(max_length=10, null=True)
    ICTest = models.CharField(max_length=10, null=True)
    ChuTest = models.CharField(max_length=10, null=True)
    ZhoTest = models.CharField(max_length=10, null=True)
    MianTest = models.CharField(max_length=10, null=True)
    MeterPrivilege = models.CharField(max_length=1, default="0")  # 优先 priority flag
    IsTest = models.BooleanField(default=False)  # 是否测试入库 1测试完成 0测试未完成
    # 测试人员可以是登录用户 — testing unit (defaults to the Shanghai gas company)
    TestUnit = models.CharField(max_length=50, default='上海燃气有限公司')

    class Meta:
        ordering = ['-Subtime']  # 从大到小排序 newest submission first
#IC卡数据表
class GS_MeterInfo_IC(models.Model):
    """IC卡数据表 — IC-card meter data: billing state, tariff systems, recharge history.

    Linked 1:1 to the meter's GS_MeterTypeInfo master record.
    """
    # on_delete=models.CASCADE matches the implicit pre-Django-2.0 default;
    # the argument is mandatory from Django 2.0. (unique=True is redundant on
    # a OneToOneField but kept as originally declared.)
    MeterTypeId = models.OneToOneField('GS_MeterTypeInfo', on_delete=models.CASCADE, unique=True, null=True)
    MeterId = models.CharField(max_length=12, unique=True)
    ## 计费数据 — billing data
    ChargingStatusWord = models.CharField(max_length=2, null=True)
    CurrentVol = models.DecimalField(max_digits=12, decimal_places=2, null=True)
    RemainingSum = models.DecimalField(max_digits=8, decimal_places=2, null=True)
    CumulativeSum = models.DecimalField(max_digits=12, decimal_places=2, null=True)
    CurrentPrice = models.DecimalField(max_digits=4, decimal_places=2, null=True)
    CurrentPriceInitialVol = models.DecimalField(max_digits=12, decimal_places=2, null=True)
    LastPrice = models.DecimalField(max_digits=4, decimal_places=2, null=True)
    LastPriceInitialVol = models.DecimalField(max_digits=12, decimal_places=2, null=True)
    ChargingTime = models.CharField(max_length=8, null=True)
    ## 设置延时价格体系 — scheduled (delayed) tariff system
    VerComparison = models.CharField(max_length=2, null=True)
    PriceSysDate = models.CharField(max_length=10, null=True)
    PriceMode = models.CharField(max_length=2, null=True)  # 常规01 其他02
    PriceSysVer = models.CharField(max_length=4, null=True)
    PriceNormal = models.DecimalField(max_digits=4, decimal_places=2, null=True)
    PriceSysCycle = models.CharField(max_length=2, null=True)
    PriceCycleDate = models.CharField(max_length=8, null=True)
    PriceClearSign = models.CharField(max_length=2, null=True)  # 01清 00不清
    ## 第一阶段 — tier 1
    PriceEndDateOne = models.CharField(max_length=8, null=True)
    PriceOne1 = models.DecimalField(max_digits=4, decimal_places=2, null=True)
    PriceOneAmount1 = models.CharField(max_length=8, null=True)
    PriceOne2 = models.DecimalField(max_digits=4, decimal_places=2, null=True)
    PriceOneAmount2 = models.CharField(max_length=8, null=True)
    PriceOne3 = models.DecimalField(max_digits=4, decimal_places=2, null=True)
    ## 第二阶段 — tier 2
    PriceEndDateTwo = models.CharField(max_length=8, null=True)
    PriceTwo1 = models.DecimalField(max_digits=4, decimal_places=2, null=True)
    PriceTwoAmount1 = models.CharField(max_length=8, null=True)
    PriceTwo2 = models.DecimalField(max_digits=4, decimal_places=2, null=True)
    PriceTwoAmount2 = models.CharField(max_length=8, null=True)
    PriceTwo3 = models.DecimalField(max_digits=4, decimal_places=2, null=True)
    ## 第三阶段 — tier 3
    PriceEndDateThree = models.CharField(max_length=8, null=True)
    PriceThree1 = models.DecimalField(max_digits=4, decimal_places=2, null=True)
    PriceThreeAmount1 = models.CharField(max_length=8, null=True)
    PriceThree2 = models.DecimalField(max_digits=4, decimal_places=2, null=True)
    PriceThreeAmount2 = models.CharField(max_length=8, null=True)
    PriceThree3 = models.DecimalField(max_digits=4, decimal_places=2, null=True)
    ## 第四阶段 — tier 4
    PriceEndDateFour = models.CharField(max_length=8, null=True)
    PriceFour1 = models.DecimalField(max_digits=4, decimal_places=2, null=True)
    PriceFourAmount1 = models.CharField(max_length=8, null=True)
    PriceFour2 = models.DecimalField(max_digits=4, decimal_places=2, null=True)
    PriceFourAmount2 = models.CharField(max_length=8, null=True)
    PriceFour3 = models.DecimalField(max_digits=4, decimal_places=2, null=True)
    ## 第五阶段 — tier 5
    PriceEndDateFive = models.CharField(max_length=8, null=True)
    PriceFive1 = models.DecimalField(max_digits=4, decimal_places=2, null=True)
    PriceFiveAmount1 = models.CharField(max_length=8, null=True)
    PriceFive2 = models.DecimalField(max_digits=4, decimal_places=2, null=True)
    PriceFiveAmount2 = models.CharField(max_length=8, null=True)
    PriceFive3 = models.DecimalField(max_digits=4, decimal_places=2, null=True)
    ## 当前价格体系 — currently active tariff system (_C suffix)
    PriceSysDate_C = models.CharField(max_length=10, null=True)
    PriceSysCycle_C = models.CharField(max_length=2, null=True)
    PriceMode_C = models.CharField(max_length=2, null=True)  # 常规01 其他02
    PriceCycleDate_C = models.CharField(max_length=8, null=True)
    PriceSysVer_C = models.CharField(max_length=4, null=True)
    PriceClearSign_C = models.CharField(max_length=2, null=True)  # 01清 00不清
    PriceNormal_C = models.DecimalField(max_digits=4, decimal_places=2, null=True)
    DelayExists_C = models.CharField(max_length=2, null=True)  # 0不存在 1存在
    ## 第一阶段 — tier 1 (current)
    PriceEndDateOne_C = models.CharField(max_length=8, null=True)
    PriceOne1_C = models.DecimalField(max_digits=4, decimal_places=2, null=True)
    PriceOneAmount1_C = models.CharField(max_length=8, null=True)
    PriceOne2_C = models.DecimalField(max_digits=4, decimal_places=2, null=True)
    PriceOneAmount2_C = models.CharField(max_length=8, null=True)
    PriceOne3_C = models.DecimalField(max_digits=4, decimal_places=2, null=True)
    ## 第二阶段 — tier 2 (current)
    PriceEndDateTwo_C = models.CharField(max_length=8, null=True)
    PriceTwo1_C = models.DecimalField(max_digits=4, decimal_places=2, null=True)
    PriceTwoAmount1_C = models.CharField(max_length=8, null=True)
    PriceTwo2_C = models.DecimalField(max_digits=4, decimal_places=2, null=True)
    PriceTwoAmount2_C = models.CharField(max_length=8, null=True)
    PriceTwo3_C = models.DecimalField(max_digits=4, decimal_places=2, null=True)
    ## 第三阶段 — tier 3 (current)
    PriceEndDateThree_C = models.CharField(max_length=8, null=True)
    PriceThree1_C = models.DecimalField(max_digits=4, decimal_places=2, null=True)
    PriceThreeAmount1_C = models.CharField(max_length=8, null=True)
    PriceThree2_C = models.DecimalField(max_digits=4, decimal_places=2, null=True)
    PriceThreeAmount2_C = models.CharField(max_length=8, null=True)
    PriceThree3_C = models.DecimalField(max_digits=4, decimal_places=2, null=True)
    ## 第四阶段 — tier 4 (current)
    PriceEndDateFour_C = models.CharField(max_length=8, null=True)
    PriceFour1_C = models.DecimalField(max_digits=4, decimal_places=2, null=True)
    PriceFourAmount1_C = models.CharField(max_length=8, null=True)
    PriceFour2_C = models.DecimalField(max_digits=4, decimal_places=2, null=True)
    PriceFourAmount2_C = models.CharField(max_length=8, null=True)
    PriceFour3_C = models.DecimalField(max_digits=4, decimal_places=2, null=True)
    ## 第五阶段 — tier 5 (current)
    PriceEndDateFive_C = models.CharField(max_length=8, null=True)
    PriceFive1_C = models.DecimalField(max_digits=4, decimal_places=2, null=True)
    PriceFiveAmount1_C = models.CharField(max_length=8, null=True)
    PriceFive2_C = models.DecimalField(max_digits=4, decimal_places=2, null=True)
    PriceFiveAmount2_C = models.CharField(max_length=8, null=True)
    PriceFive3_C = models.DecimalField(max_digits=4, decimal_places=2, null=True)
    ## 卡片充值记录 — last five card recharge records
    RechargeDate1 = models.CharField(max_length=8, null=True)
    RemainingSumBefore1 = models.CharField(max_length=8, null=True)
    RechargeSum1 = models.CharField(max_length=8, null=True)
    RechargeDate2 = models.CharField(max_length=8, null=True)
    RemainingSumBefore2 = models.CharField(max_length=8, null=True)
    RechargeSum2 = models.CharField(max_length=8, null=True)
    RechargeDate3 = models.CharField(max_length=8, null=True)
    RemainingSumBefore3 = models.CharField(max_length=8, null=True)
    RechargeSum3 = models.CharField(max_length=8, null=True)
    RechargeDate4 = models.CharField(max_length=8, null=True)
    RemainingSumBefore4 = models.CharField(max_length=8, null=True)
    RechargeSum4 = models.CharField(max_length=8, null=True)
    RechargeDate5 = models.CharField(max_length=8, null=True)
    RemainingSumBefore5 = models.CharField(max_length=8, null=True)
    RechargeSum5 = models.CharField(max_length=8, null=True)
#膜式表数据表
class GS_MeterInfo_MSB(models.Model):
    """膜式表数据表 — diaphragm-meter data: inspection readings, event log pairs, FTP upgrade config.

    Each event is a pair of fields: *1_msb = timestamp, *2_msb = reading at the event.
    """
    # on_delete=models.CASCADE matches the implicit pre-Django-2.0 default;
    # mandatory from Django 2.0.
    MeterTypeId = models.OneToOneField('GS_MeterTypeInfo', on_delete=models.CASCADE, unique=True, null=True)
    MeterId = models.CharField(max_length=12, unique=True)
    # 常规数据(供初检用) — routine data for initial inspection
    Com_no_msb = models.CharField(max_length=16, null=True)
    Sw_rlse_msb = models.CharField(max_length=4, null=True)
    Real_vol = models.DecimalField(max_digits=10, decimal_places=2, null=True)
    Meter_v = models.DecimalField(max_digits=4, decimal_places=2, null=True)
    Temperature_msb = models.DecimalField(max_digits=4, decimal_places=2, null=True)
    Status = models.CharField(max_length=4, null=True)
    # Event pairs: *1 = DateTime of event, *2 = meter reading at event.
    DropMeter1_msb = models.DateTimeField(null=True)
    DropMeter2_msb = models.DecimalField(max_digits=8, decimal_places=2, null=True)
    ReverseInstall1_msb = models.DateTimeField(null=True)
    ReverseInstall2_msb = models.DecimalField(max_digits=8, decimal_places=2, null=True)
    MeasureBreakdown1_msb = models.DateTimeField(null=True)
    MeasureBreakdown2_msb = models.DecimalField(max_digits=8, decimal_places=2, null=True)
    TSensorBreakdown1_msb = models.DateTimeField(null=True)
    TSensorBreakdown2_msb = models.DecimalField(max_digits=8, decimal_places=2, null=True)
    PSensorBreakdown1_msb = models.DateTimeField(null=True)
    PSensorBreakdown2_msb = models.DecimalField(max_digits=8, decimal_places=2, null=True)
    TrafficAbnormality1_msb = models.DateTimeField(null=True)
    TrafficAbnormality2_msb = models.DecimalField(max_digits=8, decimal_places=2, null=True)
    ComVol1_msb = models.DateTimeField(null=True)
    ComVol2_msb = models.DecimalField(max_digits=8, decimal_places=2, null=True)
    BaseVol1_msb = models.DateTimeField(null=True)  # 计量电压低 metering voltage low
    BaseVol2_msb = models.DecimalField(max_digits=8, decimal_places=2, null=True)
    CollectFault1_msb = models.DateTimeField(null=True)  # 采集故障 acquisition fault
    CollectFault2_msb = models.DecimalField(max_digits=8, decimal_places=2, null=True)
    GasLeakClose1_msb = models.DateTimeField(null=True)
    GasLeakClose2_msb = models.DecimalField(max_digits=8, decimal_places=2, null=True)
    GasStolenClose1_msb = models.DateTimeField(null=True)
    GasStolenClose2_msb = models.DecimalField(max_digits=8, decimal_places=2, null=True)
    ResetClose1_msb = models.DateTimeField(null=True)
    ResetClose2_msb = models.DecimalField(max_digits=8, decimal_places=2, null=True)
    LowVolClose1_msb = models.DateTimeField(null=True)  # 计量低电压关阀
    LowVolClose2_msb = models.DecimalField(max_digits=8, decimal_places=2, null=True)
    CollectClose1_msb = models.DateTimeField(null=True)  # 超声波计量故障关阀
    CollectClose2_msb = models.DecimalField(max_digits=8, decimal_places=2, null=True)
    CommandClose1_msb = models.DateTimeField(null=True)  # 关阀指令关阀
    CommandClose2_msb = models.DecimalField(max_digits=8, decimal_places=2, null=True)
    ManulOpen1_msb = models.DateTimeField(null=True)  # 人工开阀
    ManulOpen2_msb = models.DecimalField(max_digits=8, decimal_places=2, null=True)
    # 程序远程升级数据 — remote firmware-upgrade FTP settings
    FTPUserName_msb = models.CharField(max_length=25, default='01')
    FTPPassword_msb = models.CharField(max_length=25, default='01')
    FTPAddress_msb = models.CharField(max_length=50, default='139.199.191.23')
    # Raw string: '\F' was an invalid escape sequence (DeprecationWarning);
    # r'...' keeps the identical value D:\FTPPoint\File.
    FTPCatalog_msb = models.CharField(max_length=50, default=r'D:\FTPPoint\File')
    FileName_msb = models.CharField(max_length=8, null=True)
#修正仪数据表
class GS_MeterInfo_XZY(models.Model):
    """修正仪数据表 — volume-corrector data: inspection readings, event log pairs, FTP upgrade config.

    Each event is a pair of fields: *1_xzy = timestamp, *2_xzy = reading at the event.
    """
    # on_delete=models.CASCADE matches the implicit pre-Django-2.0 default;
    # mandatory from Django 2.0.
    MeterTypeId = models.OneToOneField('GS_MeterTypeInfo', on_delete=models.CASCADE, unique=True, null=True)
    MeterId = models.CharField(max_length=12, unique=True)
    # 常规数据(供初检用) — routine data for initial inspection
    Com_no_xzy = models.CharField(max_length=16, null=True)
    Sw_rlse_xzy = models.CharField(max_length=4, null=True)
    MeterNum = models.IntegerField(null=True)
    Temperature_xzy = models.DecimalField(max_digits=4, decimal_places=2, null=True)
    Disturb_Total_Vol = models.DecimalField(max_digits=8, decimal_places=2, null=True)
    Pressure = models.CharField(max_length=8, null=True)
    Correction_E = models.DecimalField(max_digits=4, decimal_places=2, null=True)
    Stan_Total_Vol = models.DecimalField(max_digits=12, decimal_places=2, null=True)
    Stan_Ins_Ele_xzy = models.DecimalField(max_digits=6, decimal_places=2, null=True)
    Work_Total_Vol = models.DecimalField(max_digits=12, decimal_places=2, null=True)
    Work_Ins_Ele_xzy = models.DecimalField(max_digits=6, decimal_places=2, null=True)
    # Event pairs: *1 = DateTime of event, *2 = meter reading at event.
    DropMeter1_xzy = models.DateTimeField(null=True)
    DropMeter2_xzy = models.DecimalField(max_digits=8, decimal_places=2, null=True)
    ReverseInstall1_xzy = models.DateTimeField(null=True)
    ReverseInstall2_xzy = models.DecimalField(max_digits=8, decimal_places=2, null=True)
    MeasureBreakdown1_xzy = models.DateTimeField(null=True)
    MeasureBreakdown2_xzy = models.DecimalField(max_digits=8, decimal_places=2, null=True)
    TSensorBreakdown1_xzy = models.DateTimeField(null=True)
    TSensorBreakdown2_xzy = models.DecimalField(max_digits=8, decimal_places=2, null=True)
    PSensorBreakdown1_xzy = models.DateTimeField(null=True)
    PSensorBreakdown2_xzy = models.DecimalField(max_digits=8, decimal_places=2, null=True)
    TrafficAbnormality1_xzy = models.DateTimeField(null=True)
    TrafficAbnormality2_xzy = models.DecimalField(max_digits=8, decimal_places=2, null=True)
    ComVol1_xzy = models.DateTimeField(null=True)
    ComVol2_xzy = models.DecimalField(max_digits=8, decimal_places=2, null=True)
    BaseVol1_xzy = models.DateTimeField(null=True)  # 计量电压低 metering voltage low
    BaseVol2_xzy = models.DecimalField(max_digits=8, decimal_places=2, null=True)
    CollectFault1_xzy = models.DateTimeField(null=True)  # 采集故障 acquisition fault
    CollectFault2_xzy = models.DecimalField(max_digits=8, decimal_places=2, null=True)
    GasLeakClose1_xzy = models.DateTimeField(null=True)
    GasLeakClose2_xzy = models.DecimalField(max_digits=8, decimal_places=2, null=True)
    GasStolenClose1_xzy = models.DateTimeField(null=True)
    GasStolenClose2_xzy = models.DecimalField(max_digits=8, decimal_places=2, null=True)
    ResetClose1_xzy = models.DateTimeField(null=True)
    ResetClose2_xzy = models.DecimalField(max_digits=8, decimal_places=2, null=True)
    LowVolClose1_xzy = models.DateTimeField(null=True)  # 计量低电压关阀
    LowVolClose2_xzy = models.DecimalField(max_digits=8, decimal_places=2, null=True)
    CollectClose1_xzy = models.DateTimeField(null=True)  # 超声波计量故障关阀
    CollectClose2_xzy = models.DecimalField(max_digits=8, decimal_places=2, null=True)
    CommandClose1_xzy = models.DateTimeField(null=True)  # 关阀指令关阀
    CommandClose2_xzy = models.DecimalField(max_digits=8, decimal_places=2, null=True)
    ManulOpen1_xzy = models.DateTimeField(null=True)  # 人工开阀
    ManulOpen2_xzy = models.DecimalField(max_digits=8, decimal_places=2, null=True)
    # 程序远程升级数据 — remote firmware-upgrade FTP settings
    FTPUserName_xzy = models.CharField(max_length=25, default='01')
    FTPPassword_xzy = models.CharField(max_length=25, default='01')
    FTPAddress_xzy = models.CharField(max_length=50, default='139.199.191.23')
    # Raw string: '\F' was an invalid escape sequence; value unchanged.
    FTPCatalog_xzy = models.CharField(max_length=50, default=r'D:\FTPPoint\File')
    FileName_xzy = models.CharField(max_length=8, null=True)
#超声波数据表
class GS_MeterInfo_CSB(models.Model):
    """超声波数据表 — ultrasonic-meter data: inspection readings, event log pairs, alarm limits, FTP config.

    Each event is a pair of fields: *1_csb = timestamp, *2_csb = reading at the event.
    """
    # on_delete=models.CASCADE matches the implicit pre-Django-2.0 default;
    # mandatory from Django 2.0.
    MeterTypeId = models.OneToOneField('GS_MeterTypeInfo', on_delete=models.CASCADE, unique=True, null=True)
    MeterId = models.CharField(max_length=12, unique=True)
    # 常规数据(供初检用) — routine data for initial inspection
    Com_no_csb = models.CharField(max_length=16, null=True)
    Sw_rlse_csb = models.CharField(max_length=4, null=True)
    Vol1 = models.DecimalField(max_digits=4, decimal_places=2, null=True)
    Stan_Ins_Ele_csb = models.DecimalField(max_digits=6, decimal_places=3, null=True)
    Vol2 = models.DecimalField(max_digits=4, decimal_places=2, null=True)
    Work_Ins_Ele_csb = models.DecimalField(max_digits=6, decimal_places=3, null=True)
    MeterStateWord = models.CharField(max_length=2, null=True)
    Temperature_csb = models.DecimalField(max_digits=4, decimal_places=2, null=True)
    MeterInStateWord = models.CharField(max_length=2, null=True)
    PValue = models.DecimalField(max_digits=8, decimal_places=3, null=True)
    Stan_Total_Ele = models.DecimalField(max_digits=10, decimal_places=3, null=True)
    Peak_Ele = models.DecimalField(max_digits=6, decimal_places=3, null=True)
    Work_Total_Ele = models.DecimalField(max_digits=10, decimal_places=3, null=True)
    # Event pairs: *1 = DateTime of event, *2 = meter reading at event.
    DropMeter1_csb = models.DateTimeField(null=True)
    DropMeter2_csb = models.DecimalField(max_digits=8, decimal_places=2, null=True)
    ReverseInstall1_csb = models.DateTimeField(null=True)
    ReverseInstall2_csb = models.DecimalField(max_digits=8, decimal_places=2, null=True)
    MeasureBreakdown1_csb = models.DateTimeField(null=True)
    MeasureBreakdown2_csb = models.DecimalField(max_digits=8, decimal_places=2, null=True)
    TSensorBreakdown1_csb = models.DateTimeField(null=True)
    TSensorBreakdown2_csb = models.DecimalField(max_digits=8, decimal_places=2, null=True)
    PSensorBreakdown1_csb = models.DateTimeField(null=True)
    PSensorBreakdown2_csb = models.DecimalField(max_digits=8, decimal_places=2, null=True)
    TrafficAbnormality1_csb = models.DateTimeField(null=True)
    TrafficAbnormality2_csb = models.DecimalField(max_digits=8, decimal_places=2, null=True)
    ComVol1_csb = models.DateTimeField(null=True)
    ComVol2_csb = models.DecimalField(max_digits=8, decimal_places=2, null=True)
    BaseVol1_csb = models.DateTimeField(null=True)  # 计量电压低 metering voltage low
    BaseVol2_csb = models.DecimalField(max_digits=8, decimal_places=2, null=True)
    CollectFault1_csb = models.DateTimeField(null=True)  # 采集故障 acquisition fault
    CollectFault2_csb = models.DecimalField(max_digits=8, decimal_places=2, null=True)
    GasLeakClose1_csb = models.DateTimeField(null=True)
    GasLeakClose2_csb = models.DecimalField(max_digits=8, decimal_places=2, null=True)
    GasStolenClose1_csb = models.DateTimeField(null=True)
    GasStolenClose2_csb = models.DecimalField(max_digits=8, decimal_places=2, null=True)
    ResetClose1_csb = models.DateTimeField(null=True)
    ResetClose2_csb = models.DecimalField(max_digits=8, decimal_places=2, null=True)
    LowVolClose1_csb = models.DateTimeField(null=True)  # 计量低电压关阀
    LowVolClose2_csb = models.DecimalField(max_digits=8, decimal_places=2, null=True)
    CollectClose1_csb = models.DateTimeField(null=True)  # 超声波计量故障关阀
    CollectClose2_csb = models.DecimalField(max_digits=8, decimal_places=2, null=True)
    CommandClose1_csb = models.DateTimeField(null=True)  # 关阀指令关阀
    CommandClose2_csb = models.DecimalField(max_digits=8, decimal_places=2, null=True)
    ManulOpen1_csb = models.DateTimeField(null=True)  # 人工开阀
    ManulOpen2_csb = models.DecimalField(max_digits=8, decimal_places=2, null=True)
    # Alarm/flow limit triples (upper/down/low) for three channels.
    AF_ULimit1 = models.CharField(max_length=20, null=True)
    AF_DLimit1 = models.CharField(max_length=20, null=True)
    AF_LLimit1 = models.CharField(max_length=20, null=True)
    AF_ULimit2 = models.CharField(max_length=20, null=True)
    AF_DLimit2 = models.CharField(max_length=20, null=True)
    AF_LLimit2 = models.CharField(max_length=20, null=True)
    AF_ULimit3 = models.CharField(max_length=20, null=True)
    AF_DLimit3 = models.CharField(max_length=20, null=True)
    AF_LLimit3 = models.CharField(max_length=20, null=True)
    # 程序远程升级数据 — remote firmware-upgrade FTP settings
    FTPUserName_csb = models.CharField(max_length=25, default='01')
    FTPPassword_csb = models.CharField(max_length=25, default='01')
    FTPAddress_csb = models.CharField(max_length=50, default='139.199.191.23')
    # Raw string: '\F' was an invalid escape sequence; value unchanged.
    FTPCatalog_csb = models.CharField(max_length=50, default=r'D:\FTPPoint\File')
    FileName_csb = models.CharField(max_length=8, null=True)
#测试进度追踪(当前的测试的智能表)
class Meter_Test(models.Model):
    """测试进度追踪 — meters currently under test (live progress tracking)."""
    MeterId = models.CharField(max_length=12, primary_key=True, unique=True)  # 表号是主键 meter id is PK
    MeterType = models.CharField(max_length=10)
    # Per-stage state strings (communication / IC / initial / final).
    MeterComState = models.CharField(max_length=25)
    MeterIcState = models.CharField(max_length=25)
    MeterChuState = models.CharField(max_length=25)
    MeterZhongState = models.CharField(max_length=25)
    MeterState = models.CharField(max_length=25, default='初始')  # overall state, starts "initial"
    MeterTest = models.CharField(max_length=25, default='空闲')  # test slot state, starts "idle"
    MeterRand_num = models.CharField(max_length=25, null=True)
    Meteriport = models.CharField(max_length=50, null=True)
    MeterTime = models.DateTimeField(auto_now_add=True, null=True)  # 当前时间 set on insert
    MeterCancel = models.CharField(max_length=10, default="否")  # 是否取消测试 cancelled?
    MeterEvery = models.CharField(max_length=32, default="00000000000000000000000000000000")  # 单项结果 per-item results bitmap
    MeterPrivilege = models.CharField(max_length=1, default="0")  # 优先 priority flag
    # on_delete=models.CASCADE matches the implicit pre-Django-2.0 default;
    # mandatory from Django 2.0.
    ManufactureName = models.ForeignKey('Manufacture', on_delete=models.CASCADE, null=True)  # 燃气表生产厂商 Manufacture_id
    Subtime = models.DateTimeField(null=True)  # 提交时间 submit time
    CheckTime = models.DateTimeField(null=True)  # 审核时间 review time
class Meter_Result(models.Model):
    """Finished test results; same schema as Meter_Test plus newest-first ordering."""
    MeterId = models.CharField(max_length=12, primary_key=True, unique=True)  # 表号是主键 meter id is PK
    MeterType = models.CharField(max_length=10)
    MeterComState = models.CharField(max_length=25)
    MeterIcState = models.CharField(max_length=25)
    MeterChuState = models.CharField(max_length=25)
    MeterZhongState = models.CharField(max_length=25)
    MeterState = models.CharField(max_length=25, default='初始')
    MeterTest = models.CharField(max_length=25, default='空闲')
    MeterRand_num = models.CharField(max_length=25, null=True)
    Meteriport = models.CharField(max_length=50, null=True)
    MeterTime = models.DateTimeField(auto_now_add=True, null=True)  # 当前时间 set on insert
    MeterCancel = models.CharField(max_length=10, default="否")  # 是否取消测试
    MeterEvery = models.CharField(max_length=32, default="00000000000000000000000000000000")  # 单项结果
    MeterPrivilege = models.CharField(max_length=1, default="0")  # 优先
    # on_delete=models.CASCADE matches the implicit pre-Django-2.0 default;
    # mandatory from Django 2.0.
    ManufactureName = models.ForeignKey('Manufacture', on_delete=models.CASCADE, null=True)  # 燃气表生产厂商 Manufacture_id
    Subtime = models.DateTimeField(null=True)  # 提交时间
    CheckTime = models.DateTimeField(null=True)  # 审核时间

    class Meta:
        ordering = ['-MeterTime']  # 从大到小排序 newest first
class Meter_Result_Record(models.Model):
    """Historical archive of test results; same schema as Meter_Result."""
    MeterId = models.CharField(max_length=12, primary_key=True, unique=True)  # 表号是主键 meter id is PK
    MeterType = models.CharField(max_length=10)
    MeterComState = models.CharField(max_length=25)
    MeterIcState = models.CharField(max_length=25)
    MeterChuState = models.CharField(max_length=25)
    MeterZhongState = models.CharField(max_length=25)
    MeterState = models.CharField(max_length=25, default='初始')
    MeterTest = models.CharField(max_length=25, default='空闲')
    MeterRand_num = models.CharField(max_length=25, null=True)
    Meteriport = models.CharField(max_length=50, null=True)
    MeterTime = models.DateTimeField(auto_now_add=True, null=True)  # 当前时间 set on insert
    MeterCancel = models.CharField(max_length=10, default="否")  # 是否取消测试
    MeterEvery = models.CharField(max_length=32, default="00000000000000000000000000000000")  # 单项结果
    MeterPrivilege = models.CharField(max_length=1, default="0")  # 优先
    # on_delete=models.CASCADE matches the implicit pre-Django-2.0 default;
    # mandatory from Django 2.0.
    ManufactureName = models.ForeignKey('Manufacture', on_delete=models.CASCADE, null=True)  # 燃气表生产厂商 Manufacture_id
    Subtime = models.DateTimeField(null=True)  # 提交时间
    CheckTime = models.DateTimeField(null=True)  # 审核时间

    class Meta:
        ordering = ['-MeterTime']  # 从大到小排序 newest first
class PlanInfo(models.Model):
    """Test-platform configuration per host: socket (*s) and port (*p) names per stage."""
    # 不自增,主键 — explicit, non-autoincrementing primary key.
    # (unique=True is implied by primary_key=True; kept as originally written.)
    id = models.IntegerField(primary_key=True,unique=True)
    IP = models.CharField(max_length=30)
    # Socket identifiers per test stage; the "Chu" stage has one per meter kind
    # (msb=diaphragm, xzy=corrector, csb=ultrasonic).
    PlatComs = models.CharField(max_length=20)
    PlatIcs = models.CharField(max_length=20)
    PlatChus_msb = models.CharField(max_length=20)
    PlatChus_xzy = models.CharField(max_length=20)
    PlatChus_csb = models.CharField(max_length=20)
    PlatZhos = models.CharField(max_length=20)
    # Corresponding port identifiers (nullable).
    PlatComp = models.CharField(max_length=20, null=True)
    PlatIcp = models.CharField(max_length=20, null=True)
    PlatChup_msb = models.CharField(max_length=20, null=True)
    PlatChup_xzy = models.CharField(max_length=20, null=True)
    PlatChup_csb = models.CharField(max_length=20, null=True)
    PlatZhop = models.CharField(max_length=20, null=True)
class MeterPlat(models.Model):
    """Maps one meter to the platform slot assigned for each test stage."""
    MeterId = models.CharField(max_length=12, primary_key=True, unique=True)  # 表号是主键 meter id is PK
    # Platform/slot identifier per stage (nullable until assigned).
    Meterip_Com = models.CharField(max_length=10, null=True)
    Meterip_IC = models.CharField(max_length=10, null=True)
    Meterip_Chu = models.CharField(max_length=10, null=True)
Meterip_Zhong = models.CharField(max_length=10, null=True) | [
"lu951120@163.com"
] | lu951120@163.com |
1488d8d1105bcc2a1c82f5ce153d9133fcd078ed | c11ea18bf126f4d4342ea66d0b4d7aa1cf9ab5f1 | /backend/base/urls/product_urls.py | d576db9fb7e9501351e9ad4daed1d1806e0e84d3 | [] | no_license | anoosh96/crow-shop | 2e0286160523de30a66ea9b16816804dcefd9b51 | ab9f4e228913f3ff44f70bb570c36119cb4578a3 | refs/heads/master | 2023-07-14T15:28:05.413236 | 2021-08-31T20:23:50 | 2021-08-31T20:23:50 | 399,087,354 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 201 | py | from django.urls import path
from ..views import product_views as views
urlpatterns = [
path('',views.getProducts,name="products"),
path('<int:pk>/',views.getProduct,name="productDetail"),
] | [
"ahmed@kaloncouture.com"
] | ahmed@kaloncouture.com |
f95199ebe8d2e29d27355593b7a7a581061306cd | 65f03862400819dc09204c6036629ef3ab40194a | /plugin/default/crypto.py | 9ae774b1f60ae2af05caf1d11d18947c6af4a0ae | [
"MIT"
] | permissive | dpsbot-project/DPSBot | e6c2a1cb038655117f81bdcbe50416c91b2c1e93 | 90db398d9d5a0e4ce3dac3dfcd63d902440250af | refs/heads/master | 2023-03-09T12:48:53.687211 | 2022-12-05T00:14:06 | 2022-12-05T00:14:06 | 143,755,913 | 0 | 0 | MIT | 2023-02-07T21:53:20 | 2018-08-06T16:37:34 | Python | UTF-8 | Python | false | false | 1,092 | py | import asyncio
from discord.ext import commands
from cryptography.fernet import Fernet
class cryptoclass():
    """Discord cog exposing symmetric Fernet encrypt/decrypt chat commands.

    A fresh key is generated per bot start, so tokens are only decryptable
    within the same session.
    """

    def __init__(self, bot):
        self.bot = bot
        privatekey = Fernet.generate_key()
        self.cipher_suite = Fernet(privatekey)

    @commands.command(name="encrypt", aliases=["암호화"], pass_context=True)
    async def encrypt(self, ctx, *, plaintext=None):
        """Encrypt the given text and reply with the Fernet token."""
        if plaintext:
            # BUG FIX: with a keyword-only consume-rest parameter, `plaintext`
            # is the raw argument *string*, not a Message — it has no
            # `.content` attribute, so the old `plaintext.content.encode(...)`
            # raised AttributeError on every use.
            encryptedtext = self.cipher_suite.encrypt(
                plaintext.encode("utf-8")).decode()
            await self.bot.say(encryptedtext)
        else:
            await self.bot.say(_('내용이 없습니다.'))

    @commands.command(name="decrypt", aliases=["복호화"], pass_context=True)
    async def decrypt(self, ctx, *, encryptedtext=None):
        """Decrypt a previously issued token and reply with the plaintext."""
        if encryptedtext:
            # Same fix as encrypt(): `encryptedtext` is already a str.
            plaintext = self.cipher_suite.decrypt(
                encryptedtext.encode("utf-8")).decode()
            await self.bot.say(plaintext)
        else:
            await self.bot.say(_('내용이 없습니다.'))
def setup(bot):
    # discord.py extension entry point: called when the module is loaded
    # via load_extension(); registers the crypto cog on the bot.
    bot.add_cog(cryptoclass(bot))
| [
"jh001007@gmail.com"
] | jh001007@gmail.com |
5d50b7230df3a58ddc09184f562a48db1f72f001 | 01956c956446066768f2e1242e69966bab7286e8 | /utils/basefile.py | 3810eced9db3495554a729d921c6fd62a7ede2d1 | [] | no_license | sharly2012/wechat-applet-test | b6f627862c52f2eec7df267707c36bae4a285e7d | dd6ba913509865151a45926262ac96268cdc02bf | refs/heads/master | 2022-12-10T08:24:49.195097 | 2019-04-29T09:57:16 | 2019-04-29T09:57:16 | 184,044,678 | 1 | 0 | null | 2022-12-08T05:02:38 | 2019-04-29T09:50:51 | Python | UTF-8 | Python | false | false | 1,629 | py | import os
import shutil
from utils.logger import Logger
from utils.baseutil import BaseUtil
logger = Logger("basefile").get_log()
def files_and_dirs_list(dir_path):
    """Walk *dir_path* recursively and log each directory's root, subdirs and files."""
    for current_root, sub_dirs, file_names in os.walk(dir_path):
        logger.info("The root is: %s" % current_root)
        logger.info("The dirs are: %s" % sub_dirs)
        logger.info("The files are: %s" % file_names)
def all_files(dir_path):
    """输出文件夹下所有文件名(不包括文件夹) — print names of regular files directly under *dir_path*.

    BUG FIX: os.listdir() also yields subdirectories, contradicting the
    documented "excluding folders" contract; filter with os.path.isfile().
    """
    for entry in os.listdir(dir_path):
        if os.path.isfile(os.path.join(dir_path, entry)):
            print(entry)
def delete_folder(folder_path):
    """Remove *folder_path* and all of its contents, logging the outcome."""
    # Guard clause: nothing to do when the path is absent.
    if not os.path.exists(folder_path):
        logger.info("Folder %s is not exist" % folder_path)
        return
    shutil.rmtree(folder_path)
    logger.info("The Folder %s had been deleted" % folder_path)
def make_folder(folder_path):
    """Create *folder_path* if missing, logging whether it already existed."""
    if not os.path.exists(folder_path):
        os.mkdir(folder_path)
        logger.info("Folder %s is created" % folder_path)
    else:
        logger.info("Folder %s is exist" % folder_path)
def copy_folder(olddir_path, newdir_path):
    """copy folder,olddir and newdir must be folder type

    Replaces any existing *newdir_path* with a full copy of *olddir_path*.
    """
    # delete_folder() already removes an existing destination, so the former
    # duplicate os.path.exists()/shutil.rmtree() check here was dead code.
    delete_folder(newdir_path)
    shutil.copytree(olddir_path, newdir_path)
if __name__ == '__main__':
    # Ad-hoc smoke test: create screenshots/<project-name>/<timestamp>
    # under the project root (root_path and timestamp come from BaseUtil).
    screenshots_folder = BaseUtil().root_path + "/screenshots/恭和家园"
    make_folder(screenshots_folder)
    current_time = BaseUtil().get_current_time()
    result_images = screenshots_folder + "/" + current_time
    make_folder(result_images)
| [
"xingl@elab-plus.com"
] | xingl@elab-plus.com |
45647e63c1b0158accfd7b668922f9f549e5fd56 | 39522478bc38ac586b6bafa5e8462cedaddffaf1 | /asynchronous IO/asyncio_task1.py | ae8051c053327b78d1303ba7b63515fe96f534c7 | [] | no_license | Miyanaqy/PythonLearn | 011eeef631693a832d9798e43e3dbf83edb3b77d | ed4636d02e43e6c362ebef5c5f845a839a5bdae0 | refs/heads/master | 2021-01-19T08:02:08.851507 | 2017-05-20T09:31:14 | 2017-05-20T09:31:14 | 87,596,636 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 342 | py | import threading
import asyncio
async def hello():
    """Print a greeting, yield to the event loop for one second, then print again.

    Rewritten as a native coroutine: the generator-based
    @asyncio.coroutine / `yield from` form was deprecated in 3.8 and
    removed in Python 3.11.
    """
    # threading.current_thread() replaces the deprecated currentThread()
    # alias; it returns the same object, so output is unchanged.
    print('Hello world! (%s)' % threading.current_thread())
    await asyncio.sleep(1)
    print('Hello again! (%s)' % threading.current_thread())
async def _main():
    """Run three hello() coroutines concurrently and wait for all of them."""
    # asyncio.wait() no longer accepts bare coroutines (removed in 3.11);
    # gather() schedules and awaits them, matching the old behaviour.
    await asyncio.gather(hello(), hello(), hello())

# asyncio.run() creates, runs and closes the event loop (3.7+), replacing
# the deprecated get_event_loop()/run_until_complete()/close() sequence.
asyncio.run(_main())
| [
"2247762766@qq.com"
] | 2247762766@qq.com |
a10d864424683827df934951ff4cb07416e8d969 | a838d4bed14d5df5314000b41f8318c4ebe0974e | /sdk/containerservice/azure-mgmt-containerservice/azure/mgmt/containerservice/v2020_12_01/aio/operations/_private_link_resources_operations.py | a7c4a66aa9351e0ab6a575929711ac78f42085cb | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] | permissive | scbedd/azure-sdk-for-python | ee7cbd6a8725ddd4a6edfde5f40a2a589808daea | cc8bdfceb23e5ae9f78323edc2a4e66e348bb17a | refs/heads/master | 2023-09-01T08:38:56.188954 | 2021-06-17T22:52:28 | 2021-06-17T22:52:28 | 159,568,218 | 2 | 0 | MIT | 2019-08-11T21:16:01 | 2018-11-28T21:34:49 | Python | UTF-8 | Python | false | false | 5,000 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class PrivateLinkResourcesOperations:
    """PrivateLinkResourcesOperations async operations.
    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.
    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.containerservice.v2020_12_01.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """
    # NOTE: this class is generated by AutoRest; hand edits are lost on
    # regeneration (see the file header).
    models = _models
    def __init__(self, client, config, serializer, deserializer) -> None:
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    async def list(
        self,
        resource_group_name: str,
        resource_name: str,
        **kwargs
    ) -> "_models.PrivateLinkResourcesListResult":
        """Gets a list of private link resources in the specified managed cluster.
        Gets a list of private link resources in the specified managed cluster. The operation returns
        properties of each private link resource.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param resource_name: The name of the managed cluster resource.
        :type resource_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: PrivateLinkResourcesListResult, or the result of cls(response)
        :rtype: ~azure.mgmt.containerservice.v2020_12_01.models.PrivateLinkResourcesListResult
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PrivateLinkResourcesListResult"]
        # Map well-known HTTP status codes to specific azure-core exceptions;
        # callers may extend/override the mapping via the 'error_map' kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-12-01"
        accept = "application/json"
        # Construct URL
        url = self.list.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', min_length=1),
            'resourceName': self._serialize.url("resource_name", resource_name, 'str', max_length=63, min_length=1, pattern=r'^[a-zA-Z0-9]$|^[a-zA-Z0-9][-_a-zA-Z0-9]{0,61}[a-zA-Z0-9]$'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.get(url, query_parameters, header_parameters)
        # Send the request through the client's pipeline (auth, retries, etc.).
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize('PrivateLinkResourcesListResult', pipeline_response)
        if cls:
            # Custom response hook: let the caller post-process the raw response.
            return cls(pipeline_response, deserialized, {})
        return deserialized
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/privateLinkResources'}  # type: ignore
| [
"noreply@github.com"
] | noreply@github.com |
cf68f6c4ab005f0fe8ee4f5b2477383a7c5b3c99 | a00ed711e3e08b50ad6e91cc07a2cddc4a1de5ea | /tests/models/test_param.py | 3529f0360cdcfbcedfe6aa3802c9aedb473ab05d | [
"Apache-2.0",
"BSD-3-Clause",
"MIT"
] | permissive | ishiis/airflow | 4305794e36b611d01f49e3f2401be3dc49782670 | 292440d54f4db84aaf0c5a98cf5fcf34303f2fa8 | refs/heads/master | 2022-07-30T00:51:28.806940 | 2022-07-14T12:07:11 | 2022-07-14T12:07:11 | 209,801,072 | 1 | 0 | Apache-2.0 | 2019-09-20T13:47:26 | 2019-09-20T13:47:26 | null | UTF-8 | Python | false | false | 9,932 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
from contextlib import nullcontext
import pytest
from airflow.decorators import task
from airflow.exceptions import ParamValidationError
from airflow.models.param import Param, ParamsDict
from airflow.utils import timezone
from airflow.utils.types import DagRunType
from tests.test_utils.db import clear_db_dags, clear_db_runs, clear_db_xcom
class TestParam(unittest.TestCase):
def test_param_without_schema(self):
p = Param('test')
assert p.resolve() == 'test'
p.value = 10
assert p.resolve() == 10
def test_null_param(self):
p = Param()
with pytest.raises(ParamValidationError, match='No value passed and Param has no default value'):
p.resolve()
assert p.resolve(None) is None
p = Param(None)
assert p.resolve() is None
assert p.resolve(None) is None
p = Param(type="null")
p = Param(None, type='null')
assert p.resolve() is None
assert p.resolve(None) is None
with pytest.raises(ParamValidationError):
p.resolve('test')
def test_string_param(self):
p = Param('test', type='string')
assert p.resolve() == 'test'
p = Param('test')
assert p.resolve() == 'test'
p = Param('10.0.0.0', type='string', format='ipv4')
assert p.resolve() == '10.0.0.0'
p = Param(type='string')
with pytest.raises(ParamValidationError):
p.resolve(None)
with pytest.raises(ParamValidationError, match='No value passed and Param has no default value'):
p.resolve()
def test_int_param(self):
p = Param(5)
assert p.resolve() == 5
p = Param(type='integer', minimum=0, maximum=10)
assert p.resolve(value=5) == 5
with pytest.raises(ParamValidationError):
p.resolve(value=20)
def test_number_param(self):
p = Param(42, type='number')
assert p.resolve() == 42
p = Param(1.0, type='number')
assert p.resolve() == 1.0
with pytest.raises(ParamValidationError):
p = Param('42', type='number')
p.resolve()
def test_list_param(self):
p = Param([1, 2], type='array')
assert p.resolve() == [1, 2]
def test_dict_param(self):
p = Param({'a': 1, 'b': 2}, type='object')
assert p.resolve() == {'a': 1, 'b': 2}
def test_composite_param(self):
p = Param(type=["string", "number"])
assert p.resolve(value="abc") == "abc"
assert p.resolve(value=5.0) == 5.0
def test_param_with_description(self):
p = Param(10, description='Sample description')
assert p.description == 'Sample description'
def test_suppress_exception(self):
p = Param('abc', type='string', minLength=2, maxLength=4)
assert p.resolve() == 'abc'
p.value = 'long_string'
assert p.resolve(suppress_exception=True) is None
def test_explicit_schema(self):
p = Param('abc', schema={type: "string"})
assert p.resolve() == 'abc'
def test_custom_param(self):
class S3Param(Param):
def __init__(self, path: str):
schema = {"type": "string", "pattern": r"s3:\/\/(.+?)\/(.+)"}
super().__init__(default=path, schema=schema)
p = S3Param("s3://my_bucket/my_path")
assert p.resolve() == "s3://my_bucket/my_path"
with pytest.raises(ParamValidationError):
p = S3Param("file://not_valid/s3_path")
p.resolve()
def test_value_saved(self):
p = Param("hello", type="string")
assert p.resolve("world") == "world"
assert p.resolve() == "world"
def test_dump(self):
p = Param('hello', description='world', type='string', minLength=2)
dump = p.dump()
assert dump['__class'] == 'airflow.models.param.Param'
assert dump['value'] == 'hello'
assert dump['description'] == 'world'
assert dump['schema'] == {'type': 'string', 'minLength': 2}
class TestParamsDict:
def test_params_dict(self):
# Init with a simple dictionary
pd = ParamsDict(dict_obj={'key': 'value'})
assert isinstance(pd.get_param('key'), Param)
assert pd['key'] == 'value'
assert pd.suppress_exception is False
# Init with a dict which contains Param objects
pd2 = ParamsDict({'key': Param('value', type='string')}, suppress_exception=True)
assert isinstance(pd2.get_param('key'), Param)
assert pd2['key'] == 'value'
assert pd2.suppress_exception is True
# Init with another object of another ParamsDict
pd3 = ParamsDict(pd2)
assert isinstance(pd3.get_param('key'), Param)
assert pd3['key'] == 'value'
assert pd3.suppress_exception is False # as it's not a deepcopy of pd2
# Dump the ParamsDict
assert pd.dump() == {'key': 'value'}
assert pd2.dump() == {'key': 'value'}
assert pd3.dump() == {'key': 'value'}
# Validate the ParamsDict
plain_dict = pd.validate()
assert type(plain_dict) == dict
pd2.validate()
pd3.validate()
# Update the ParamsDict
with pytest.raises(ParamValidationError, match=r'Invalid input for param key: 1 is not'):
pd3['key'] = 1
# Should not raise an error as suppress_exception is True
pd2['key'] = 1
pd2.validate()
def test_update(self):
pd = ParamsDict({'key': Param('value', type='string')})
pd.update({'key': 'a'})
internal_value = pd.get_param('key')
assert isinstance(internal_value, Param)
with pytest.raises(ParamValidationError, match=r'Invalid input for param key: 1 is not'):
pd.update({'key': 1})
class TestDagParamRuntime:
VALUE = 42
DEFAULT_DATE = timezone.datetime(2016, 1, 1)
@staticmethod
def clean_db():
clear_db_runs()
clear_db_dags()
clear_db_xcom()
def setup_class(self):
self.clean_db()
def teardown_method(self):
self.clean_db()
def test_dag_param_resolves(self, dag_maker):
"""Test dagparam resolves on operator execution"""
with dag_maker(dag_id="test_xcom_pass_to_op") as dag:
value = dag.param('value', default=self.VALUE)
@task
def return_num(num):
return num
xcom_arg = return_num(value)
dr = dag_maker.create_dagrun(
run_id=DagRunType.MANUAL.value,
start_date=timezone.utcnow(),
)
xcom_arg.operator.run(dr.execution_date, dr.execution_date)
ti = dr.get_task_instances()[0]
assert ti.xcom_pull() == self.VALUE
def test_dag_param_overwrite(self, dag_maker):
"""Test dag param is overwritten from dagrun config"""
with dag_maker(dag_id="test_xcom_pass_to_op") as dag:
value = dag.param('value', default=self.VALUE)
@task
def return_num(num):
return num
xcom_arg = return_num(value)
assert dag.params['value'] == self.VALUE
new_value = 2
dr = dag_maker.create_dagrun(
run_id=DagRunType.MANUAL.value,
start_date=timezone.utcnow(),
conf={'value': new_value},
)
xcom_arg.operator.run(dr.execution_date, dr.execution_date)
ti = dr.get_task_instances()[0]
assert ti.xcom_pull() == new_value
def test_dag_param_default(self, dag_maker):
"""Test dag param is retrieved from default config"""
with dag_maker(dag_id="test_xcom_pass_to_op", params={'value': 'test'}) as dag:
value = dag.param('value')
@task
def return_num(num):
return num
xcom_arg = return_num(value)
dr = dag_maker.create_dagrun(run_id=DagRunType.MANUAL.value, start_date=timezone.utcnow())
xcom_arg.operator.run(dr.execution_date, dr.execution_date)
ti = dr.get_task_instances()[0]
assert ti.xcom_pull() == 'test'
@pytest.mark.parametrize(
'default, should_warn',
[
pytest.param({0, 1, 2}, True, id='default-non-JSON-serializable'),
pytest.param(None, False, id='default-None'), # Param init should not warn
pytest.param({"b": 1}, False, id='default-JSON-serializable'), # Param init should not warn
],
)
def test_param_json_warning(self, default, should_warn):
warning_msg = 'The use of non-json-serializable params is deprecated'
cm = pytest.warns(DeprecationWarning, match=warning_msg) if should_warn else nullcontext()
with cm:
p = Param(default=default)
p.resolve() # when resolved with NOTSET, should not warn.
p.resolve(value={'a': 1}) # when resolved with JSON-serializable, should not warn.
with pytest.warns(DeprecationWarning, match=warning_msg):
p.resolve(value={1, 2, 3}) # when resolved with not JSON-serializable, should warn.
| [
"noreply@github.com"
] | noreply@github.com |
f4056f860df1771e62dd5010d3a51ea2059537d3 | 6dc761a30cf5efa045f1154aaff2acfa139b835a | /LeetCode/Python/majorityElement.py | 2c3a07d29edec31ce28f3cebf1b76d1b29269efe | [] | no_license | snail15/AlgorithmPractice | 4e58beee3ff76498a389268dd4cc207dcabf778e | 9e8885953ad50e966454c45c460e81dbb6e48be0 | refs/heads/master | 2021-08-17T06:30:02.290260 | 2021-06-08T01:15:07 | 2021-06-08T01:15:07 | 98,246,912 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 654 | py | # Given an array of size n, find the majority element. The majority element is the element that appears more than ⌊ n/2 ⌋ times.
# You may assume that the array is non-empty and the majority element always exist in the array.
# Example 1:
# Input: [3,2,3]
# Output: 3
# Example 2:
# Input: [2,2,1,1,1,2,2]
# Output: 2
def majorityElement(self, nums: List[int]) -> int:
    """Return the element appearing more than ``len(nums) // 2`` times.

    Uses Boyer-Moore majority voting: O(n) time and O(1) extra space,
    replacing the previous dict-of-counts approach (O(n) extra memory).
    Per the problem statement a majority element is assumed to exist;
    for an empty list this returns None, matching the original's
    implicit fall-through behaviour.
    """
    candidate = None
    count = 0
    for num in nums:
        if count == 0:
            # No standing candidate: adopt the current element.
            candidate = num
        # Votes for the candidate cancel against votes for anything else;
        # the true majority element always survives the cancellation.
        count += 1 if num == candidate else -1
    return candidate
"jungs@uchicago.edu"
] | jungs@uchicago.edu |
ba98c671a10f8e25d9232aa6261b7662b7fed330 | 4c3a950fbc7c23187d55be6c9d6cd5756a9c2b89 | /mezzanine_blocks/urls.py | 97e594d263863eee2cb64439151c37600c2d624c | [
"BSD-2-Clause"
] | permissive | dfalk/mezzanine-blocks | a0e35b0a3e7f37e824f8075c6d8becdf5e051d76 | cad7cc1b8bc9336093a7716bb57727e3d8080cc1 | refs/heads/master | 2021-01-16T18:53:46.662802 | 2012-06-02T07:35:54 | 2012-06-02T07:35:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 257 | py | from django.conf.urls.defaults import patterns, url
from django.contrib.admin.views.decorators import staff_member_required
from views import edit
# Single route: /edit/<pk>/ -> the 'edit' view, restricted to staff users via
# Django's staff_member_required decorator.  Uses the legacy patterns()/url()
# API (django.conf.urls.defaults), so this targets an old Django release.
urlpatterns = patterns('',
    url('^edit/(?P<pk>\d+)/$', staff_member_required(edit), name='blocks-edit')
)
| [
"renyi.ace@gmail.com"
] | renyi.ace@gmail.com |
bca5cd8e84a3c9d4f35f76a723f21cf1146bba0d | 02565573516bba93467736c97541971f343327be | /ranks/ranks/spiders/rank_collecto.py | 0c069ab2590fa996327dbc9acb536a7262ff3a5c | [] | no_license | qiaoxiaoqiao1/Math564Project | 42e6fa9690dd29b8104f2e7f60f3db178fb6ede0 | 2b672189beb0134c5ca5e9fb9ed0fdcc038e4e70 | refs/heads/master | 2020-04-03T21:52:37.881989 | 2018-10-31T18:34:05 | 2018-10-31T18:34:05 | 155,585,207 | 0 | 0 | null | 2018-10-31T15:59:47 | 2018-10-31T15:59:46 | null | UTF-8 | Python | false | false | 1,471 | py | # -*- coding: utf-8 -*-
import scrapy
class RankCollectoSpider(scrapy.Spider):
name = 'rank_collecto'
# allowed_domains = ['www.espn.com/nba/standings/_/season/2018/group/league']
start_urls = ['http://www.espn.com/nba/standings/_/season/2014/group/league/']
def parse(self, response):
team_names = response.xpath('//td[@class = "v-top"]/table/tbody/tr')
teams_stats = response.xpath('//td[@class = "v-top"]/div/div/div/table/tbody/tr/td/table/tbody/tr')
for n in range(0,30):
team_name = team_names[n].xpath('.//span[@class = "dn show-mobile"]/a/abbr/text()').extract()
team_stat = teams_stats[n]
win = team_stat.xpath('.//td[1]/span/text()').extract()
loss = team_stat.xpath('.//td[2]/span/text()').extract()
pct = team_stat.xpath('.//td[3]/span/text()').extract()
home = team_stat.xpath('.//td[5]/span/text()').extract()
away = team_stat.xpath('.//td[6]/span/text()').extract()
div = team_stat.xpath('.//td[7]/span/text()').extract()
conf = team_stat.xpath('.//td[8]/span/text()').extract()
ppg = team_stat.xpath('.//td[9]/span/text()').extract()
opp_ppg = team_stat.xpath('.//td[10]/span/text()').extract()
diff = team_stat.xpath('.//td[11]/span/text()').extract()
print(team_name)
print(diff)
yield{'team_name': team_name,
'win':win,
'loss':loss,
'pct':pct,
'home':home,
'away':away,
'div':div,
'conf': conf,
'ppg': ppg,
'opp_ppg':opp_ppg,
'diff':diff
} | [
"cli112@hawk.iit.edu"
] | cli112@hawk.iit.edu |
ab1409aaf95d2bf69bc496ba2c8a4938816631bd | 3b7b6648b72910046b6a227db30f71aeee2cba9c | /2020-12-18-neural-style-transfer/deeptools/preprocessing/RandomSingleCropPreprocessor.py | 4ddf0ecef9eedb517ec472e48447e933c6d54b45 | [] | no_license | ken2190/deep-learning-study | f2abeb1cd302e405a15bbb52188ae44ffb414e2f | f2998be89d0c931176f158ae5f48ca562786e171 | refs/heads/main | 2023-04-02T05:07:08.504212 | 2021-04-11T15:11:22 | 2021-04-11T15:11:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 579 | py | from sklearn.feature_extraction.image import extract_patches_2d
# this processor randomly crop an image of fixed size.
class RandomSingleCropPreprocessor:
def __init__(self, width, height):
self.width = width
self.height = height
def preprocess(self, image):
return extract_patches_2d(image, (self.height, self.width), max_patches=1)[0]
# from PIL import Image
# import numpy as np
# pp = RandomSingleCropPreprocessor(200, 200)
# im = np.array(Image.open('pyimagesearch/preprocessing/test.png'))
# Image.fromarray(pp.preprocess(im)).show()
| [
"machingclee@gmail.com"
] | machingclee@gmail.com |
b890716819c35bfdf6811e2bfe263b08a7d67c29 | 06e987d21c6e0cdad48bd45bcf602b841490c428 | /src/translator/modules/fx.py | 6c0e11c4f3d05eb6b291beb2ab942d8b03003456 | [] | no_license | westlicht/omxone | 5f2512ec0493eb637a4f211073175c647805db7f | af92fb08c654eb50ea583199c94d626867326699 | refs/heads/master | 2021-01-15T19:45:20.313616 | 2018-09-29T19:10:05 | 2018-09-29T19:10:05 | 1,063,451 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,253 | py |
import time
from log import Logger
from translator.base import Translator
from translator.factory import TranslatorFactory
class TranslatorFx(Translator):
    """MIDI FX-routing translator: maps controller keys to a small state
    machine (normal / mute / FX1 / FX2) and drives the host's master and
    send levels accordingly."""
    # State identifiers for the routing state machine.
    STATE_NORMAL = 0
    STATE_MUTE = 1
    STATE_FX1 = 2
    STATE_FX2 = 3
    # Delay (seconds) inserted between level changes when switching states,
    # presumably to avoid audible clicks/overlap — TODO confirm.
    SWITCH_INTERVAL = 0.02
    state_names = {
        STATE_NORMAL : 'Normal',
        STATE_MUTE : 'Mute',
        STATE_FX1 : 'FX1',
        STATE_FX2 : 'FX2',
    }
    def __init__(self, core, options):
        super(TranslatorFx, self).__init__(core, options)
        # Input (controller) and output (host) channels from config options.
        self.controller = self.add_channel(options['controller'])
        self.host = self.add_channel(options['host'])
        # Key bindings on the controller channel.
        self.add_key('main', self.__key_main, self.controller, int(options['key_main']))
        self.add_key('fx1', self.__key_fx1, self.controller, int(options['key_fx1']))
        self.add_key('fx2', self.__key_fx2, self.controller, int(options['key_fx2']))
        self.add_key('reset', self.__key_reset, self.controller, int(options['key_reset']))
        # Control-change outputs on the host channel.
        self.add_ctrl('master', self.host, int(options['cc_master']))
        self.add_ctrl('send1', self.host, int(options['cc_send1']))
        self.add_ctrl('send2', self.host, int(options['cc_send2']))
        self.add_ctrl('selector', self.host, int(options['cc_selector']))
        self.__set_state(self.STATE_NORMAL)
    def __key_main(self, key):
        # 'main' toggles between normal playback and mute; from any FX state
        # it returns to normal.
        if self.__state == self.STATE_NORMAL:
            self.__set_state(self.STATE_MUTE)
        else:
            self.__set_state(self.STATE_NORMAL)
    def __key_fx1(self, key):
        self.__set_state(self.STATE_FX1)
    def __key_fx2(self, key):
        self.__set_state(self.STATE_FX2)
    def __key_reset(self, key):
        self.__set_state(self.STATE_NORMAL)
    def __set_state(self, state):
        """Enter *state*: raise the target level first, pause, then drop the
        others, and finally publish the state on the 'selector' CC."""
        self.__state = state
        Logger.debug("Switching to state %s" % (self.state_names[self.__state]))
        if self.__state == self.STATE_NORMAL:
            self.__update_levels(127, 0, 0)
            self.send_ctrl('master', 127)
            time.sleep(self.SWITCH_INTERVAL)
            self.send_ctrl('send1', 0)
            self.send_ctrl('send2', 0)
        elif self.__state == self.STATE_MUTE:
            self.__update_levels(0, 0, 0)
            self.send_ctrl('master', 0)
            self.send_ctrl('send1', 0)
            self.send_ctrl('send2', 0)
        elif self.__state == self.STATE_FX1:
            self.__update_levels(0, 127, 0)
            self.send_ctrl('send1', 127)
            time.sleep(self.SWITCH_INTERVAL)
            self.send_ctrl('master', 0)
            self.send_ctrl('send2', 0)
        elif self.__state == self.STATE_FX2:
            self.__update_levels(0, 0, 127)
            # NOTE(review): unlike FX1, this branch sleeps *before* raising
            # send2 — verify whether the asymmetry is intentional.
            time.sleep(self.SWITCH_INTERVAL)
            self.send_ctrl('send2', 127)
            self.send_ctrl('master', 0)
            self.send_ctrl('send1', 0)
        # Selector CC encodes the state index scaled into the 0..127 range.
        self.send_ctrl('selector', (self.__state / 3.0) * 127.0)
    def __update_levels(self, master, send1, send2):
        # The early return deliberately disables this helper; the explicit
        # send_ctrl calls in __set_state are used instead.  The dead code
        # below is kept as-is (presumably for easy re-enabling).
        return
        self.send_ctrl('master', master)
        self.send_ctrl('send1', send1)
        self.send_ctrl('send2', send2)
TranslatorFactory.register(TranslatorFx)
| [
"simon.kallweit@intefo.ch"
] | simon.kallweit@intefo.ch |
94681de536cc366afc4fbb90e2cae9b34317ef5d | 9b28c1650cf42c65cba5e63e989da0912729b8b2 | /titanic/knn.py | 073b7502bafc94583d36550e48a5977b5955b1e6 | [
"MIT"
] | permissive | chung1905-dump/kaggle_comp | a3108f523f06ec3df22b0de1adb4ef5ed459d87a | 2381620ee9a6ec170b0471c055405a2451169bc6 | refs/heads/master | 2022-07-09T13:05:20.521700 | 2020-05-02T12:08:02 | 2020-05-02T12:08:02 | 259,271,183 | 0 | 0 | MIT | 2022-06-22T01:50:51 | 2020-04-27T09:46:41 | Python | UTF-8 | Python | false | false | 1,345 | py | from collections import namedtuple
from typing import List
import numpy as np
train_data: List[List[float]] = []
Neighbor = namedtuple('Neighbor', 'nclass distance')
def _euclidean_distance(instance1: List[float], instance2: List[float]) -> float:
if not len(instance1) == len(instance2):
raise Exception('Exception euclid')
x = np.array(instance1)
y = np.array(instance2)
return np.linalg.norm(x - y)
def clear() -> None:
global train_data
train_data = []
def fit(*data: List[List[float]]):
global train_data
train_data.extend(data)
def predict(instance: List[float], k: int = 1) -> int:
global train_data
neighbor_list = _get_neighbor_list(instance, train_data)
sorted_distances = sorted(neighbor_list, key=lambda x: x.distance)
class_dict = {}
k_neighbors = sorted_distances[:k]
for i in k_neighbors:
n = class_dict.setdefault(i.nclass, 0)
class_dict[i.nclass] = n + 1
return max(class_dict, key=class_dict.get)
def _get_neighbor_list(instance, train_data) -> List[Neighbor]:
distances: List[Neighbor] = []
for n_class in range(0, len(train_data)):
for i in train_data[n_class]:
neighbor = Neighbor(nclass=n_class, distance=_euclidean_distance(instance, i))
distances.append(neighbor)
return distances
| [
"chung1905@gmail.com"
] | chung1905@gmail.com |
883b131aab7cc6403a4eb04a14315ce599a3fb52 | 60d6b8501d0be546437b26a6ee1f9fab97ec3897 | /platypush/message/event/zigbee/mqtt.py | e3179407f6aa9291e1c47fb4fbf836c0c6dbf740 | [
"MIT"
] | permissive | BlackLight/platypush | 68284a85b2f9eef303d26b04530f075927b5834a | 446bc2f67493d3554c5422242ff91d5b5c76d78a | refs/heads/master | 2023-08-31T21:01:53.519960 | 2023-08-29T22:05:38 | 2023-08-29T22:05:38 | 109,421,017 | 265 | 25 | MIT | 2023-09-01T23:15:49 | 2017-11-03T16:56:24 | Python | UTF-8 | Python | false | false | 5,542 | py | from typing import Dict, Any
from platypush.message.event import Event
class ZigbeeMqttEvent(Event):
    """Base class for all zigbee2mqtt-related events in this module."""
    pass
class ZigbeeMqttOnlineEvent(ZigbeeMqttEvent):
"""
Triggered when a zigbee2mqtt service goes online.
"""
def __init__(self, host: str, port: int, *args, **kwargs):
super().__init__(*args, host=host, port=port, **kwargs)
class ZigbeeMqttOfflineEvent(ZigbeeMqttEvent):
"""
Triggered when a zigbee2mqtt service goes offline.
"""
def __init__(self, host: str, port: int, *args, **kwargs):
super().__init__(*args, host=host, port=port, **kwargs)
class ZigbeeMqttDevicePropertySetEvent(ZigbeeMqttEvent):
"""
Triggered when a the properties of a Zigbee connected devices (state, brightness, alert etc.) change.
"""
def __init__(self, host: str, port: int, device: str, properties: Dict[str, Any], *args, **kwargs):
super().__init__(*args, host=host, port=port, device=device, properties=properties, **kwargs)
class ZigbeeMqttDevicePairingEvent(ZigbeeMqttEvent):
"""
Triggered when a device is pairing to the network.
"""
def __init__(self, host: str, port: int, device=None, *args, **kwargs):
super().__init__(*args, host=host, port=port, device=device, **kwargs)
class ZigbeeMqttDeviceConnectedEvent(ZigbeeMqttEvent):
"""
Triggered when a device connects to the network.
"""
def __init__(self, host: str, port: int, device=None, *args, **kwargs):
super().__init__(*args, host=host, port=port, device=device, **kwargs)
class ZigbeeMqttDeviceBannedEvent(ZigbeeMqttEvent):
"""
Triggered when a device is banned from the network.
"""
def __init__(self, host: str, port: int, device=None, *args, **kwargs):
super().__init__(*args, host=host, port=port, device=device, **kwargs)
class ZigbeeMqttDeviceRemovedEvent(ZigbeeMqttEvent):
"""
Triggered when a device is removed from the network.
"""
def __init__(self, host: str, port: int, device=None, force=False, *args, **kwargs):
super().__init__(*args, host=host, port=port, device=device, force=force, **kwargs)
class ZigbeeMqttDeviceRemovedFailedEvent(ZigbeeMqttEvent):
"""
Triggered when the removal of a device from the network failed.
"""
def __init__(self, host: str, port: int, device=None, *args, **kwargs):
super().__init__(*args, host=host, port=port, device=device, **kwargs)
class ZigbeeMqttDeviceWhitelistedEvent(ZigbeeMqttEvent):
"""
Triggered when a device is whitelisted on the network.
"""
def __init__(self, host: str, port: int, device=None, *args, **kwargs):
super().__init__(*args, host=host, port=port, device=device, **kwargs)
class ZigbeeMqttDeviceRenamedEvent(ZigbeeMqttEvent):
"""
Triggered when a device is renamed on the network.
"""
def __init__(self, host: str, port: int, device=None, *args, **kwargs):
super().__init__(*args, host=host, port=port, device=device, **kwargs)
class ZigbeeMqttDeviceBindEvent(ZigbeeMqttEvent):
"""
Triggered when a device bind occurs on the network.
"""
def __init__(self, host: str, port: int, device=None, *args, **kwargs):
super().__init__(*args, host=host, port=port, device=device, **kwargs)
class ZigbeeMqttDeviceUnbindEvent(ZigbeeMqttEvent):
"""
Triggered when a device bind occurs on the network.
"""
def __init__(self, host: str, port: int, device=None, *args, **kwargs):
super().__init__(*args, host=host, port=port, device=device, **kwargs)
class ZigbeeMqttGroupAddedEvent(ZigbeeMqttEvent):
"""
Triggered when a group is added.
"""
def __init__(self, host: str, port: int, group=None, *args, **kwargs):
super().__init__(*args, host=host, port=port, group=group, **kwargs)
class ZigbeeMqttGroupAddedFailedEvent(ZigbeeMqttEvent):
"""
Triggered when a request to add a group fails.
"""
def __init__(self, host: str, port: int, group=None, *args, **kwargs):
super().__init__(*args, host=host, port=port, group=group, **kwargs)
class ZigbeeMqttGroupRemovedEvent(ZigbeeMqttEvent):
"""
Triggered when a group is removed.
"""
def __init__(self, host: str, port: int, group=None, *args, **kwargs):
super().__init__(*args, host=host, port=port, group=group, **kwargs)
class ZigbeeMqttGroupRemovedFailedEvent(ZigbeeMqttEvent):
"""
Triggered when a request to remove a group fails.
"""
def __init__(self, host: str, port: int, group=None, *args, **kwargs):
super().__init__(*args, host=host, port=port, group=group, **kwargs)
class ZigbeeMqttGroupRemoveAllEvent(ZigbeeMqttEvent):
"""
Triggered when all the devices are removed from a group.
"""
def __init__(self, host: str, port: int, group=None, *args, **kwargs):
super().__init__(*args, host=host, port=port, group=group, **kwargs)
class ZigbeeMqttGroupRemoveAllFailedEvent(ZigbeeMqttEvent):
"""
Triggered when a request to remove all the devices from a group fails.
"""
def __init__(self, host: str, port: int, group=None, *args, **kwargs):
super().__init__(*args, host=host, port=port, group=group, **kwargs)
class ZigbeeMqttErrorEvent(ZigbeeMqttEvent):
"""
Triggered when an error happens on the zigbee2mqtt service.
"""
def __init__(self, host: str, port: int, error=None, *args, **kwargs):
super().__init__(*args, host=host, port=port, error=error, **kwargs)
# vim:sw=4:ts=4:et:
| [
"blacklight86@gmail.com"
] | blacklight86@gmail.com |
48a5ecc56a364e379ab32c82cdb8a6c1567cbdb3 | 47005bd473fd78e5026adf3be6708996f3d5c8d7 | /core/migrations/0012_playerstats_receptions.py | d16bc4f19e25813a9da5d7a468ec1cfaa19b8d83 | [] | no_license | andrijan/mfl-commish | 7cfd2d96323484341e20215ff85e4e5cf081e5a1 | a3db2d346e8d21f13c68e791a35ec20a13973c45 | refs/heads/master | 2023-08-23T03:29:05.834989 | 2023-08-03T20:30:20 | 2023-08-03T20:30:20 | 193,875,362 | 1 | 0 | null | 2022-12-08T10:55:05 | 2019-06-26T09:40:03 | Python | UTF-8 | Python | false | false | 380 | py | # Generated by Django 3.1 on 2021-08-05 22:09
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0011_playerstats'),
]
operations = [
migrations.AddField(
model_name='playerstats',
name='receptions',
field=models.IntegerField(default=0),
),
]
| [
"andrijan@gmail.com"
] | andrijan@gmail.com |
642dace9a98ba086429328e4a3bb682bf656ef68 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03139/s354736213.py | c172482303f5c5982f40aa05ddff89c507c32649 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 85 | py | a,b,c = [int(i) for i in input().split()]
print(min(b,c),b+c-a if b + c >= a else 0) | [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
4157b1dda27ee6318c2f824112b3276fa99c85df | 7625dbb2792d2d139b151868afdf04dffe7540b1 | /test.py | de17f3c17a8b85a04c5de81a3a0a1a2be7f1eb6f | [] | no_license | Asphilia/Numerik2 | a5cd045aa7cb076be2c32ac35188542378b011e9 | 48972763c357b570ef0431daf394a47cf84901bb | refs/heads/master | 2022-11-14T03:07:41.957605 | 2020-07-13T13:03:40 | 2020-07-13T13:03:40 | 278,065,696 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,496 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Jul 8 09:46:50 2020
@author: felix
"""
#!/usr/bin/env python3
import plotly.graph_objects as go
import plotly.express as px
import pandas as pd
import numpy as np
from PIL import Image
import matplotlib.pyplot as plt
import scipy.fftpack as sfft
import plotly.express as px
# Quantisierungsmatrix aus dem Lesetext
Q1 = np.array([[8,16,24,32,40,48,56,64],
[16,24,32,40,48,56,64,72],
[24,32,40,48,56,64,72,80],
[32,40,48,56,64,72,80,88],
[40,48,56,64,72,80,88,96],
[48,56,64,72,80,88,96,104],
[56,64,72,80,88,96,104,112],
[64,72,80,88,96,104,112,120]])
# Q aus Aufgabe 2
Q2 = np.array([[16,11,10,16,24,40,51,61],
[12,12,14,19,26,58,60,55],
[14,13,16,24,40,57,69,56],
[14,17,22,29,51,87,80,62],
[18,22,37,56,68,109,103,77],
[24,35,55,64,81,104,113,92],
[49,64,78,87,103,121,120,101],
[72,92,95,98,112,100,103,99]])
def encode_quant(orig, quant):
    """Quantize DCT coefficients: element-wise divide by the quantization
    table and truncate toward zero to integers.

    Fix: ``np.int`` was deprecated in NumPy 1.20 and removed in 1.24; the
    builtin ``int`` is the documented replacement and behaves identically
    here (platform default integer).
    """
    return (orig / quant).astype(int)
def decode_quant(orig, quant):
    """Invert encode_quant: rescale quantized coefficients back to floats
    by multiplying with the quantization table."""
    rescaled = orig * quant
    return rescaled.astype(float)
def encode_dct(orig, bx, by):
    """Tile *orig* into bx-by-by blocks and apply an orthonormal 2-D DCT
    to each block.

    The image is first trimmed so both dimensions divide evenly by the
    block size, then reshaped to (rows//bx, bx, cols//by, by); the DCT
    runs over the two intra-block axes (1 and 3).
    """
    rows = orig.shape[0] // bx * bx
    cols = orig.shape[1] // by * by
    blocks = orig[:rows, :cols].reshape(rows // bx, bx, cols // by, by)
    return sfft.dctn(blocks, axes=[1, 3], norm='ortho')
def decode_dct(orig, bx, by):
    """Invert encode_dct: inverse orthonormal 2-D DCT over the intra-block
    axes, then flatten the (n0, bx, n1, by) block layout back into a plain
    (n0*bx, n1*by) image."""
    spatial = sfft.idctn(orig, axes=[1, 3], norm='ortho')
    height = orig.shape[0] * bx
    width = orig.shape[2] * by
    return spatial.reshape((height, width))
# Load image (grayscale Lena test image expected next to the script).
lena = Image.open('lena.jpg')
print(lena)
plt.figure()
plt.imshow(lena, cmap = plt.get_cmap('Greys_r'))
x = np.array(lena)
quants = [1,2,4,10] # q values
blocks = [(8,8)] # block size 8x8
decs = []
# For each quality scale: quantize the block-DCT of the image with Q1*qscale,
# then dequantize and inverse-transform to get the lossy reconstruction.
for qscale in quants:
    for bx,by in blocks:
        # Scale the base table, cap entries at 100, and reshape so it
        # broadcasts over the (n0, bx, n1, by) coefficient layout.
        quant = (
            ((Q1 * (qscale)))
            .clip(-100, 100) # to prevent clipping
            .reshape((1, bx, 1, by))
        )
        enc = encode_dct(x,bx,by)
        encq = encode_quant(enc, quant)
        decq = decode_quant(encq, quant)
        dec = decode_dct(decq, bx, by)
        decs.append((dec,qscale))
# Display each reconstruction labelled with its q factor.
for i in decs:
    print(i[0].shape)
    reconstructed = Image.fromarray(i[0].astype(np.uint8),'L')
    p = i[1]
    plt.figure()
    plt.title(f'q = {p}')
    plt.imshow(reconstructed, cmap = plt.get_cmap('Greys_r'))
plt.show() | [
"reichlin@hm.edu"
] | reichlin@hm.edu |
c0887733727dd48a5860810b037f39ae174f2319 | 6b94e53e414700a47d626c0bedad2989665f3cbe | /ardana_service/osinstall.py | d4c1604485651167f2e7786f6a187a667a658f79 | [
"Apache-2.0"
] | permissive | toabctl/ardana-service | 2e6a013bb7b42d948dc52c0f5eafc885213e2341 | 5ae470b4efdb9932b173be860d69b57eca55c5cd | refs/heads/master | 2021-01-02T09:06:13.814438 | 2017-07-31T20:51:35 | 2017-07-31T23:11:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 330 | py | from flask import Blueprint
import logging
LOG = logging.getLogger(__name__)
bp = Blueprint('osinstall', __name__)
@bp.route("/api/v2/osinstall", methods=['POST'])
def start_os_install():
    """Accept an OS-install request; 201 signals the install was initiated."""
    response_body = 'OS Install initiated'
    return response_body, 201
@bp.route("/api/v2/osinstall", methods=['GET'])
def get_os_install_status():
    """Report OS-install progress; this stub always answers 'Success'."""
    status = 'Success'
    return status
| [
"gary.smith@suse.com"
] | gary.smith@suse.com |
d1eb06dc28e84efc58bbc3b666fa0f1fb7bc8c48 | c5b608aadf67c7d5159543bc13e09b510bab7b8e | /pylib/cile_torquescript.py | 9e57b675459195cfc8d94f2cf860d00d3cfbf440 | [] | no_license | Torque3D-Resources/KomodoTS | cdc45f6d326f837a803c1664ba4c1b95dedb7ba7 | b49abef919899ba7a2d0357f0385cf2246c192e1 | refs/heads/master | 2020-12-02T16:23:38.246521 | 2015-08-20T19:07:22 | 2015-08-20T19:07:22 | 96,546,272 | 0 | 0 | null | 2017-07-07T14:25:05 | 2017-07-07T14:25:05 | null | UTF-8 | Python | false | false | 12,155 | py | #!/usr/bin/env python
"""A Code Intelligence Language Engine for the TorqueScript language.
A "Language Engine" is responsible for scanning content of
its language and generating CIX output that represents an outline of
the code elements in that content. See the CIX (Code Intelligence XML)
format:
http://community.activestate.com/faq/codeintel-cix-schema
Module Usage:
from cile_torquescript import scan
mtime = os.stat("bar.torquescript")[stat.ST_MTIME]
content = open("bar.torquescript", "r").read()
scan(content, "bar.torquescript", mtime=mtime)
"""
__version__ = "1.0.0"
import os
import sys
import time
import optparse
import logging
import pprint
import glob
from grammars import ytab, torque_lex, yconsts, astnodes
from grammars.astnodes import *
# Note: c*i*ElementTree is the codeintel system's slightly modified
# cElementTree. Use it exactly as you would the normal cElementTree API:
# http://effbot.org/zone/element-index.htm
import ciElementTree as ET
from codeintel2.common import CILEError
#---- exceptions
class TorqueScriptCILEError(CILEError):
    """Raised for errors specific to the TorqueScript language engine."""
#---- global data
log = logging.getLogger("cile.torquescript")
log.setLevel(logging.DEBUG)
#---- public module interface
# Markers for the two kinds of imports TorqueScript supports.
IMPORT_PACKAGE = 1
IMPORT_EXEC = 2
# CITDL type assigned to values whose type we cannot infer.
BASE_TYPE = "ConsoleVar"
# Maps a coarse expression category to the AST node classes that produce it.
# Used for type inference; keys are informal category names.
EXPR_CLASS = {
    "ObjectDecl" : [ObjectDeclNode],
    "Variable" : [VarNode],
    "Assignment" : [SlotAssignOpNode, SlotAssignNode, AssignExprNode, AssignOpExprNode],
    "String" : [StrcatExprNode],
    #"Tag" : [TTagExprNode, TTagDerefNode], # It seems these are unused
    "Int" : [StreqExprNode, IntUnaryExprNode, IntBinaryExprNode],
    "Float" : [FloatUnaryExprNode, FloatBinaryExprNode],
    "Conditional" : [ConditionalExprNode],
    "Error" : [ExprNode,BinaryExprNode] ,# We should never get one of these that isn't a subclass of some sort
    "Constant" : [IntNode, FloatNode, StrConstNode, ConstantNode],
    "SlotAccess" : [SlotAccessNode,InternalSlotAccessNode],
    "Other" : [CommaCatExprNode, AssertCallExprNode, FuncCallExprNode],
}
def test_elt_attrs(elt, **kwargs):
return not sum(elt.get(attr) != val for attr,val in kwargs.iteritems())
def is_func(node, name):
    """True when *node* is a plain (no namespace, direct) call to function *name*."""
    if not isinstance(node, FuncCallExprNode):
        return False
    plain_call = not (node.call_type or node.namespace)
    return node.func_name.lower() == name and plain_call
def add_var(elt, name, citdl=None, local=True, arg=False):
    """Append a <variable> CIX element for *name* under *elt* and return it.

    *citdl* defaults to the engine's base type when falsy.
    """
    attributes = "__local__" if local else ""
    ilk = "argument" if arg else ""
    return ET.SubElement(elt, "variable", name=name,
                         citdl=citdl or BASE_TYPE,
                         attributes=attributes, ilk=ilk)
def add_function_args(func_elt, func_decl):
    """Register each formal argument of *func_decl* as a variable under *func_elt*."""
    for param in func_decl.args.stmts:
        if param.array_index_expr:
            # An array-indexed name is unexpected in a function header;
            # previously this only logged a warning, so keep tolerating it.
            pass
        add_var(func_elt, param.var_name, citdl=func_decl.namespace, arg=True)
def handle_expr(statement, default_blob, script_root, local_namespace):
    """Inspect an expression statement and record anything CIX-worthy.

    Only import-like builtin calls (``activatepackage``/``exec``) currently
    produce CIX output (an <import> element under *default_blob*);
    assignments are recognised but not yet recorded.

    Returns a ``(name, data, citdl_types)`` tuple, where *citdl_types* is a
    space-joined string of candidate type names for the expression.
    """
    # Classify import-like builtin calls.
    if is_func(statement, "activatepackage"):
        is_import = IMPORT_PACKAGE
    elif is_func(statement, "exec"):
        is_import = IMPORT_EXEC
    else:
        is_import = False
    if is_import:
        # Work out which argument node names the imported package/script.
        num_args = len(statement.args.stmts)
        if not num_args:
            # Fix: this used to assign the misspelled name 'pack_name_node',
            # so the 'if not package_name_node' test below raised NameError
            # for zero-argument calls.
            package_name_node = None
        elif num_args == 1:
            package_name_node, = statement.args.stmts
        elif is_import == IMPORT_EXEC and num_args > 1:
            # exec() accepts optional extra arguments; the first is the path.
            package_name_node = statement.args.stmts[0]
        else:
            # Malformed import call -- skip it.
            package_name_node = None
        # Fix: pre-initialize so the tests below never see an unbound name
        # (the original left name_data unset for unrecognized node types).
        name_data = ""
        if not package_name_node:
            name_data = ""
        elif isinstance(package_name_node, (ConstantNode, StrConstNode)):
            name_data = package_name_node.value
        elif isinstance(package_name_node, ExprNode):
            # Programmatically generated import: evaluate it recursively.
            name_name, name_data, name_type = handle_expr(package_name_node, default_blob, script_root, local_namespace)
            # TODO: this result really needs further processing.
        if name_data and is_import == IMPORT_EXEC:
            name_data = find_exec_file(name_data)
        if name_data:
            ET.SubElement(default_blob, "import", name=str(name_data), symbol="*")
        expr_name = ""
        expr_data = ""
        # exec() also yields an int status, so advertise IntType in that case.
        expr_types = [type_name for type_name in (BASE_TYPE, "IntType") if type_name == BASE_TYPE or is_import == IMPORT_EXEC]
    else:
        expr_name = ""
        expr_data = ""
        expr_types = [BASE_TYPE]
        if isinstance(statement, (AssignExprNode, AssignOpExprNode)):
            # Plain variable assignment -- not recorded yet.
            pass
        elif isinstance(statement, (SlotAssignNode, SlotAssignOpNode)):
            # Slot/array assignment -- not recorded yet.
            pass
    return (expr_name, expr_data, " ".join(expr_types))
def find_exec_file(file_name):
    """Resolve the script path referenced by an exec() call.

    Stub implementation: the name is returned unchanged.
    """
    return file_name
def extract_blob_data(ast, default_blob, script_root, local_namespace=None):
    """Walk a parsed statement list and populate *default_blob* with CIX scopes.

    *ast* is always a StmtList (that is all yyparse produces).  Function
    declarations become <scope ilk="function"> elements, grouped under a
    package blob and/or namespace class when present; expressions are handed
    to handle_expr for import detection.
    """
    # we know this is a statement list
    # that's all that yyparse will ever give us
    exports_symbols = False # Let's see what we find here
    # Assignment: 'AssignExprNode', 'AssignOpExprNode', SlotAssignNode','SlotAssignOpNode',
    # List: 'StmtList'
    # Recurse Tree: 'ReturnStmtNode', 'ExprNode'
    # 'FloatBinaryExprNode', 'FloatNode', 'FloatUnaryExprNode', 'FuncCallExprNode',
    # Declaration: 'FunctionDeclStmtNode',ObjectDeclNode
    # Tricky: IfStmtNode, LoopStmtNode
    # Useless: 'BreakStmtNode','ContinueStmtNode',
    # Ignore: 'VarNode'
    # Dunno: InternalSlotAccessNode
    if not local_namespace:
        local_namespace = default_blob
    for statement in ast.stmts:
        if isinstance(statement, StmtList):
            # Nested statement list: recurse within the same namespace.
            extract_blob_data(statement, default_blob, script_root, local_namespace=local_namespace)
        # We need a better heuristic for who we might be doc'ing
        elif isinstance(statement, StrConstNode) and statement.doc:
            pass
        elif isinstance(statement, IfStmtNode):
            # Only the condition can declare variables worth scanning here.
            if statement.test_expr:
                handle_expr(statement.test_expr, default_blob, script_root, local_namespace)
        elif isinstance(statement, LoopStmtNode):
            pass
        elif isinstance(statement, FunctionDeclStmtNode):
            # declare function
            if statement.package_name:
                # add a new module (blob) for the package, or reuse an existing one
                lang = default_blob.get('lang')
                for parent_module in script_root.findall("./scope"):
                    if test_elt_attrs(parent_module, ilk="blob", lang=lang, name=statement.package_name):
                        break
                else:
                    parent_module = ET.SubElement(script_root,"scope",ilk="blob",lang=lang,name=statement.package_name)
            else:
                parent_module = default_blob
            if statement.namespace:
                # find the appropriate namespace (modelled as a class), or add a new one
                for parent_elt in parent_module.findall("./scope"):
                    if test_elt_attrs(parent_elt, ilk="class", name=statement.namespace):
                        break
                else:
                    parent_elt = ET.SubElement(parent_module,"scope",ilk="class",name=statement.namespace)
            else:
                parent_elt = parent_module
            #log.debug("-->Declaring function: %s%s%s",
            #          ((statement.package_name + "::") if statement.package_name else ""),
            #          ((statement.namespace + "::") if statement.namespace else ""),
            #          statement.fn_name)
            new_function = ET.SubElement(parent_elt,"scope",ilk="function",name=statement.fn_name)
            add_function_args(new_function,statement)
            # Scan the function body with the new function as the local namespace.
            extract_blob_data(statement.stmts ,default_blob, script_root, local_namespace=new_function)
        elif isinstance(statement, ObjectDeclNode):
            # declare namespace
            pass
        # default behavior, this should be last, since many of the above are subclasses of ExprNode that we know how to handle better
        elif isinstance(statement, (ExprNode, ReturnStmtNode)):
            # recurse the tree for this expression, and extract any assignment expressions
            handle_expr((statement if isinstance(statement, ExprNode) else statement.expr),
                        default_blob, script_root, local_namespace)
def scan_buf(buf, mtime=None, lang="TorqueScript"):
    """Scan the given TorqueScript buffer and return an ElementTree (conforming
    to the CIX schema) giving a summary of its code elements.

    @param buf {TorqueScriptBuffer} is the TorqueScript buffer to scan
    @param mtime {int} is a modified time for the file (in seconds since
        the "epoch"). If it is not specified the _current_ time is used.
        Note that the default is not to stat() the file and use that
        because the given content might not reflect the saved file state.
    @param lang {str} is the language name recorded in the generated CIX.
    """
    if mtime is None:
        mtime = int(time.time())

    # The 'path' attribute must use normalized dir separators.
    if sys.platform.startswith("win"):
        path = buf.path.replace('\\', '/')
    else:
        path = buf.path

    tree = ET.Element("codeintel", version="2.0",
                      xmlns="urn:activestate:cix:2.0")
    file_elt = ET.SubElement(tree, "file", lang=lang, mtime=str(mtime))
    blob = ET.SubElement(file_elt, "scope", ilk="blob", lang=lang,
                         name=os.path.basename(path))

    # The parser prints diagnostics to stdout; redirect to stderr while it
    # runs.  Fix: the redirection is now wrapped in try/finally so stdout is
    # restored even if the lexer setup or extract_blob_data raises (the
    # original restored it only on the straight-line path, leaving stdout
    # pointing at stderr for the rest of the process).
    old_stdout = sys.stdout
    sys.stdout = sys.stderr
    try:
        ytab.yy_clear_stacks()
        torque_lex.set_scan_buffer(buf.accessor.text, is_filename=False)
        try:
            successful_parse = not ytab.yyparse()
        except Exception:
            successful_parse = False
            import traceback
            traceback.print_exc(file=sys.stderr)
            traceback.print_tb(sys.exc_info()[2], file=sys.stderr)
        if successful_parse:
            # yyvs[1] holds the root statement list produced by the parser.
            ts_ast = ytab.yyvs[1]
            extract_blob_data(ts_ast, blob, file_elt)
        else:
            file_elt.set("error", "Error parsing file")
    finally:
        sys.stdout = old_stdout
    return tree
| [
"elfprince13@gmail.com"
] | elfprince13@gmail.com |
aeb889eaa9e03351923091f9dba2aa050c7ddbb4 | 0107b9b0b2af36b1bd82ba1f7f0d912c58ef6b33 | /article/urls.py | 64f5be1403374ba5f42779b1f510088b5bc11095 | [] | no_license | 704296546/learndj2 | 348ade1c1c920c0c616ebac54434bc995975929d | d843fabdd76e09fd979e41270c09f289ebbc7547 | refs/heads/master | 2020-08-07T17:01:30.089747 | 2019-10-08T02:36:40 | 2019-10-08T02:36:40 | 213,532,730 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,485 | py | from django.urls import path
from django.urls import re_path
from . import views, list_views

# Registers the 'article' URL namespace; required so templates can reverse
# URLs as 'article:<name>'.  Fix: this assignment was duplicated both before
# and after the import -- one assignment is sufficient.
app_name = 'article'
# URL routes for the article app.  CRUD-style views live in ``views``;
# list/detail/like pages live in ``list_views``.  Each route's ``name`` is
# reversed through the 'article' namespace, e.g. {% url 'article:article_list' %}.
urlpatterns = [
    path('article-column/', views.article_column, name="article_column"),
    path('rename-article-column/', views.rename_article_column, name="rename_article_column"),
    path('del-article-column/', views.del_article_column, name="del_article_column"),
    path('article-post/', views.article_post, name="article_post"),
    path('article-list/', views.article_list, name="article_list"),
    # re_path: detail URL embeds a numeric id and a slug, e.g. article-detail/3/my-title/
    re_path('article-detail/(?P<id>\d+)/(?P<slug>[-\w]+)/$', views.article_detail, name="article_detail"),
    path('del-article/', views.del_article, name="del_article"),
    path('redit-article/<int:article_id>/', views.redit_article, name="redit_article"),
    path('list-article-titles/', list_views.article_titles, name="article_titles"),
    path('article-content/<int:id>/<slug:slug>/', list_views.article_detail, name="article_content"),
    path('list-article-titles/<username>/', list_views.article_titles, name="author_articles"),
    path('like-article/', list_views.like_article, name="like_article"),
    path('article-tag/', views.article_tag, name="article_tag"),
    path('del-article-tag/', views.del_article_tag, name="del_article_tag"),
]
| [
"51222598+704296546@users.noreply.github.com"
] | 51222598+704296546@users.noreply.github.com |
a472c103c0b1f3c1f8c566e750f7ba8e53639190 | 65cc6a8877896ef69dd03d7b5eee5bed56e5371f | /example/attpc-daq/web/attpcdaq/daq/templatetags/daq_model_tags.py | 600bbc1d51d3e665f9f57b9b0ce19ce3797deda5 | [] | no_license | wuhongyi/DjangoNote | 34bdb9e82fc379e19b1df0bd7c90e504fa70a40d | 81ad949ff895feda8131d8bdf5fa1439f962ae37 | refs/heads/master | 2020-05-02T17:54:12.270297 | 2019-05-22T14:37:32 | 2019-05-22T14:37:32 | 178,112,720 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 762 | py | from django import template
from ..models import DataSource
register = template.Library()
def get_datasource_attr_from_choices(attr_name, choices):
value = getattr(DataSource, attr_name, None)
# Verify that the result is a valid member of the set of choices.
# This also ensures that we're not just returning any random attribute
# of the model, but just one member of a set of constants.
if value not in (key for key, name in choices):
return None
else:
return value
@register.simple_tag
def datasource_state(name):
return get_datasource_attr_from_choices(name, DataSource.STATE_CHOICES)
@register.simple_tag
def daq_state(name):
return get_datasource_attr_from_choices(name, DataSource.DAQ_STATE_CHOICES) | [
"wuhongyi@pku.edu.cn"
] | wuhongyi@pku.edu.cn |
4150071abeda7b1b600c232b55e47d2a2e57dc48 | 20daa1f21ba2cb1bd86fa14fc3550c5229bc99b4 | /Img_Crawler/anime_crawl/run.py | dfa4e1c6448cc9c891443a6c9f2a84707e01b7eb | [] | no_license | hakanaku1234/Keras-Illustration2Vec | e6ca3359f1a8397b957218df777d5ebf68f49fbf | 7987f3721230eaf443f10c27c2023e6ef8588520 | refs/heads/master | 2020-09-05T05:44:47.471353 | 2019-01-17T06:14:06 | 2019-01-17T06:14:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 189 | py | from scrapy import cmdline
import sys
frac = sys.argv[1]
maxfrac = sys.argv[2]
cmdline.execute(("scrapy crawl anime -o tags_%s.csv -a frac=%s -a maxfrac=%s" %(frac,frac,maxfrac)).split())
| [
"30581485+seekerzz@users.noreply.github.com"
] | 30581485+seekerzz@users.noreply.github.com |
c5b9da01ce23e0b136581d317876cd16c3166c6a | 1078937a951f97f1e0c32db9642151195bdcc27e | /TestPara/pymp/par.py | 310d0c213d345c2bd33bf29496641f78cfa95aec | [] | no_license | AmilaWeerasinghe/e15-4yp-Real-Time-Data-processing-and-AI-for-Distributed-IoT | 0b61b7ab29539c60b84997fbb322575d84e2d037 | 43eb3a343f1d6351865d3ed44ccf5ea05e539116 | refs/heads/main | 2023-04-13T22:46:02.023941 | 2021-04-20T11:23:25 | 2021-04-20T11:23:25 | 359,160,012 | 1 | 0 | null | 2021-04-18T18:56:29 | 2021-04-18T14:05:20 | null | UTF-8 | Python | false | false | 372 | py | from __future__ import print_function
import pymp
# Process-shared array of 100 bytes; each pymp worker writes into its slice.
ex_array = pymp.shared.array((100,), dtype='uint8')
if __name__ == '__main__':
    # Fork 4 worker processes; p.range splits [0, 100) among them.
    with pymp.Parallel(4) as p:
        for index in p.range(0, 100):
            ex_array[index] = 1
            # The parallel print function takes care of asynchronous output.
            p.print('Yay! {} done!'.format(index))
| [
"noreply@github.com"
] | noreply@github.com |
cc16d1697225baee47a86dda51adb9016bdd330c | 3f394cd47a1aaf0ae2f8de5ab9854f52341e017a | /tests/conftest.py | 0ec2f5ef473a93e1446046c292552c5de1df0cff | [
"MIT"
] | permissive | devildeveloper/Clay | e3771d97d23ae3ba7d866d8921102d50e95a6562 | ca419ee4cfe191724ed68e3507515a5b258bb4bb | refs/heads/master | 2021-01-18T02:27:22.094481 | 2013-11-18T20:24:02 | 2013-11-18T20:24:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 262 | py | # -*- coding: utf-8 -*-
"""
Directory-specific fixtures, hooks, etc. for py.test
"""
from clay import Clay
import pytest
from .helpers import TESTS
@pytest.fixture()
def c():
    """Clay application instance rooted at the tests directory."""
    return Clay(TESTS)
@pytest.fixture()
def t(c):
    """Test client bound to the Clay app from the ``c`` fixture."""
    return c.get_test_client()
| [
"juanpablo@lucumalabs.com"
] | juanpablo@lucumalabs.com |
db15a4704fc528cd8642dd3381dc5a68d2cc81e1 | 069d70243971685c39f8b8a6952ab7a2dad29ac4 | /lab5.1/q1_051.py | ce56a3b6975580b032dd03652a5626745812ded9 | [] | no_license | oisinhenry/CA117-2018 | 3679c762163178e863cfd6aa0bbfa65770d66a69 | 9f55dd4d7fa73e72191d6753c0fd7d198137b2b0 | refs/heads/master | 2021-01-24T11:59:08.404124 | 2018-04-18T13:03:03 | 2018-04-18T13:03:03 | 123,111,424 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 231 | py | import sys
s = sys.argv[1]

# Swap every adjacent pair of characters; an odd trailing character stays
# in place.  Handles empty and single-character input naturally, since a
# one-character chunk reversed is itself.
chunks = [s[i:i + 2] for i in range(0, len(s), 2)]
new_s = "".join(chunk[::-1] for chunk in chunks)

print(new_s)
"oisinhenry@gmail.com"
] | oisinhenry@gmail.com |
f7111d3baed74859346e502aea7afd028bf342d0 | 1426a57b8df011ae2666220bec6e189b2b28e454 | /examples/tutorials/lines.py | 10db830926997bae45a9a8ebb4cd77472ccc35dd | [
"BSD-3-Clause"
] | permissive | srijac/pygmt | 848604e734e7a11dbd5d8835735e70df59fa0a68 | cd822ca2c01f98a1496c63cc0bff8cc6661cb3bb | refs/heads/main | 2023-07-13T04:51:08.833644 | 2021-08-17T09:43:31 | 2021-08-17T09:43:31 | 397,364,498 | 1 | 0 | BSD-3-Clause | 2021-08-19T20:58:03 | 2021-08-17T19:06:21 | null | UTF-8 | Python | false | false | 3,501 | py | """
Plotting lines
==============
Plotting lines is handled by :meth:`pygmt.Figure.plot`.
"""
# sphinx_gallery_thumbnail_number = 3
import pygmt
########################################################################################
# Plot lines
# ----------
#
# Create a Cartesian figure using ``projection`` parameter and set the axis scales
# using ``region`` (in this case, each axis is 0-10). Pass a list of ``x`` and ``y``
# values to be plotted as a line.
fig = pygmt.Figure()
# 15x10 cm Cartesian canvas; frame="a" annotates both axes automatically.
fig.plot(
    region=[0, 10, 0, 10],
    projection="X15c/10c",
    frame="a",
    x=[1, 8],
    y=[5, 9],
    pen="1p,black",
)
fig.show()
########################################################################################
# Additional line segments can be added by including additional values for ``x``
# and ``y``.
fig = pygmt.Figure()
# Three vertices produce two connected line segments.
fig.plot(
    region=[0, 10, 0, 10],
    projection="X15c/10c",
    frame="a",
    x=[1, 6, 9],
    y=[5, 7, 4],
    pen="1p,black",
)
fig.show()
########################################################################################
# To plot multiple lines, :meth:`pygmt.Figure.plot` needs to be used for each
# additional line. Arguments such as ``region``, ``projection``, and ``frame`` do
# not need to be repeated in subsequent uses.
fig = pygmt.Figure()
fig.plot(
    region=[0, 10, 0, 10],
    projection="X15c/10c",
    frame="a",
    x=[1, 6, 9],
    y=[5, 7, 4],
    pen="2p,blue",
)
# Second plot call reuses the figure's region/projection/frame settings.
fig.plot(x=[2, 4, 10], y=[3, 8, 9], pen="2p,red")
fig.show()
########################################################################################
# Change line attributes
# ----------------------
#
# The line attributes can be set by the ``pen`` parameter. ``pen`` takes a string
# argument with the optional values *width*,\ *color*,\ *style*.
#
# In the example below, the pen width is set to ``5p``; ``black`` is used as the
# default color and ``solid`` as the default style.
fig = pygmt.Figure()
# pen="5p": width only; color and style fall back to black/solid defaults.
fig.plot(
    region=[0, 10, 0, 10],
    projection="X15c/10c",
    frame="a",
    x=[1, 8],
    y=[3, 9],
    pen="5p",
)
fig.show()
########################################################################################
# The line color can be set and is added after the line width to the ``pen`` parameter.
# In the example below, the line color is set to ``red``.
fig = pygmt.Figure()
# pen="5p,red": width plus explicit color.
fig.plot(
    region=[0, 10, 0, 10],
    projection="X15c/10c",
    frame="a",
    x=[1, 8],
    y=[3, 9],
    pen="5p,red",
)
fig.show()
########################################################################################
# The line style can be set and is added after the line width or color to the
# ``pen`` parameter. In the example below, the line style is set to
# ``..-`` (*dot dot dash*), and the default color ``black`` is used.
fig = pygmt.Figure()
# pen="5p,..-": dot-dot-dash line style with the default black color.
fig.plot(
    region=[0, 10, 0, 10],
    projection="X15c/10c",
    frame="a",
    x=[1, 8],
    y=[3, 9],
    pen="5p,..-",
)
fig.show()
########################################################################################
# The line width, color, and style can all be set in the same ``pen`` parameter. In the
# example below, the line width is set to ``7p``, the color is set to ``green``, and the
# line style is ``-.-`` (*dash dot dash*).
#
# For a gallery showing other ``pen`` settings, see :doc:`/gallery/lines/linestyles`.
fig = pygmt.Figure()
# pen="7p,green,-.-": width, color, and dash-dot-dash style combined.
fig.plot(
    region=[0, 10, 0, 10],
    projection="X15c/10c",
    frame="a",
    x=[1, 8],
    y=[3, 9],
    pen="7p,green,-.-",
)
fig.show()
| [
"noreply@github.com"
] | noreply@github.com |
04cd109df9311d85682f4e577ed1c3f8f976fcc7 | f7054d931c43f5b209b2e3648e4af812b48935d2 | /interfaces/storage-server/requires.py | 32c05f5af53cdca945566cbd57a04f761c37b383 | [] | no_license | zhougit86/demo_charm | c610a944a360e068762a610435d932c2f88b442c | c108df5d04804e12cdb7f3542686255bca88b0d0 | refs/heads/master | 2021-01-18T15:45:29.387199 | 2017-05-22T04:40:57 | 2017-05-22T04:40:57 | 86,680,961 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,940 | py | from charms.reactive import RelationBase
from charms.reactive import hook
from charms.reactive import scopes
class ServerStorageRequires(RelationBase):
    """Requires side of the 'server-storage' charm interface.

    Each remote unit gets its own conversation (UNIT scope); a unit counts
    as available once it has published an 'anotherConfig' value.
    """
    scope = scopes.UNIT
    @hook('{requires:server-storage}-relation-{joined,changed}')
    def changed(self):
        conv = self.conversation()
        if conv.get_remote('anotherConfig'):
            # this unit's conversation has a port, so
            # it is part of the set of available units
            conv.set_state('{server-storage}.available')
    @hook('{requires:server-storage}-relation-{departed}')
    def departed(self):
        # Unit left the relation: it is no longer available.
        conv = self.conversation()
        conv.remove_state('{server-storage}.available')
    def services(self):
        """
        Returns a list of available server-storage services and their associated hosts
        and configs.
        The return value is a list of dicts of the following form::
            [
                {
                    'service_name': name_of_service,
                    'hosts': [
                        {
                            'hostname': address_of_host,
                            'anotherConfig': unit's config,
                        },
                        # ...
                    ],
                },
                # ...
            ]
        """
        services = {}
        for conv in self.conversations():
            # Conversation scope is 'service/unit-number'; group by service.
            service_name = conv.scope.split('/')[0]
            service = services.setdefault(service_name, {
                'service_name': service_name,
                'hosts': [],
            })
            host = conv.get_remote('hostname') or conv.get_remote('private-address')
            port = conv.get_remote('anotherConfig')
            if host and port:
                service['hosts'].append({
                    'hostname': host,
                    'anotherConfig': port,
                })
        # Only report services that have at least one fully-published host.
        return [s for s in services.values() if s['hosts']]
| [
"fengxia_41103@hotmail.com"
] | fengxia_41103@hotmail.com |
d546c510a58b01a177c9d64ec2c323aa473720ae | c5d68f58c9523257a8b41954553f5cff2cd5f487 | /Secao_13_Lista_Ex_29e/ex_27.py | 5ab77700769e25293337bd239ed838f3bd7ed0dc | [] | no_license | SouzaCadu/guppe | 04bfcde82d4404eb9ec795006c6931ba07dc72b6 | 1f8a672230c5c27712f522e1e34516591c012453 | refs/heads/master | 2023-03-13T01:32:51.019871 | 2021-02-25T17:02:59 | 2021-02-25T17:02:59 | 320,908,119 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,378 | py | """
27) Faça um programa para gerenciar as notas dos alunos de uma turma salva em um arquivo. O programa deverá ter um menu
contendo as seguinte opções:
(a) Definir informações da turma;
(b) Inserir aluno e notas;
(c) Exibir alunos e médias;
(d) Exibir alunos aprovados;
(e) Exibir alunos reprovados;
(f) Salvar dados em Disco;
(g) Sair do programa (fim)
Faça a rotina que gerencia o menu dentro do main, e para cada uma das opções deste menu, crie uma função específica
# OBS: Não será necessário criar a função/opção para salvar os dados em disco, pois será salvo automaticamente.
"""
from valida_cadastro import valida_nome
def informacoes_turma(arquivo):
    """Print the class information stored in *arquivo*.

    The file is created (empty) first if it does not exist; records are
    semicolon-separated and displayed with ' - ' separators instead.
    """
    try:
        # Opening in append mode creates the file when missing, without truncating.
        with open(arquivo, "a") as _:
            pass
        with open(arquivo, "r", encoding="utf-8") as leitura:
            print(f"\n\n{'-' * 48}INFORMAÇÕES DA TURMA{'-' * 49}")
            texto = leitura.read().strip().splitlines()
            if len(texto) > 0:
                [print(f"{informacao.replace(';', ' - ')}\n{'-' * 117}") for informacao in texto]
            else:
                print(f"\n{'-' * 117}")
    except ValueError:
        print("-" * 117)
        print(f"\n\n{'-' * 56}ERRO{'-' * 57}")
        print("ERRO AO LER O ARQUIVO!")
        print("-" * 117)
    except FileNotFoundError:
        print(f"\n\n{'-' * 56}ERRO{'-' * 57}")
        print("O PROGRAMA NÃO POSSUI PERMISSÃO PARA CRIAR UM DIRETÓRIO/PASTA!")
        print("-" * 117)
    except OSError:
        print(f"\n\n{'-' * 56}ERRO{'-' * 57}")
        print("O SO NÃO ACEITA CARACTERES ESPECIAIS EM NOMES DE ARQUIVOS!")
        print("-" * 117)
def inserir_notas_alunos(arquivo):
    """Prompt for a student's id, name and three grades and append them to *arquivo*.

    The file is created if missing.  Records are stored as
    ``cod;nome;n1 - n2 - n3``.  Duplicate ids are rejected.

    Fix: removed a leftover debug ``print(cod, int(linha[0]))`` that leaked
    raw ids into the user-facing output whenever a duplicate was found.
    """
    try:
        with open(arquivo, "a", encoding="utf-8") as insercao:
            print(f"\n\n{'-' * 54}INSERÇÃO{'-' * 55}")
            cod = abs(int(input("Insira o identificador(código) do aluno: ")))
            print("-" * 117)
            codigo_existe = False
            # Re-read the file to check whether this id is already taken.
            with open(arquivo, "r", encoding="utf-8") as leitura:
                texto = leitura.read().strip().splitlines()
                texto = [informacao.split(";") for informacao in texto]
                for linha in texto:
                    if cod == int(linha[0]):
                        codigo_existe = True
            if not codigo_existe:
                nome = str(input(f"Insira o nome do aluno {cod}: ")).strip().title()
                print("-" * 117)
                if valida_nome(nome):
                    nota1 = float(input(f"Insira a primeira nota do aluno {nome}: "))
                    print("-" * 117)
                    nota2 = float(input(f"Insira a segunda nota do aluno {nome}: "))
                    print("-" * 117)
                    nota3 = float(input(f"Insira a terceira nota do aluno {nome}: "))
                    print("-" * 117)
                    insercao.write(f"{cod};{nome};{nota1} - {nota2} - {nota3}\n")
                else:
                    print(f"\n\n{'-' * 56}ERRO{'-' * 57}")
                    print("NOME INVÁLIDO!")
                    print("-" * 117)
            else:
                print(f"\n\n{'-' * 56}ERRO{'-' * 57}")
                print(f"IDENTIFICADOR(CÓDIGO) {cod} JÁ EXISTENTE!")
                print("-" * 117)
    except ValueError:
        # int()/float() failed on user input, or the file held malformed data.
        print("-" * 117)
        print(f"\n\n{'-' * 56}ERRO{'-' * 57}")
        print("ERRO AO RECEBER OS DADOS DO USUÁRIO OU AO LER O ARQUIVO!")
        print("-" * 117)
    except FileNotFoundError:
        print(f"\n\n{'-' * 56}ERRO{'-' * 57}")
        print("O PROGRAMA NÃO POSSUI PERMISSÃO PARA CRIAR UM DIRETÓRIO/PASTA!")
        print("-" * 117)
    except OSError:
        print(f"\n\n{'-' * 56}ERRO{'-' * 57}")
        print("O SO NÃO ACEITA CARACTERES ESPECIAIS EM NOMES DE ARQUIVOS!")
        print("-" * 117)
    except IndexError:
        print(f"\n\n{'-' * 56}ERRO{'-' * 57}")
        print("O MODO QUE AS INFORMAÇÕES SE ENCONTRAM NO TEXTO É INVÁLIDO!")
        print("-" * 117)
def media_aluno(linha):
    """Return the student's average grade, rounded to one decimal place.

    *linha* is a parsed record (list of fields); its last field holds the
    grades joined by " - ".
    """
    grades = [float(grade) for grade in linha[-1].split(" - ")]
    average = sum(grades) / len(grades)
    return float("{:.1f}".format(float(average)))
def alunos_medias(arquivo):
    """Print each student's name and average grade from *arquivo*.

    The file is created (empty) first if it does not exist.
    """
    try:
        with open(arquivo, "a", encoding="utf-8") as _:
            pass
        with open(arquivo, "r", encoding="utf-8") as leitura:
            texto = leitura.read().strip().splitlines()
            texto = [informacoes.split(";") for informacoes in texto]
            print(f"\n\n{'-' * 51}ALUNOS E MÉDIAS{'-' * 51}")
            # linha[1] is the student's name; media_aluno parses the grades field.
            [print(f"{linha[1]} - {media_aluno(linha)}\n{'-' * 117}") for linha in texto]
    except ValueError:
        print(f"\n\n{'-' * 56}ERRO{'-' * 57}")
        print("ERRO AO LER O ARQUIVO!")
        print("-" * 117)
    except FileNotFoundError:
        print(f"\n\n{'-' * 56}ERRO{'-' * 57}")
        print("O PROGRAMA NÃO POSSUI PERMISSÃO PARA CRIAR UM DIRETÓRIO/PASTA!")
        print("-" * 117)
    except OSError:
        print(f"\n\n{'-' * 56}ERRO{'-' * 57}")
        print("O SO NÃO ACEITA CARACTERES ESPECIAIS EM NOMES DE ARQUIVOS!")
        print("-" * 117)
    except IndexError:
        print(f"\n{'-' * 117}")
        print(f"\n\n{'-' * 56}ERRO{'-' * 57}")
        print("O MODO QUE AS INFORMAÇÕES SE ENCONTRAM NO TEXTO É INVÁLIDO!")
        print("-" * 117)
def alunos_aprovados(arquivo):
    """Print the names of approved students (average >= 6.0) from *arquivo*.

    The file is created (empty) first if it does not exist.
    """
    try:
        with open(arquivo, "a", encoding="utf-8") as _:
            pass
        with open(arquivo, "r", encoding="utf-8") as leitura:
            texto = leitura.read().strip().splitlines()
            texto = [informacoes.split(";") for informacoes in texto]
            print(f"\n\n{'-' * 54}APROVADOS{'-' * 54}")
            [print(f"{linha[1]}\n{'-' * 117}") for linha in texto if media_aluno(linha) >= 6.0]
    except ValueError:
        print(f"\n\n{'-' * 56}ERRO{'-' * 57}")
        print("ERRO AO LER O ARQUIVO!")
        print("-" * 117)
    except FileNotFoundError:
        print(f"\n\n{'-' * 56}ERRO{'-' * 57}")
        print("O PROGRAMA NÃO POSSUI PERMISSÃO PARA CRIAR UM DIRETÓRIO/PASTA!")
        print("-" * 117)
    except OSError:
        print(f"\n\n{'-' * 56}ERRO{'-' * 57}")
        print("O SO NÃO ACEITA CARACTERES ESPECIAIS EM NOMES DE ARQUIVOS!")
        print("-" * 117)
    except IndexError:
        print(f"\n{'-' * 117}")
        print(f"\n\n{'-' * 56}ERRO{'-' * 57}")
        print("O MODO QUE AS INFORMAÇÕES SE ENCONTRAM NO TEXTO É INVÁLIDO!")
        print("-" * 117)
def alunos_reprovados(arquivo):
    """Print the names of failing students (average < 6.0) from *arquivo*.

    The file is created (empty) first if it does not exist.
    """
    try:
        with open(arquivo, "a", encoding="utf-8") as _:
            pass
        with open(arquivo, "r", encoding="utf-8") as leitura:
            texto = leitura.read().strip().splitlines()
            texto = [informacoes.split(";") for informacoes in texto]
            print(f"\n\n{'-' * 54}REPROVADOS{'-' * 54}")
            [print(f"{linha[1]}\n{'-' * 117}") for linha in texto if media_aluno(linha) < 6.0]
    except ValueError:
        print(f"\n\n{'-' * 56}ERRO{'-' * 57}")
        print("ERRO AO LER O ARQUIVO!")
        print("-" * 117)
    except FileNotFoundError:
        print(f"\n\n{'-' * 56}ERRO{'-' * 57}")
        print("O PROGRAMA NÃO POSSUI PERMISSÃO PARA CRIAR UM DIRETÓRIO/PASTA!")
        print("-" * 117)
    except OSError:
        print(f"\n\n{'-' * 56}ERRO{'-' * 57}")
        print("O SO NÃO ACEITA CARACTERES ESPECIAIS EM NOMES DE ARQUIVOS!")
        print("-" * 117)
    except IndexError:
        print(f"\n{'-' * 117}")
        print(f"\n\n{'-' * 56}ERRO{'-' * 57}")
        print("O MODO QUE AS INFORMAÇÕES SE ENCONTRAM NO TEXTO É INVÁLIDO!")
        print("-" * 117)
if __name__ == "__main__":
    nome_arquivo = "relacao_notas.txt"
    try:
        # Menu loop: each option delegates to one of the functions above.
        while True:
            # The worker functions already persist to disk automatically,
            # so no explicit "save to disk" menu option is needed.
            print(f"\n\n{'-' * 56}MENU{'-' * 57}")
            print("1 - Definir informações da turma")
            print(f"{'-' * 117}")
            print("2 - Inserir aluno e notas")
            print(f"{'-' * 117}")
            print("3 - Exibir alunos e médias")
            print(f"{'-' * 117}")
            print("4 - Exibir alunos aprovados")
            print(f"{'-' * 117}")
            print("5 - Exibir alunos reprovados")
            print(f"{'-' * 117}")
            print("6 - Sair do programa (fim)")
            print(f"{'-' * 117}")
            opcao = abs(int(input("\nInsira o número da opção que você deseja: ")))
            print(f"{'-' * 117}")
            if opcao == 1:
                informacoes_turma(nome_arquivo)
            elif opcao == 2:
                inserir_notas_alunos(nome_arquivo)
            elif opcao == 3:
                alunos_medias(nome_arquivo)
            elif opcao == 4:
                alunos_aprovados(nome_arquivo)
            elif opcao == 5:
                alunos_reprovados(nome_arquivo)
            elif opcao == 6:
                print(f"\n\n{'-' * 51}FIM DO PROGRAMA{'-' * 51}")
                break
            else:
                print(f"\n\n{'-' * 56}ERRO{'-' * 57}")
                print("OPÇÃO INVÁLIDA!")
                print("-" * 117)
    except ValueError:
        # int() failed: the user typed something that is not a number.
        print("-" * 117)
        print(f"\n\n{'-' * 56}ERRO{'-' * 57}")
        print("OPÇÃO DEVE SER UM NÚMERO INTEIRO!")
        print("-" * 117)
"cadu.souza81@gmail.com"
] | cadu.souza81@gmail.com |
f4a1d83f9e722ad21f7b9e6333b1b6b8bdb56c50 | 2919d686fb37acf8f533f48c64f4138f95b6915e | /MNIST_Null_Space_Tuning/mnist_standard/data.py | 04c4b70f742555d5c89fad5748f19dd8d3c9f075 | [] | no_license | hanscol/Null_Space_Classification | c8d0cc13f49785b074af37dd9a589c5aebfbe712 | 949961243a29ab6c6335055972857cb8de668a54 | refs/heads/master | 2020-04-12T04:49:33.761875 | 2020-02-23T21:38:07 | 2020-02-23T21:38:07 | 162,306,940 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,083 | py | from __future__ import print_function, division
import torch
import torchvision
import os
from skimage import io, transform
import numpy as np
from torch.utils.data import Dataset
import random
import matplotlib.pyplot as plt
from datetime import datetime
import warnings
warnings.filterwarnings("ignore")
def train_files(folder):
    """Map every training image path under ``folder`` to its integer label.

    ``folder`` must contain one sub-directory per class, named by the numeric
    class label; every file inside belongs to that class.
    """
    return {
        os.path.join(folder, label, name): int(label)
        for label in os.listdir(folder)
        for name in os.listdir(os.path.join(folder, label))
    }
# def test_files(folder):
# files = os.listdir(folder)
# data = []
# for f in files:
# data.append(f)
#
# return data
class Train_Dataset(Dataset):
    """MNIST-style training set backed by image files on disk.

    ``data`` maps file path -> integer label. When ``null_split`` > 0, that
    many samples (spread evenly across the 10 classes, with the remainder
    taken from class 9) are removed from the labelled pool; the held-out
    paths remain available per class in ``class_to_files``.
    """

    def __init__(self, data, null_split=0):
        self.data = data
        self.keys = list(data.keys())
        self.tensor = torchvision.transforms.ToTensor()
        # Group file paths by label so a per-class split can be carved out.
        self.class_to_files = {}
        for key in self.keys:
            label = self.data[key]
            if label not in self.class_to_files:
                self.class_to_files[label] = [key]
            else:
                self.class_to_files[label].append(key)
        # Sort for a deterministic split across runs.
        for key in list(self.class_to_files.keys()):
            self.class_to_files[key] = sorted(self.class_to_files[key])
        if null_split > 0:
            class_split = int(null_split / 10)
            diff = null_split - class_split * 10
            for i in range(10):
                keys = self.class_to_files[i]
                # Class 9 absorbs the remainder so totals equal null_split.
                if i == 9:
                    split_keys = keys[0:class_split + diff]
                else:
                    split_keys = keys[0:class_split]
                for key in split_keys:
                    del self.data[key]
                self.class_to_files[i] = split_keys
            self.keys = list(self.data.keys())

    def __len__(self):
        return len(self.keys)

    def __getitem__(self, idx):
        """Return {'image': 1x28x28 float32 tensor, 'target': label tensor}."""
        fname = self.keys[idx]
        label = self.data[fname]
        img = io.imread(fname)
        img = img / 255.0
        # Fixed: transform.resize() returns a new array; its result was
        # previously discarded, so non-28x28 inputs were never resized.
        img = transform.resize(img, [28, 28], mode='constant', anti_aliasing=True)
        img = np.expand_dims(img, axis=2)
        img = self.tensor(img)
        img = img.type(torch.float32)
        return {'image': img, 'target': torch.tensor(label)}
class Test_Dataset(Dataset):
    """Evaluation twin of Train_Dataset; items also carry the source file name."""

    def __init__(self, data):
        self.data = data
        self.keys = list(data.keys())
        self.tensor = torchvision.transforms.ToTensor()

    def __len__(self):
        return len(self.keys)

    def preprocess(self, fname):
        """Load ``fname`` and convert it to a normalized 1x28x28 float32 tensor."""
        img = io.imread(fname)
        img = img / 255.0
        # Fixed: assign the result; transform.resize() is not in-place, so
        # the previous call had no effect at all.
        img = transform.resize(img, [28, 28], mode='constant', anti_aliasing=True)
        img = np.expand_dims(img, axis=2)
        img = self.tensor(img)
        img = img.type(torch.float32)
        return img

    def __getitem__(self, idx):
        fname = self.keys[idx]
        label = self.data[fname]
        img = self.preprocess(fname)
        return {'image': img,
                'target': torch.tensor(label),
                'file': fname}
| [
"hanscol45@outlook.com"
] | hanscol45@outlook.com |
0052e096e35f38a3d6307d9a0a87f726012af309 | 06bab191a6aad5d57b70b44a6549373491a7c364 | /predict.py | 81facd0140b071eed079856f8aed7e845613343f | [
"MIT"
] | permissive | ufwt/TreeGen | 4db97ab910531329128841ba8b0d6b370e0a7dec | 3f01f8f40329b334dd03e41f41673a761d44bef8 | refs/heads/master | 2022-12-01T20:38:14.817423 | 2020-08-11T08:36:07 | 2020-08-11T08:36:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 18,809 | py | #-*-coding:utf-8-*-
import sys
from code_generate_model import *
from resolve_data import *
import os
import tensorflow as tf
import numpy as np
import os
import math
import queue as Q
from copy import deepcopy
from tqdm import tqdm
# Dataset directory, taken from the CLI: `python predict.py <project> <beam>`.
project = str(sys.argv[1]) + "/"
os.environ["CUDA_VISIBLE_DEVICES"]="5"
# Vocabulary caches (populated elsewhere; kept module-global).
vocabu = {}
tree_vocabu = {}
vocabu_func = {}
tree_vocabu_func = {}
vocabu_var = {}
tree_vocabu_var = {}
# Grammar bookkeeping filled by J_readrule():
#   J_HasSon   - node labels that can take children
#   J_NeedsEnd - labels whose child list must be closed by an "End" token
J_HasSon = []
J_VarList = []
# Rule id most recently read from Tree_Rule.in (set by J_readlist).
J_readrulenum = -1
J_NeedsEnd = []
# Tokenized natural-language query; indexed by copy actions in J_AddSon.
J_NlList = []
global_step = 0
# Model hyper-parameters (vocabulary sizes come from resolve_data).
embedding_size = 128
conv_layernum = 128
conv_layersize = 3
rnn_layernum = 50
batch_size = 64
NL_vocabu_size = len(vocabulary)
Tree_vocabu_size = len(tree_vocabulary)
NL_len = nl_len
Tree_len = tree_len
learning_rate = 1e-5
keep_prob = 0.5
pretrain_times = 0
pretrain_dis_times = 2
train_times = 1000
parent_len = 20
rulelist_len = 200
# Per-token decoding-step ids, kept in lockstep with the serialized tree.
step_list_p = []
numberstack = []
list2wordlist = []
# Node labels whose children may be copied verbatim from the NL input.
copy_node = []
cardnum = []
def J_readrule():
    """Load grammar metadata from the project directory.

    The first token of every line in Rule.txt and copy_node.txt becomes an
    expandable label (appended to ``J_HasSon``); labels from WithListEnd.txt
    are added to both ``J_HasSon`` and ``J_NeedsEnd`` (their child lists must
    be closed by an explicit "End" child).

    Uses ``with`` so each file is closed even if a read raises (the original
    open/readlines/close sequence leaked the handle on error).
    """
    with open(project + "Rule.txt", "r") as f:
        for line in f:
            J_HasSon.append(line.strip().split()[0])
    with open(project + "copy_node.txt", "r") as f:
        for line in f:
            J_HasSon.append(line.strip().split()[0])
    with open(project + "WithListEnd.txt", "r") as f:
        for line in f:
            J_HasSon.append(line.strip().split()[0])
            J_NeedsEnd.append(line.strip().split()[0])
J_readrule()
def J_readlist(in_file):
    """Read the decoder hand-off file: line 0 is the serialized tree, line 1
    the chosen rule id (stored into the global ``J_readrulenum``).

    Returns the tree tokens with every " node_gen ^" placeholder removed.
    """
    global J_readrulenum
    content = in_file.readlines()
    tree_line, rule_line = content[0], content[1]
    J_readrulenum = int(rule_line)
    return tree_line.replace(" node_gen ^", "").strip().split()
def J_findtheson_name(l, site):
    """Return the labels of the direct children of the node at ``site``.

    ``l`` is a pre-order token list where "^" closes the most recent open
    node; depth-1 tokens relative to ``site`` are exactly the children.
    """
    children = []
    depth = 0
    for token in l[site:]:
        if token == "^":
            depth -= 1
        else:
            if depth == 1:
                children.append(token)
            depth += 1
        if depth <= 0:
            # The subtree rooted at `site` has been closed.
            break
    return children
def J_isend(l, site):
    """Decide whether the node at ``site`` needs no further expansion.

    Leaves (labels outside J_HasSon), "End" markers and closing "^" tokens
    are always finished. An expandable node is finished once it has
    children, unless it is a list node (J_NeedsEnd) whose list has not yet
    been terminated by an "End" child.
    """
    label = l[site]
    if label not in J_HasSon or label in ("End", "^"):
        return True
    children = J_findtheson_name(l, site)
    if not children:
        return False
    if label not in J_NeedsEnd or children[-1] == "End":
        return True
    return False
def J_findthecontrol(liststr, site):
    """Return the span of the subtree rooted at ``site``.

    Result is ``[site + 1, end]`` where ``end`` is the index of the "^"
    token that closes the subtree; if the subtree is never closed, only
    ``[site + 1]`` is returned.
    """
    span = [site + 1]
    depth = 0
    for idx in range(site, len(liststr)):
        depth += -1 if liststr[idx] == "^" else 1
        if depth <= 0:
            span.append(idx)
            break
    return span
def J_AddOneSon(l, rule, site):
    """Append the rule's single RHS symbol as the last child of ``site``.

    Used for list-style nodes (J_NeedsEnd), which grow one child per step.
    Every token insertion is mirrored in the global ``step_list_p`` so the
    per-token decoding-step ids stay aligned with the tree. Returns a new
    token list; ``l`` itself is not modified.
    """
    node = l[site]
    if node != rule[0]:
        # The chosen rule must expand the node actually sitting at `site`.
        # (Was a bare `assert False`, which is silently skipped under -O.)
        raise AssertionError("rule head %r does not match node %r" % (rule[0], node))
    se = J_findthecontrol(l, site)
    e = se[1]
    newlist = deepcopy(l)
    # Insert "<child> ^" just before the subtree's closing "^".
    newlist.insert(e, "^")
    step_list_p.insert(e, global_step)
    newlist.insert(e, rule[1][0])
    step_list_p.insert(e, global_step)
    return newlist
def J_AddSon(l, rulenum, site):
    """Expand the node at ``site`` with rule ``rulenum``.

    Rule ids beyond ``len(Rule)`` denote copy actions: the child label is
    taken verbatim from the NL token list ``J_NlList``. Returns a new token
    list; every insertion is mirrored in the global ``step_list_p``.
    """
    expanded = deepcopy(l)

    def _insert_child(label):
        # Children are inserted as "<label> ^" right after `site`; keep the
        # decoding-step bookkeeping list in lockstep with the tree tokens.
        expanded.insert(site + 1, "^")
        step_list_p.insert(site + 1, global_step)
        expanded.insert(site + 1, label)
        step_list_p.insert(site + 1, global_step)

    if rulenum >= len(Rule):
        # Copy mechanism: the symbol comes from the NL input, not the grammar.
        _insert_child(J_NlList[rulenum - len(Rule)])
    else:
        # Insert the RHS back-to-front so the children end up in rule order.
        for child in reversed(Rule[rulenum][1]):
            _insert_child(child)
    return expanded
def J_AddSon_nodegen(l, site):
    """Insert the "node_gen" placeholder as the last child of ``site``.

    The placeholder marks the next expansion point for the model. Note the
    step bookkeeping (`step_list_p`) is deliberately NOT updated here,
    matching the original behavior: the placeholder is transient.
    """
    grown = deepcopy(l)
    end = J_findthecontrol(l, site)[1]
    grown.insert(end, "^")
    grown.insert(end, "node_gen")
    return grown
# Decoding-step id of the node expanded most recently; written by J_scan as
# a side channel and read by J_run when emitting features.
father_index_now = -1
def J_scan(l, rulenum):
    # Apply the globally selected rule (J_readrulenum; NOTE the `rulenum`
    # parameter is unused) to the first unfinished node in `l`.
    # Returns the grown token list, or None when the tree is complete.
    for i in range(len(l)):
        now = l[i]
        if now == "^":
            continue
        if not J_isend(l, i):
            global father_index_now
            father_index_now = step_list_p[i]
            # List-style nodes grow one child at a time; ordinary nodes get
            # the rule's full right-hand side in a single step.
            if l[i] in J_NeedsEnd:
                return J_AddOneSon(l, Rule[J_readrulenum], i)
            return J_AddSon(l, J_readrulenum, i)
    return None
def J_findthefather_site(l, site):
    """Return the index of the parent of the node at ``site``, or -1 for the
    root (or ``site == 0``).

    Walks leftwards keeping a depth balance; the first position where the
    balance reaches +1 is the enclosing (parent) node. The substring test
    ``"^" in token`` is intentional: some callers build fused tokens such
    as "label^".
    """
    balance = 0
    for idx in range(site - 1, -1, -1):
        balance += -1 if "^" in l[idx] else 1
        if balance == 1:
            return idx
    return -1
def J_scan_for_node(l, rulenum):
    """Locate the first unfinished node and attach the "node_gen" marker.

    Returns ``(new_token_list, index_of_node)`` or ``(None, -1)`` when the
    tree is already complete. ``rulenum`` is unused; it is kept only for
    call-site compatibility with J_scan.
    """
    for idx, token in enumerate(l):
        if token == "^" or J_isend(l, idx):
            continue
        return J_AddSon_nodegen(l, idx), idx
    return None, -1
def J_getfeaturenode(l, nextsite):
    """Build the feature triple for the node at ``nextsite``.

    Returns ``[node_label, parent_label, root_path]`` where the parent is
    "Unknown" for the root and ``root_path`` is the space-joined chain of
    ancestor labels from the node itself up to the root.
    """
    node = l[nextsite]
    parent_idx = J_findthefather_site(l, nextsite)
    parent = "Unknown" if parent_idx == -1 else l[parent_idx]
    chain = []
    cursor = nextsite
    while cursor != -1:
        chain.append(l[cursor])
        cursor = J_findthefather_site(l, cursor)
    return [node, parent, " ".join(chain)]
def J_run():
    """One decoder step of the (Java-ported) tree builder.

    Reads the current tree and the chosen rule id from
    <project>/Tree_Rule.in, applies the rule, inserts the `node_gen`
    placeholder at the next expansion site, and writes the feature lines
    consumed by getJavaOut() into <project>/Tree_Feature.out
    (2 lines when decoding is finished, 11 lines otherwise).
    """
    global global_step
    in_file = open(project + "Tree_Rule.in")
    #in_file.close()
    fw = open(project + "Tree_Feature.out", "w")
    l = J_readlist(in_file)
    in_file.close()
    global_step += 1
    newl = J_scan(l, J_readrulenum)
    if newl == None:
        # Nothing left to expand: emit the finished tree plus the END marker.
        fw.write(" ".join(l) + "\n")
        fw.write("END\n")
    else:
        newl1, nextsite = J_scan_for_node(newl, J_readrulenum)
        if newl1 == None:
            # The applied rule completed the tree.
            fw.write(" ".join(newl) + "\n")
            fw.write("END\n")
        else:
            newl = newl1
            # node / parent / root-path features for the next expansion site.
            node_par = J_getfeaturenode(newl, nextsite)
            out = " ".join(newl)
            # Line 0: tree without the transient node_gen placeholder.
            fw.write(out.replace(" node_gen ^", "") + "\n")
            fw.write(node_par[0] + "\n")
            fw.write(node_par[1] + "\n")
            fw.write(node_par[2] + "\n")
            # Lines 4-10: End-stripped tree variants plus fixed filler fields
            # expected by the getJavaOut() line layout.
            fw.write(out.replace(" End ^", "") + "\n")
            fw.write(out.replace(" End ^", "") + "\n")
            fw.write("1\n")
            fw.write("1\n")
            fw.write(out.replace(" End ^", "") + "\n")
            fw.write("1\n")
            fw.write(str(father_index_now) + "\n")
    fw.close()
def create_model(session, g, placeholder=""):
    """Restore model weights from <project>/save1 if a checkpoint exists,
    otherwise initialize all variables from scratch.

    ``g`` and ``placeholder`` are unused; they are kept so existing call
    sites remain valid.
    """
    checkpoint_dir = project + "save1"
    if os.path.exists(checkpoint_dir):
        # Resume from the most recent checkpoint in the save directory.
        tf.train.Saver().restore(session, tf.train.latest_checkpoint(checkpoint_dir + "/"))
        print("load the model")
    else:
        # Fresh start: run the global initializer, feeding the class vector
        # expected by the discriminator variable `d.variable`.
        classvec = data_random()
        session.run(tf.global_variables_initializer(), feed_dict={d.variable: classvec})
        print("create a new model")
class Javaoutput:
    """One beam-search hypothesis mirrored from the tree-builder hand-off.

    Wraps the serialized tree variants produced by getJavaOut() together
    with all per-hypothesis search bookkeeping (rule history, probabilities,
    step ids, ...).
    """

    def __init__(self, Tree, Nl, Node, PNode, Root, TreeWithEnd, FatherTree, GrandFatherTree, state):
        # Tree serializations as produced by the builder step.
        self.Tree = Tree
        self.TreeWithEnd = TreeWithEnd
        self.FatherTree = FatherTree
        self.GrandFatherTree = GrandFatherTree
        # NL query, the node being expanded, its parent, and the root path.
        self.Nl = Nl
        self.Node = Node
        self.PNode = PNode
        self.Root = [Root]
        self.state = state
        # Search bookkeeping.
        self.Probility = 1
        self.is_end = False
        self.FuncDict = {}
        self.FuncList = []
        self.VarList = []
        self.father_index = []
        self.rule = []
        self.RuleList = []
        self.DeepthList = []
        self.list2wordlistjava = []
        self.numberstack = []
        self.step_list = [-1] * 30
        self.gs = -1

    def prin(self):
        """Debug helper: dump the current tree to stdout."""
        print(self.Tree)

    def __lt__(self, other):
        # Inverted on purpose: PriorityQueue pops the "smallest" item first,
        # so "less than" == "higher probability" gives best-first ordering.
        return self.Probility > other.Probility
def getJavaOut(Nl):
    """Parse <project>/Tree_Feature.out into a Javaoutput hypothesis.

    Line layout written by J_run()/predict(): 0 = tree with End markers,
    1 = node, 2 = parent, 3 = root path, 4 = End-stripped tree,
    6/7 = father and grandfather trees. 2 (or 12) lines mean decoding is
    finished ("end"), a single line means failure ("error"), anything else
    is a grown tree ("grow").

    Uses ``with`` so the file handle is closed even if reading raises.
    """
    with open(project + "Tree_Feature.out", "r") as f:
        lines = f.readlines()
    if len(lines) == 2:
        return Javaoutput(lines[0][:-1], Nl, "", "", "", "", "", "", "end")
    if len(lines) == 12:
        return Javaoutput(lines[4][:-1], Nl, lines[1][:-1], lines[2][:-1], lines[3][:-1], lines[0][:-1], lines[6][:-1], lines[7][:-1], "end")
    if len(lines) == 1:
        return Javaoutput("", Nl, "", "", "", "", "", "", "error")
    return Javaoutput(lines[4][:-1], Nl, lines[1][:-1], lines[2][:-1], lines[3][:-1], lines[0][:-1], lines[6][:-1], lines[7][:-1], "grow")
def getlistDeep_all(inputlist):
    """Annotate each token with its tree depth.

    Opening tokens are tagged with their own depth; a closing "^" is tagged
    with the depth of the node it closes.
    """
    depths = []
    level = 0
    for token in inputlist:
        closing = token == "^"
        if closing:
            level -= 1
        depths.append(level)
        if not closing:
            level += 1
    return depths
def cov(tree):
    """Serialize a tree string into the fused-token form used as model input.

    Every "^" is replaced by "<label>^", where <label> is the most recent
    node seen at the same depth (i.e. the node the "^" closes); all
    whitespace is then removed, producing one concatenated string.
    """
    tokens = tree.split()
    depths = getlistDeep_all(tokens)
    last_at_depth = {}
    pieces = []
    for token, depth in zip(tokens, depths):
        if token == "^":
            token = last_at_depth[depth] + "^"
        else:
            last_at_depth[depth] = token
        pieces.append(token)
    return "".join(pieces)
def pre_mask():
    """Return a (rulelist_len, rulelist_len) float64 lower-triangular mask.

    ``mask[i][t] == 1`` for ``t <= i`` — position i may attend only to
    positions up to and including itself. ``np.tril`` replaces the original
    O(n^2) Python double loop with one vectorized call.
    """
    return np.tril(np.ones([rulelist_len, rulelist_len]))
def g_predict_beam(sess, model, batch_data):
    """Run one inference pass of the generator.

    Feeds the pre-built feature batch (see getAction for the slot layout)
    and returns the rule-probability vector for the latest decoding
    position: the slot just before the first 0 in the rule-id list, or the
    last position when there is no padding.
    """
    batch = batch_data
    # Dummy all-ones rewards: the graph requires the tensor, but it is only
    # meaningful during (RL-style) training, not at inference time.
    rewards = np.zeros([len(batch[1])])
    for i in range(len(rewards)):
        rewards[i] = 1
    y = sess.run(model.y_result, feed_dict={model.input_NL: batch[0],
                model.input_NLChar:batch[1],
                model.inputrulelist:batch[6],
                model.inputrulelistnode:batch[7],
                model.inputrulelistson:batch[8],
                model.tree_path_vec: batch[9],
                model.treemask: batch[10],
                model.father_mat: batch[11],
                model.labels:batch[12],
                model.antimask:pre_mask(),
                model.keep_prob: 1,
                model.rewards: rewards,
                model.is_train: False
                })
    # Rule id 0 marks padding: the slot before it is the current position.
    for i in range(len(batch[6][0])):
        if batch[6][0][i] == 0:
            return y[0][i - 1]
    return y[ -1 ]
def get_tree_path_vec_for_pre(tree_path):
    """Encode root-path strings for prediction.

    Each entry of ``tree_path`` is a space-separated ancestor chain; the
    first 10 tokens of each chain are embedded into a row of a
    (length[5] x 10) matrix. Also returns the list of first-token ids
    (the "father" feature of each path).
    """
    tree_path_len = 10
    tree_path_vec = np.zeros([length[5], tree_path_len])
    fathers = []
    for row, path in enumerate(tree_path):
        words = path.strip().split()
        for col, word in enumerate(words[:tree_path_len]):
            tree_path_vec[row][col] = word2vec(word, "tree")
        # The first token on the path is the node itself.
        fathers.append(word2vec(words[0], "tree"))
    return tree_path_vec, fathers
# Monotone counter of getAction invocations (debug/bookkeeping only).
step = 1
def getAction(sess, Code_gen_model, JavaOut):
    # Build the full feature batch for one hypothesis and return the model's
    # rule-probability vector for its next expansion site.
    # Slot layout (consumed by g_predict_beam):
    #   0 NL ids, 1 NL char ids, 2 tree, 3 father tree, 4 grandfather tree,
    #   5 unused, 6 rule-id list, 7/8 rule node/son embeddings,
    #   9 tree-path matrix, 10 mask, 11 father matrix, 12 labels.
    valid_batch, _ = batch_data(1, "test") # read data
    input_NL = line2vec(JavaOut.Nl, "nl", length[0])
    input_NLChar = line2charvec(JavaOut.Nl, length[0], char_len)
    # Trees are serialized through cov() into the fused-token form.
    input_Tree = line2vec(cov(JavaOut.Tree), "tree", length[1])
    input_Father = line2vec(cov(JavaOut.FatherTree), "tree", length[2])
    input_Grand = line2vec(cov(JavaOut.GrandFatherTree), "tree", length[3])
    tree_path_vec, father_vec = get_tree_path_vec_for_pre(JavaOut.Root)
    print (JavaOut.Root)
    # Depth of each root path = number of ancestors on it.
    deepthlist = []
    tree_path = JavaOut.Root
    for i in range(len(tree_path)):
        words = tree_path[i].strip().split()
        deepthlist.append(str(len(words)))
    root = ""
    rules_str = ""
    rules_destart = ""
    flag = True
    # Space-separated history of rule ids chosen so far.
    for n in JavaOut.RuleList:
        rules_str += str(n) + " "
    input_Rules = line2rulevec(rules_str, length[5])
    input_func = np.zeros([1])
    list_input = []
    list_input.append(input_NL)
    list_input.append(input_NLChar)
    list_input.append(input_Tree)
    list_input.append(input_Father)
    list_input.append(input_Grand)
    list_input.append("")
    list_input.append(input_Rules)
    v1, v2 = line2rules(rules_str, length[5], father_vec, JavaOut.Nl)
    list_input.append(v1)
    list_input.append(v2)
    global step
    step += 1
    list_input.append(tree_path_vec)
    deepth = " ".join(deepthlist)
    print ("------")
    print (JavaOut.father_index)
    # Father indices of every past decoding step, serialized for line2mask.
    line = ""
    for f in JavaOut.father_index:
        line += str(f) + " "
    print (line)
    ret, father_vec, labels = line2mask(line, length[5])
    list_input.append(ret)
    list_input.append(father_vec)
    list_input.append(labels)
    # Prepend the batch dimension (batch size 1) to every feature.
    for i in range(len(list_input)):
        list_input[i] = np.expand_dims(list_input[i], axis=0)
    return g_predict_beam(sess, Code_gen_model, list_input)
def WriteJavaIn(JavaOut, action):
    """Write the decoder hand-off file <project>/Tree_Rule.in.

    Three lines: the current tree (with End markers), the chosen action
    (rule id), and the NL query. J_run() reads this file on the next step.
    Uses ``with`` so the file is flushed and closed even on error.
    """
    with open(project + "Tree_Rule.in", "w") as f:
        f.write("%s\n%s\n%s\n" % (JavaOut.TreeWithEnd, action, JavaOut.Nl))
def BeamSearch(sess, Code_gen_model, Nl, N, NL_number):
    """Beam-search decode the NL query `Nl` with beam width `N`.

    Each beam item is a Javaoutput hypothesis; expansion is driven through
    the file-based hand-off (WriteJavaIn -> J_run -> getJavaOut). When `N`
    finished hypotheses survive in the beam, the trees and their scores are
    written to <project>/out/<NL_number>.txt and the search stops.
    """
    Javaout = getJavaOut(Nl)
    global J_NlList
    J_NlList = Nl.strip().split()
    close_table = {}
    close_table[Javaout.Tree] = 1
    Beam = [Javaout]
    Set_ = Q.PriorityQueue()
    level = 0
    words = Nl.split()
    while True:
        level += 1
        Set_ = Q.PriorityQueue()
        # Safety valve: shrink the beam if the search runs absurdly long.
        if level > 10000:
            N -= 1
        for JavaOut in Beam:
            # Finished hypotheses are carried over unchanged.
            if JavaOut.is_end :
                Set_.put(JavaOut)
                continue
            print ("-----------")
            res = getAction(sess, Code_gen_model, JavaOut)
            # Candidate actions sorted by probability, best first.
            list_res = [[res[i], i] for i in range(len(res))]
            list_res = sorted(list_res, reverse=True)
            count_n = N
            for t in range(len(list_res)):
                if t >= count_n:
                    break
                i = int(list_res[t][1])
                # Skip grammar rules that do not expand the current node,
                # copy indices beyond the NL length, and copy actions on
                # nodes that do not allow copying. Each skip extends the
                # candidate budget so N valid expansions are still tried.
                if i < len(Rule) and Rule[i][0] != JavaOut.Node:
                    count_n += 1
                    continue
                if i >= len(Rule) + len(words):
                    count_n += 1
                    continue
                if i >= len(Rule) and JavaOut.Node.strip() not in copy_node:
                    count_n += 1
                    continue
                # Hand the hypothesis + action to the tree builder, restoring
                # this hypothesis' step bookkeeping first.
                WriteJavaIn(JavaOut, i )
                global global_step
                global_step = JavaOut.gs
                global step_list_p
                step_list_p = deepcopy(JavaOut.step_list)
                J_run()
                JavaOutNext = getJavaOut(Nl)
                JavaOutNext.step_list = step_list_p
                JavaOutNext.gs = global_step
                if JavaOutNext.state == "error":
                    count_n += 1
                    continue
                # Inherit search history from the parent hypothesis.
                JavaOutNext.RuleList = deepcopy(JavaOut.RuleList)
                JavaOutNext.Root = deepcopy(JavaOut.Root) + JavaOutNext.Root
                JavaOutNext.rule = deepcopy(JavaOut.rule)
                JavaOutNext.father_index = deepcopy(JavaOut.father_index)#.append(father_index_now)
                JavaOutNext.father_index.append(father_index_now)
                nowtree = JavaOutNext.Tree
                print (JavaOutNext.Tree)
                # Length-normalized log-probability (alpha = 0.6).
                apa = 0.6
                if JavaOutNext.state == "grow":
                    print("grow")
                    print ("{Rule: %s}" % str(i))
                    # Drop pathologically large trees.
                    if len(JavaOutNext.Tree.split()) > 1000:
                        continue
                    JavaOutNext.Probility = (JavaOut.Probility * math.pow(len(JavaOut.RuleList), apa) + math.log(max(1e-10, res[i]))) / math.pow(len(JavaOut.RuleList) + 1, apa)
                    JavaOutNext.RuleList.append(i + 1)
                    Set_.put(JavaOutNext)
                elif JavaOutNext.state == "end": # BUG!!!!?????
                    if JavaOutNext.Tree != JavaOut.Tree:
                        JavaOutNext.Probility = (JavaOut.Probility * math.pow(len(JavaOut.RuleList), apa) + math.log(max(1e-10, res[i]))) / math.pow(len(JavaOut.RuleList) + 1, apa)
                    else:
                        JavaOutNext.Probility = JavaOut.Probility
                    JavaOutNext.is_end = True
                    Set_.put(JavaOutNext)
        # Keep the N most probable hypotheses for the next level.
        Beam = []
        endnum = 0
        while((not Set_.empty()) and N > len(Beam)):
            JavaOut = Set_.get()
            print(JavaOut.Probility)
            close_table[JavaOut.Tree] = 1
            Beam.append(JavaOut)
            if JavaOut.is_end:
                endnum += 1
        if endnum >= N:
            # Entire beam finished: dump trees and scores, then stop.
            f = open(project + "out/"+str(NL_number)+".txt","w")
            for JavaOut in Beam:
                f.write(JavaOut.Tree)
                f.write("\n")
                f.write(str(JavaOut.Probility))
                f.write("\n")
            f.close()
            break
def predict():
    """Decode every NL query in <project>/input.txt via beam search.

    Builds the generator graph, restores (or initializes) its weights on
    CPU, seeds Tree_Feature.out with the root-only tree state for each
    query, and runs BeamSearch with the beam width from sys.argv[2].
    Outputs land in <project>/out/<i>.txt.
    """
    global Tree_vocabu_size
    global NL_vocabu_size
    NL_vocabu_size = len(vocabulary)
    Tree_vocabu_size = len(tree_vocabulary)
    Code_gen_model = code_gen_model(classnum, embedding_size, conv_layernum, conv_layersize, rnn_layernum,
                  batch_size, NL_vocabu_size, Tree_vocabu_size, NL_len, Tree_len, parent_len, learning_rate, keep_prob, len(char_vocabulary), rules_len)
    # Force CPU execution for prediction.
    config = tf.ConfigProto(device_count={"GPU": 0})
    #config = tf.ConfigProto(allow_soft_placement=True)
    #config.gpu_options.allow_growth = True
    with tf.Session(config=config) as sess:
        create_model(sess, "", "")
        f = open(project + "input.txt", "r")
        lines = f.readlines()
        f.close()
        for i in range(len(lines)):
            Nl = lines[i].strip()
            print(Nl)
            # Seed the feature file with the initial root-only tree state in
            # the 11-line layout that getJavaOut() expects.
            f = open(project + "Tree_Feature.out", "w")
            f.write("root ^")
            f.write("\n")
            f.write("root")
            f.write("\n")
            f.write("Unknown")
            f.write("\n")
            f.write("root\n")
            f.write("root node_gen ^ ^\n")
            f.write("root node_gen ^ ^\n")
            f.write("Unknown root ^ ^\n")
            f.write("Unknown Unknown ^ ^\n")
            f.write("root node_gen ^\n")
            f.write("Unknown root ^ ^\n")
            f.write("Unknown Unknown ^ ^\n")
            f.close()
            BeamSearch(sess, Code_gen_model, Nl, int(sys.argv[2]), i)
            print(str(i) + "th code is finished")
def read_copy_node():
    """Populate the module-level ``copy_node`` list from <project>/copy_node.txt.

    One node label per line; these labels may take children copied verbatim
    from the NL input. Uses ``with`` so the file is closed even on error.
    """
    with open(project + "copy_node.txt", "r") as f:
        for line in f:
            copy_node.append(line.strip())
def main():
    """Entry point: load the copy-node whitelist, then run beam-search prediction."""
    read_copy_node()
    print("predict start")
    predict()

# Guarded so importing this module for its helpers no longer triggers a
# full prediction run (the original called main() unconditionally).
if __name__ == "__main__":
    main()
| [
"anonymous@anonymous.none"
] | anonymous@anonymous.none |
f7d0ebc5b5c74035f2e5e648525b0bdabb67d31e | ee53b0262007b2f0db0fe15b2ad85f65fafa4e25 | /Leetcode/441. Arranging Coins.py | dfa616241b4d3e2f18fe71fc819dff41930a76d6 | [] | no_license | xiaohuanlin/Algorithms | bd48caacb08295fc5756acdac609be78e143a760 | 157cbaeeff74130e5105e58a6b4cdf66403a8a6f | refs/heads/master | 2023-08-09T05:18:06.221485 | 2023-08-08T11:53:15 | 2023-08-08T11:53:15 | 131,491,056 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,616 | py | '''
You have a total of n coins that you want to form in a staircase shape, where every k-th row must have exactly k coins.
Given n, find the total number of full staircase rows that can be formed.
n is a non-negative integer and fits within the range of a 32-bit signed integer.
Example 1:
n = 5
The coins can form the following rows:
¤
¤ ¤
¤ ¤
Because the 3rd row is incomplete, we return 2.
Example 2:
n = 8
The coins can form the following rows:
¤
¤ ¤
¤ ¤ ¤
¤ ¤
Because the 4th row is incomplete, we return 3.
'''
import unittest
class Solution(object):
    """LeetCode 441: count complete rows of a coin staircase."""

    def arrangeCoins(self, n):
        """
        :type n: int
        :rtype: int

        k full rows consume k*(k+1)/2 coins, so the answer is the largest k
        with k*(k+1)/2 <= n; the closed form is floor((sqrt(8n+1) - 1) / 2).
        The float square root can be off by one near perfect squares, so the
        result is nudged afterwards with exact integer arithmetic.
        """
        k = int(((8 * n + 1) ** 0.5 - 1) / 2)
        # Grow while the next row still fits ...
        while (k + 1) * (k + 2) // 2 <= n:
            k += 1
        # ... and shrink if float rounding overshot.
        while k * (k + 1) // 2 > n:
            k -= 1
        return k
class TestSolution(unittest.TestCase):
    """Table-driven checks for Solution.arrangeCoins."""

    def test_arrangeCoins(self):
        cases = (
            (0, 0),
            (1, 1),
            (5, 2),
            (8, 3),
        )
        for coins, expected_rows in cases:
            self.assert_function(coins, expected_rows)

    def assert_function(self, first, second):
        # Keep the original helper's failure-message format.
        self.assertEqual(Solution().arrangeCoins(first), second,
                         msg="first: {}; second: {}".format(first, second))
unittest.main() | [
"derek.xiao@loftk.us"
] | derek.xiao@loftk.us |
0b3ecd57bb1f62bd8d1743686431eb9fbfac136e | 4e33860067fa214b4553070b4bd4126e52bf0d75 | /out/production/code/python/13-roman-to-integer.py | f3f6f1f9d2ba9b6202c2e4e3cb785cbddd9af3fb | [] | no_license | echocheergo/algorithms | d5bd0c586de70ab5af4f27202ab6cf06eba50fd9 | ce13bc1d05438f01272e52d21990c1b08bc683f7 | refs/heads/master | 2020-03-27T21:13:53.304610 | 2018-10-07T21:50:38 | 2018-10-07T21:50:38 | 147,126,434 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,115 | py | #!/usr/bin/env python
# alternatively use a dictionary to map value
# index remember to update in each case
class Solution:
    """Convert a Roman numeral string to its integer value (LeetCode 13)."""

    # Symbol values; a dict lookup replaces the original if-chain.
    _VALUES = {'I': 1, 'V': 5, 'X': 10, 'L': 50, 'C': 100, 'D': 500, 'M': 1000}

    def __init__(self):
        pass

    def value(self, c):
        """Return the value of one Roman symbol, or -1 for an unknown symbol."""
        return self._VALUES.get(c, -1)

    def romanToInt(self, s):
        """
        :type s: str
        :rtype: int

        Scan left to right; when a symbol is strictly smaller than its
        successor (e.g. the I in IV), the pair contributes successor - symbol.
        """
        res = 0
        i = 0
        while i < len(s):
            v1 = self.value(s[i])
            if i + 1 < len(s) and self.value(s[i + 1]) > v1:
                # Subtractive pair such as IV (4), IX (9) or CM (900).
                res += self.value(s[i + 1]) - v1
                i += 2
            else:
                res += v1
                i += 1
        return res
# Smoke test: DCXXI is 621.
a = Solution()
print(a.romanToInt('DCXXI'))
| [
"wuyan20080604@gnmail.com"
] | wuyan20080604@gnmail.com |
da67a21badb1298f5bc31646c651fdfe11dfdd15 | 7a68bc916060f003089754519d778a24f4c3f6da | /scripts/generate_synthetic_data.py | c92c8016403ddc5dec24f85826ce1a9237b00d6c | [] | no_license | Asashou/ffn-tracer | 167276c9a0e7152078466422a56e80f07844dfe5 | cbcdea5a6e7d82006646a593ae0408e5437ac24b | refs/heads/master | 2023-02-28T05:19:49.341189 | 2020-03-06T01:34:06 | 2020-03-06T01:34:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 948 | py | """
Generate a synthetic dataset for testing.
usage:
python generate_synthetic_data.py --out_dir ./synthetic-data
"""
import argparse
from fftracer.datasets.synthetic import SyntheticDataset2D
def main(out_dir, num_training_coords):
    """Build one synthetic 2D patch, then write its TFRecord and sampled
    training coordinates into ``out_dir``.
    """
    dataset = SyntheticDataset2D()
    dataset.initialize_synthetic_data_patch(dataset_shape=(1000, 1000), patch_size=(49, 49))
    # Persist the synthetic data itself ...
    dataset.write_tfrecord(out_dir)
    # ... then the coordinates to train on.
    dataset.generate_and_write_training_coordinates(out_dir, num_training_coords)
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--out_dir", required=True,
                        help="directory to save to")
    parser.add_argument("--num_training_coords", type=int, default=100,
                        help="number of training coordinates to generate")
    # Flag names match main()'s parameters, so forward them directly.
    main(**vars(parser.parse_args()))
| [
"joshua.patrick.gardner@gmail.com"
] | joshua.patrick.gardner@gmail.com |
cfc155b48e7139b1bf1bea71e66f59e91f6f6b50 | d7c527d5d59719eed5f8b7e75b3dc069418f4f17 | /main/_pythonSnippet1_backup/61/views.py | 3e9bacefeb4c0afffa4042075dad295c84f00a02 | [] | no_license | Aivree/SnippetMatcher | 3e348cea9a61e4342e5ad59a48552002a03bf59a | c8954dfcad8d1f63e6e5e1550bc78df16bc419d1 | refs/heads/master | 2021-01-21T01:20:59.144157 | 2015-01-07T04:35:29 | 2015-01-07T04:35:29 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,894 | py | from django.shortcuts import render_to_response
import datetime

from django import forms
from django.core.urlresolvers import reverse
from django.http import HttpResponse, HttpResponseRedirect
from django.template import Template, Context, RequestContext
from django.template.loader import get_template

from runner.forms import DocumentForm
from runner.models import Document
def list(request):
    """Document upload + listing view.

    NOTE: intentionally shadows the ``list`` builtin — the name is part of
    the URLconf (``runner.views.list``) and must not change.
    """
    if request.method == 'POST':
        # Bound form: validate and persist the upload.
        form = DocumentForm(request.POST, request.FILES)
        if form.is_valid():
            Document(docfile=request.FILES['docfile']).save()
            # Post/Redirect/Get: avoid duplicate uploads on refresh.
            return HttpResponseRedirect(reverse('runner.views.list'))
        # An invalid form falls through and is re-rendered with its errors.
    else:
        form = DocumentForm()  # unbound form for a plain GET
    return render_to_response(
        'list.html',
        {'documents': Document.objects.all(), 'form': form},
        context_instance=RequestContext(request)
    )
def index(request):
    """Render the software overview page."""
    from runner.models import Software
    # Each Software's __str__ is pipe-delimited; split it into columns
    # for the template.
    software_list = [str(entry).split("|") for entry in Software.objects.all()]
    context = Context({
        'bootstrap3_title': 'Run programs',
        'software_list': software_list,
    })
    return HttpResponse(get_template("bootstrap3.html").render(context))
def software(request, name):
    """Render the run page for one software item, titled after it."""
    context = RequestContext(request, {
        'bootstrap3_title': 'Running ' + name,
    })
    return HttpResponse(get_template("bootstrap3.html").render(context))
def current_datetime(request):
    """Render the template with the server's current local time."""
    context = Context({'current_date': datetime.datetime.now()})
    return HttpResponse(get_template("bootstrap3.html").render(context))
| [
"prateek1404@gmail.com"
] | prateek1404@gmail.com |
42dc6d18884578c84f4ca5272b7590683a423d4d | 532549735aab20e7948511b63e0fb77cc5aedacf | /chaussette/backend/_fastgevent.py | c43809bd8d374be7c03b29174b2ce058a6b65653 | [
"Apache-2.0"
] | permissive | ericem/chaussette | f71ac35990b2b7aa41610ec4be867321ce3be89f | fe62725ca1d018bb26c024f796447b6c761f00e0 | refs/heads/master | 2021-01-18T10:52:43.720192 | 2013-05-02T13:38:23 | 2013-05-02T13:38:23 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 928 | py | import socket
from gevent.wsgi import WSGIServer
from gevent import monkey
from chaussette.util import create_socket
class Server(WSGIServer):
    """gevent-based WSGI server whose listening socket is created through
    chaussette's ``create_socket`` helper (so it can reuse pre-opened fds).
    """
    address_family = socket.AF_INET
    socket_type = socket.SOCK_STREAM
    def __init__(self, listener, application=None, backlog=None,
                 spawn='default', log='default', handler_class=None,
                 environ=None, **ssl_args):
        # Monkey-patch the stdlib before any sockets are created so blocking
        # calls cooperate with the gevent hub.
        monkey.noisy = False
        monkey.patch_all()
        host, port = listener
        self.socket = create_socket(host, port, self.address_family,
                                    self.socket_type, backlog=backlog)
        self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        self.server_address = self.socket.getsockname()
        # NOTE(review): the `log` argument is deliberately discarded and the
        # parent server runs unlogged — confirm this silencing is intended.
        log = None
        super(Server, self).__init__(self.socket, application, None, spawn,
                                     log, handler_class, environ, **ssl_args)
| [
"tarek@ziade.org"
] | tarek@ziade.org |
17cd33c5d17289d833d5794b6651cf7fe8007a50 | c67467031f40610be2bfdbb8e6adb4c5fcd0a6d2 | /python_fileio/sys_argv.py | 11cefd1fcfe0e42313d9e88097d1b131fb82b9fe | [] | no_license | Hankang-Hu/python_code | 1685b9b7c0b24abf6ebc7bd87368a84457d0e131 | 378a9adf9e123bb6d897b1247eda9de3edadf768 | refs/heads/master | 2020-03-10T08:11:03.040878 | 2018-05-11T05:15:40 | 2018-05-11T05:15:40 | 129,279,534 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 90 | py | import sys
# Report how many command-line arguments were received (argv[0] is the
# script name itself), then echo the full argv list.
print('you entered',len(sys.argv),'arguments')
print('you entered:',sys.argv)
| [
"1121394540@qq.com"
] | 1121394540@qq.com |
71674dfa6ff7c8aca9d8e0aa4fb282ba0b9c47c7 | 7ec745b4b881d571c66c4ca41b6739425552c617 | /rangepractice_1.py | 0eb23ec1690161d9803ad34d6fca0a8a1fb8e788 | [] | no_license | JafarSoftware/python102 | 1e0a3c1b6c98d359db8a7ea6dbf6d600951d15d2 | c8ee7727bf9b337697a39d19f4e950d52c3334cc | refs/heads/master | 2022-12-06T09:04:00.716916 | 2020-08-21T02:53:41 | 2020-08-21T02:53:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 103 | py | #this is a range program, it is for practice. delete after use.
# `range` is lazy: this binds a range object over 0..999, not a list.
letter = range (1000)
print(letter) | [
"69733769+Softjafar00@users.noreply.github.com"
] | 69733769+Softjafar00@users.noreply.github.com |
382edcead6cce954feaded0e363727a072ae9a8c | f079ff39d64b967f4235d9c22252064fdecda469 | /pfg/donationform/cart.py | 5315322d673ef646b1e14765df5ab6dabdfad477 | [] | no_license | collective/pfg.donationform | 80b2aa2227af99632d357591dbf49cbfe1f58b5a | 1dcbc023a816ac2e37629d7e065d278b9625d587 | refs/heads/master | 2023-06-26T16:39:07.666882 | 2017-10-06T14:54:33 | 2017-10-06T14:54:33 | 105,947,257 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,366 | py | from zope.event import notify
from zope.component import adapts, queryUtility
from zope.interface import implements, alsoProvides
from getpaid.core.interfaces import ILineItemFactory, IShoppingCart
from getpaid.core.item import PayableLineItem, RecurringLineItem
from pfg.donationform.interfaces import IDonationFieldSet, DonationCreatedEvent, IDonationCart
from Products.CMFPlone.utils import safe_unicode
try:
from zope.intid.interfaces import IIntIds
IIntIds
except ImportError:
IIntIds = None
try:
from Products.PloneGetPaid import sessions
sessions
except ImportError:
sessions = None
class DonationFieldLineItemFactory(object):
    """Build a getpaid line item from a donation form field.

    Adapts (cart, donation field). The amount comes from the submitted
    form: a preset donation level wins, otherwise the free-form amount box
    (default '0'); a leading '$' is tolerated. The cart is emptied before
    the donation is added.
    """
    implements(ILineItemFactory)
    adapts(IShoppingCart, IDonationFieldSet)

    def __init__(self, cart, field):
        self.cart = cart
        self.field = field
        form = field.REQUEST.form
        fname = self.field.getId()
        self.amount = form.get(fname + '_level')
        if not self.amount:
            self.amount = form.get(fname + '_amount', '0')
        # Users often type a leading dollar sign; float() would choke on it.
        self.amount = self.amount.lstrip('$')
        self.is_recurring = form.get(fname + '_recurring', False)
        self.occurrences = form.get(fname + '_occurrences', 9999)

    def create(self):
        """Create the line item, register it in the cart, and return it."""
        pfg = self.field.aq_parent
        if self.is_recurring:
            # Monthly recurring donation with a capped occurrence count.
            item = RecurringLineItem()
            item.interval = 1
            item.unit = 'months'
            item.total_occurrences = self.occurrences
        else:
            item = PayableLineItem()
        item.item_id = self.field.UID()
        if IIntIds:
            intid_utility = queryUtility(IIntIds)
            if intid_utility:
                item.uid = intid_utility.register(self.field)
        item.name = safe_unicode(pfg.Title())
        item.cost = float(self.amount)
        item.quantity = 1
        # Clear the cart before adding the donation.
        # We don't want to surprise users by charging them for something
        # they didn't realize they were buying!
        # (Snapshot the keys so we never mutate the cart while iterating.)
        for key in list(self.cart.keys()):
            del self.cart[key]
        self.cart[item.item_id] = item
        alsoProvides(self.cart, IDonationCart)
        notify(DonationCreatedEvent(self.cart))
        # Best-effort: remember where the donor came from. Narrowed from a
        # bare `except:` so SystemExit/KeyboardInterrupt are not swallowed.
        try:
            sessions.set_came_from_url(pfg)
        except Exception:
            pass
        return item
| [
"david@glicksoftware.com"
] | david@glicksoftware.com |
cf1aea9c373dd9423399867736861e3d9fddb55f | f4c9f0b4254f878820b7003d6a73b03f3a95b54b | /surveyapp/urls.py | 58a8391346baa3fccc31394a28dc9073a5e156e4 | [] | no_license | carlomoan/surveyapp | c3a7235baf2cbf269aea839d5746871938161d7c | 3c4bd4d59e250eaf1f0a1d2a92c21925eb002bc6 | refs/heads/main | 2023-06-05T11:23:40.508620 | 2021-06-19T11:32:53 | 2021-06-19T11:32:53 | 342,884,338 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,115 | py | """surveyapp URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
from django.conf.urls.static import static
from django.conf import settings
# Root URLconf: the admin site plus the two app-level URL modules
# (accounts at the site root, the survey app under /survey/).
urlpatterns = [
    path('admin/', admin.site.urls),
    path('',include('accounts.urls')),
    path('survey/',include('survey_project.urls'))
]
# Serve static and media files from Django itself, but only with DEBUG on
# (in production a real web server is expected to handle these).
if settings.DEBUG:
    urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
    urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| [
"agbbelly89@gmail.com"
] | agbbelly89@gmail.com |
40e14b319898e3aa7c0b8261d42eea8d55f52f5d | 1c2cd5951f82a5fb12142621a3b9baea14cf4a31 | /ABC012/ABC012B.py | 93560073a8f77c4f747fddf4fd00cc740a476428 | [] | no_license | ksera332/Atcoder_records | 55ef832eb3e517b99334eb00d2287cd4a1bc83cd | f8b1f13211bae79b7da6d63ba9b1bd177031aef9 | refs/heads/master | 2022-12-08T19:25:26.834948 | 2020-08-29T12:55:30 | 2020-08-29T12:55:30 | 263,906,419 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 109 | py |
# coding: utf-8
# In[37]:
import time
# Read a duration N in seconds and print it as HH:MM:SS.
# gmtime keeps the value timezone-free; assumes N < 86400 — TODO confirm
# against the problem's constraints.
N = int(input())
print(time.strftime('%H:%M:%S', time.gmtime(N)))
| [
"ksera6@gmail.com"
] | ksera6@gmail.com |
f9331a0b224367efbac047a5657beb72ee78ef42 | 968a2fafd0aef64e9e182f19408a7f3b19f545aa | /test.py | 0e67c413dc8a0a78e70386744fcd310f8050712a | [] | no_license | winniex0412/arm-robot | 218137752eb02cbefde301814261176015ebb072 | f425617a2d38b4afb9fe5e1536bc3b753d523831 | refs/heads/master | 2022-07-18T10:00:48.072058 | 2020-05-19T01:02:01 | 2020-05-19T01:02:01 | 265,103,896 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 88 | py | import numpy
pts1 = [1, 2]
for i in range(2):
A.append([[pts1[i], 1], [pts1[i], 1]) | [
"winniex0412@gmail.com"
] | winniex0412@gmail.com |
453df2757c93d60538dfe166be1476dceac2238d | a82cacae7081f4a04fabdf87cb2833d4ce8bc6db | /venv/bin/easy_install-3.7 | 24bbc69ec06fbb08b587a66a965844a5e5c05e2c | [] | no_license | sadhikari89/mfscrm | 55909712f808344611d8ed3b45b3cc17d88125e9 | ea1275f8a97542a07e0ffc08d5b597d7e95d2490 | refs/heads/master | 2020-03-31T06:30:01.629465 | 2018-10-08T05:11:38 | 2018-10-08T05:11:38 | 151,985,043 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 465 | 7 | #!/Users/surajadhikari/Desktop/Suraj/foodservice/mfscrm/venv/bin/python
# EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==39.1.0','console_scripts','easy_install-3.7'
__requires__ = 'setuptools==39.1.0'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('setuptools==39.1.0', 'console_scripts', 'easy_install-3.7')()
)
| [
"surajadhikari@unomaha.edu"
] | surajadhikari@unomaha.edu |
21e280b471783f50c1733739e8d58cef548fa3c4 | e93b4e5306e168067129d7272cf234476a837fcf | /Python3/utils/download.py | 0d811f43e8527a92bd075c86929a62b732d36ec7 | [] | no_license | OFShare/Records | 0648e2649514f69c3819a83e4e542e2923012f4f | 06cfbf74e2617082842dee4c40509fcc024d1e50 | refs/heads/master | 2022-02-07T09:05:50.677146 | 2022-01-26T06:49:41 | 2022-01-26T06:49:41 | 231,546,626 | 7 | 3 | null | null | null | null | UTF-8 | Python | false | false | 921 | py | import gzip
import os
import time
import shutil
import tempfile
import numpy as np
from six.moves import urllib
import tensorflow as tf
def download(directory, filename):
"""Download images of filename to directory"""
if not tf.gfile.Exists(directory):
tf.gfile.MakeDirs(directory)
with open(filename,'r+') as fin:
lines = fin.readlines()
count = 0
for url in lines:
count +=1
filepath = os.path.join(directory,str(count)+'.jpg')
if tf.gfile.Exists(filepath):
print('processed %d image.',count)
continue
try:
time.sleep(1)
urllib.request.urlretrieve(url,filepath)
except Exception as e:
print('Acui what error: ',str(e))
time.sleep(10)
print('processing %d image.',count)
if __name__=='__main__':
download('costa_datas','/home/acui/Downloads/costa.txt')
| [
"OFShare@outlook.com"
] | OFShare@outlook.com |
bdb98d4915ddab916a9a01b0e1eaf94996469c75 | f46f73b81a39b6ed87bad97db9a8b8a51cc66e3d | /django_project/django_project/settings.py | ad09bbda76036e317ea600e423df88a8ce107927 | [] | no_license | mspstead/FloodSite | 3c875e8eee8febfdf5fe80824585752ce7acd849 | 1bc30d1105671492190339417ed4782aa51b6af7 | refs/heads/master | 2021-07-05T17:40:03.874018 | 2020-07-05T16:07:42 | 2020-07-05T16:07:42 | 52,359,841 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,259 | py | """
Django settings for django_project project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'RYVhlyK9EJZjAG9PRhK0A5x3Piq5O7U9910phFyNPhiZL25XyX'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
TEMPLATE_DEBUG = False
ALLOWED_HOSTS = ["*"]
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'leaflet',
'twitter_bootstrap',
'flood',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'django_project.urls'
WSGI_APPLICATION = 'django_project.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'django',
'USER': 'django',
'PASSWORD': 'AgZP7U56Mw',
'HOST': 'localhost',
'PORT': '',
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATICFILES_DIR = '/django_project/django_project/static/'
STATIC_ROOT = '/django_project/django_project/static/'
STATIC_URL = '/static/' | [
"mspstead@gmail.com"
] | mspstead@gmail.com |
56b5f1fee43c2e8ebbd8a2c77d56f436829efdf3 | 015bddd50c0e2ac5ecb689030bdcc901420f2e4a | /generic/__init__.py | b54ed53db095f36abaa9f8d4dffb5ff094042d42 | [] | no_license | KobaLarrieu/metaheuristique | 1cf651a5679bdba30274ce556c98934b69c0202f | 2b5daa53081fe48947ad69d90bd22422c4ed5a70 | refs/heads/master | 2023-03-17T00:56:10.156553 | 2019-05-23T10:42:53 | 2019-05-23T10:42:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 354 | py | #! /usr/bin/env python
# -*- coding: utf-8 -*-
from local_search_algorithm import LocalSearchAlgorithm
from population_search_algorithm import PopulationSearchAlgorithm
from solution import Solution, BinarySolution, PermutationSolution, RealSolution, VectorBinarySolution
from problem import Problem
from evolution_strategy import EvolutionStrategy
| [
"clement.bellanger3@etu.univ-lorraine.fr"
] | clement.bellanger3@etu.univ-lorraine.fr |
e9222d3599e353156217730a4903521d6e392997 | 90c6262664d013d47e9a3a9194aa7a366d1cabc4 | /tests/storage/cases/test_KT1QHRKLkwaHDV6TyY9H4ZU9ZwGuwZ1TWPfg_babylon.py | 69dc0227141ef450501ea7063314cad598bd84b6 | [
"MIT"
] | permissive | tqtezos/pytezos | 3942fdab7aa7851e9ea81350fa360180229ec082 | a4ac0b022d35d4c9f3062609d8ce09d584b5faa8 | refs/heads/master | 2021-07-10T12:24:24.069256 | 2020-04-04T12:46:24 | 2020-04-04T12:46:24 | 227,664,211 | 1 | 0 | MIT | 2020-12-30T16:44:56 | 2019-12-12T17:47:53 | Python | UTF-8 | Python | false | false | 1,170 | py | from unittest import TestCase
from tests import get_data
from pytezos.michelson.converter import build_schema, decode_micheline, encode_micheline, micheline_to_michelson
class StorageTestKT1QHRKLkwaHDV6TyY9H4ZU9ZwGuwZ1TWPfg_babylon(TestCase):
@classmethod
def setUpClass(cls):
cls.maxDiff = None
cls.contract = get_data('storage/mainnet/KT1QHRKLkwaHDV6TyY9H4ZU9ZwGuwZ1TWPfg_babylon.json')
def test_storage_encoding_KT1QHRKLkwaHDV6TyY9H4ZU9ZwGuwZ1TWPfg_babylon(self):
type_expr = self.contract['script']['code'][1]
val_expr = self.contract['script']['storage']
schema = build_schema(type_expr)
decoded = decode_micheline(val_expr, type_expr, schema)
actual = encode_micheline(decoded, schema)
self.assertEqual(val_expr, actual)
def test_storage_schema_KT1QHRKLkwaHDV6TyY9H4ZU9ZwGuwZ1TWPfg_babylon(self):
_ = build_schema(self.contract['script']['code'][0])
def test_storage_format_KT1QHRKLkwaHDV6TyY9H4ZU9ZwGuwZ1TWPfg_babylon(self):
_ = micheline_to_michelson(self.contract['script']['code'])
_ = micheline_to_michelson(self.contract['script']['storage'])
| [
"mz@baking-bad.org"
] | mz@baking-bad.org |
078e7534de86ed7c579a2ba0c616d3db8756b6be | d32a1eff193052dd62ad05f638346c7132796c2e | /python/pyspark/pandas/tests/connect/test_parity_groupby_slow.py | 375dc703d956f229358f88f2ca4bde9e8f96075a | [
"CC0-1.0",
"MIT",
"Python-2.0",
"BSD-3-Clause",
"LGPL-2.0-or-later",
"Apache-2.0",
"LicenseRef-scancode-public-domain",
"LicenseRef-scancode-unknown-license-reference",
"LicenseRef-scancode-generic-cla",
"LicenseRef-scancode-free-unknown",
"EPL-1.0",
"Classpath-exception-2.0",
"GCC-exception... | permissive | Kyligence/spark | c266dc19c7c2e2914eea34c9922f97ba17011075 | f29502acf2fe96e23525268b0a29a6338b41bce6 | refs/heads/master | 2023-08-31T08:42:15.254881 | 2023-04-22T00:30:53 | 2023-04-22T00:30:53 | 100,349,194 | 6 | 61 | Apache-2.0 | 2023-09-14T06:29:07 | 2017-08-15T07:04:07 | Scala | UTF-8 | Python | false | false | 2,010 | py | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import unittest
from pyspark.pandas.tests.test_groupby_slow import GroupBySlowTestsMixin
from pyspark.testing.connectutils import ReusedConnectTestCase
from pyspark.testing.pandasutils import PandasOnSparkTestUtils, TestUtils
class GroupBySlowParityTests(
GroupBySlowTestsMixin, PandasOnSparkTestUtils, TestUtils, ReusedConnectTestCase
):
@unittest.skip("Fails in Spark Connect, should enable.")
def test_diff(self):
super().test_diff()
@unittest.skip("Fails in Spark Connect, should enable.")
def test_dropna(self):
super().test_dropna()
@unittest.skip("Fails in Spark Connect, should enable.")
def test_rank(self):
super().test_rank()
@unittest.skip("Fails in Spark Connect, should enable.")
def test_split_apply_combine_on_series(self):
super().test_split_apply_combine_on_series()
if __name__ == "__main__":
from pyspark.pandas.tests.connect.test_parity_groupby_slow import * # noqa: F401
try:
import xmlrunner # type: ignore[import]
testRunner = xmlrunner.XMLTestRunner(output="target/test-reports", verbosity=2)
except ImportError:
testRunner = None
unittest.main(testRunner=testRunner, verbosity=2)
| [
"gurwls223@apache.org"
] | gurwls223@apache.org |
53869b1c06476c15712838b6b978e6f1f9dd61ec | 17884d3aa81bd3d3cade878c9519d002f0190903 | /server/api/chat.py | 6f2d857be8a0b8a6781fbd567ecadfd4f2f2c535 | [] | no_license | ar3s3ru/uChan3 | 10b17cd684055ebadec28036886efc6a5911a2c7 | 5ef0803c904e7b37bfded7df379b3f39f2dedec9 | refs/heads/master | 2021-01-09T06:58:01.823259 | 2016-01-24T23:02:16 | 2016-01-24T23:02:16 | 49,897,591 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,964 | py | from server import uchan
from server.api import AuthEntity
from server.api import handler, handler_data, handler_args
from server.models import User, ThreadUser, ChatRequest, Chat, Message
from server.common import responses, JSONRepresentation
from server.common.routines import str_to_bool
# --------------------------------------------------------------------------------------------------------------------->
# Chat requests # -------------------------------------------------------------------------------------------------->
# --------------------------------------------------------------------------------------------------------------------->
class ChatRequestAPI(AuthEntity):
@staticmethod
def get_threaduser(id: int):
return ThreadUser.query.get(id)
@handler
def get(self):
def requests_routine(user: User):
return responses.successful(200, [JSONRepresentation.chatrequest(req)
for req in user.get_requests() if req.accepted is False])
return self.session_oriented_request(requests_routine)
@handler
def post(self, id: int):
def request_routine(user: User):
threaduser = ChatRequestAPI.get_threaduser(id)
if threaduser is None:
return responses.client_error(404, 'User map object does not exists')
if user.has_requested_chat(threaduser.user):
return responses.client_error(409, 'Chat already requested')
request = ChatRequest(user.id, threaduser.user)
uchan.add_to_db(request)
return responses.successful(201, 'Chat request sent')
return self.session_oriented_request(request_routine)
uchan.api.add_resource(ChatRequestAPI, '/api/chat/request', '/api/chat/request/<int:id>')
# --------------------------------------------------------------------------------------------------------------------->
# --------------------------------------------------------------------------------------------------------------------->
# Chat # ------------------------------------------------------------------------------------------------------------>
# --------------------------------------------------------------------------------------------------------------------->
def accept_chat_routine(user: User, func, id: int, *args, **kwargs):
chatrequest = AcceptChatAPI.get_chatrequest(id)
if chatrequest is None:
responses.client_error(404, 'Chat request does not exists')
if chatrequest.u_to != user.id:
responses.client_error(401, 'Cannot use this chat request')
if chatrequest.accepted:
responses.client_error(409, 'Chat request already accepted')
return func(user, chatrequest, *args, **kwargs)
class AcceptChatAPI(AuthEntity):
@staticmethod
def get_chatrequest(id: int):
return ChatRequest.query.get(id)
@staticmethod
def get_chat(id: int):
return Chat.query.get(id)
@handler
def post(self, id: int):
def accepting_routine(user: User, chatrequest: ChatRequest):
# Define new Chat entity
chat = Chat(chatrequest.u_from, chatrequest.u_to)
chatrequest.accept()
uchan.add_to_db(chat)
# Return new chat
return responses.successful(201, 'Chat request accepted')
return self.session_oriented_request(accept_chat_routine, accepting_routine, id)
@handler
def delete(self, id: int):
def deleting_routine(user: User, chatrequest: ChatRequest):
# Delete ChatRequest
uchan.delete_from_db(chatrequest)
return '', 204
return self.session_oriented_request(accept_chat_routine, deleting_routine, id)
uchan.api.add_resource(AcceptChatAPI, '/api/chat/accept/<int:id>')
# --------------------------------------------------------------------------------------------------------------------->
| [
"b.zurchiele80@gmail.com"
] | b.zurchiele80@gmail.com |
601207597dad05b93d97e0c28bba76bc1248a8c9 | 80da118a0c597a9ff883d482ae9a3a2e437369c0 | /street_app_server/app/database/model.py | 56dc512f3ed16ee02851e298fc16170b73edae76 | [] | no_license | ChaosJohn/temple_light | 51fc37ef99b450bdf321a4ae1341579675b2336a | fb3f6af916ffa0c72a60ec8ebac7cd88adfc8a6c | refs/heads/master | 2023-01-04T13:42:08.632680 | 2020-06-14T12:22:29 | 2020-06-14T12:22:29 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 55,826 | py | #!/usr/bin/env python3
# coding: utf8
# 20170220 anChaOs
import json
from datetime import datetime, timedelta
# from sqlalchemy import Column, Integer, String, DateTime, Text
from werkzeug.security import generate_password_hash, check_password_hash
from flask.ext.login import UserMixin
from sqlalchemy.sql import func
from app import db, lm
from config import config
from app.tool import dbg # for print
from app.tool import print_exception_info # for traceback
def to_dict(self):
    """Serialize a SQLAlchemy model instance into a plain dict.

    Keys are the mapped column names.  ``datetime`` values are converted
    to integer Unix timestamps so the result is JSON-serializable, and
    the password-hash column (``pswd_h``) is stripped so it can never
    leak through an API response.

    :return: dict mapping column name -> attribute value (or ``None``).
    """
    # NOTE: the column collection lives on the Table object as
    # ``__table__.columns``; the previous ``self.__table__.db.Columns``
    # raised AttributeError on every call.
    d = {}
    for c in self.__table__.columns:
        value = getattr(self, c.name, None)
        if isinstance(value, datetime):
            # epoch seconds, as sketched in the original draft code
            value = int(value.timestamp())
        d[c.name] = value
    d.pop('pswd_h', None)  # never expose password hashes
    return d
# Monkey-patch: attach the serializer to the declarative base so every
# model class defined in this module gains an instance method ``to_dict()``.
db.Model.to_dict = to_dict
"""
@ 用户表
roll 角色:0玩家,1商家,2销售员,3总代理,4总部
belong 所属:当roll为0时,该字段表示所属的代理,默认值为1。为0表示非玩家
"""
class User(db.Model, UserMixin):
__tablename__ = 'mafu_user'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(24), nullable=False)
phone = db.Column(db.String(24), nullable=False)
email = db.Column(db.String(64), nullable=False)
pswd_h = db.Column(db.String(128), nullable=False) # password_hash
roll = db.Column(db.Integer, nullable=False, server_default="0")
nickname = db.Column(db.String(128), nullable=True, server_default="")
sex = db.Column(db.String(1), nullable=False, server_default="0")
province = db.Column(db.String(64), nullable=False, server_default="")
city = db.Column(db.String(64), nullable=False, server_default="")
country = db.Column(db.String(64), nullable=False, server_default="")
headimgurl = db.Column(db.String(512), nullable=False, server_default="")
ctime = db.Column(db.DateTime, nullable=False)
utime = db.Column(db.DateTime, nullable=False)
status = db.Column(db.Integer, nullable=False, server_default="0")
def __init__(self, name, phone, email, password, roll, wechat_body=None):
self.name = name
self.phone = phone
self.email = email
self.pswd_h = generate_password_hash(password)
self.roll = roll
if wechat_body and isinstance(wechat_body, dict):
self.nickname = wechat_body.get('nickname', '')
self.sex = wechat_body.get('sex', '')
self.province = wechat_body.get('province', '')
self.city = wechat_body.get('city', '')
self.country = wechat_body.get('country', '')
self.headimgurl = wechat_body.get('headimgurl', '')
now = datetime.now()
self.ctime = now
self.utime = now
self.status = 0
@property
def password(self):
raise AttributeError('password is not a readable attribute')
@password.setter
def password(self, password):
self.pswd_h = generate_password_hash(password)
def verify_password(self, password):
return check_password_hash(self.pswd_h, password)
def get_id(self):
return self.id
def __repr__(self):
return '<User %r>' % self.name
"""
@ openluat用户表
"""
DEFAULT_ROLE = '000000000000'
class OpenLuatUser(UserMixin, db.Model):
__tablename__ = 'user'
__bind_key__ = 'openluat_users'
__table_args__ = {"extend_existing": True}
id = db.Column(db.Integer, primary_key = True)
name = db.Column(db.String(24), unique = True, index = True, nullable = False)
phone = db.Column(db.String(24), unique = True, nullable = False)
email = db.Column(db.String(64), unique = True, index = True, nullable = False)
role = db.Column(db.String(32), nullable = False)
password_hash = db.Column(db.String(128), nullable = False)
creation_time = db.Column(db.DateTime, nullable = False)
last_login_time = db.Column(db.DateTime, nullable = False)
creator_id = db.Column(db.Integer, nullable = False, server_default="0")
nickname = db.Column(db.String(24), nullable = False, server_default="")
address = db.Column(db.String(32), nullable = False, server_default="")
gender = db.Column(db.Integer, nullable = False, server_default="0")
birthday = db.Column(db.DateTime, nullable = False, server_default="2017-01-01 00:00:00")
# sim
company = db.Column(db.String(24), nullable = False, server_default="")
qq = db.Column(db.String(12), nullable = False, server_default="")
wangwang = db.Column(db.String(12), nullable = False, server_default="")
salesman = db.Column(db.String(12), nullable = False, server_default="")
# imei order
invoice_company = db.Column(db.String(24), nullable = False, server_default="")
bank = db.Column(db.String(32), nullable = False, server_default="")
bank_account = db.Column(db.String(32), nullable = False, server_default="")
tax_number = db.Column(db.String(32), nullable = False, server_default="")
fixed_phone = db.Column(db.String(24), nullable = False, server_default="")
shipping_address = db.Column(db.String(32), nullable = False, server_default="")
recipient = db.Column(db.String(32), nullable = False, server_default="")
recipient_phone = db.Column(db.String(24), nullable = False, server_default="")
# @staticmethod
def __init__(self, **kwargs):
super(OpenLuatUser, self).__init__(**kwargs)
@property
def password(self):
raise AttributeError('password is not a readable attribute')
def get_id(self):
return self.id
def get_name(self):
return self.name
@password.setter
def password(self, password):
self.password_hash = generate_password_hash(password)
def verify_password(self, password):
return check_password_hash(self.password_hash, password)
def __repr__(self):
return '<OpenLuatUser %r>' % self.name
@lm.user_loader
def load_user(user_id):
    """Flask-Login callback: resolve a session user id to an OpenLuatUser."""
    return OpenLuatUser.query.get(int(user_id))
"""
@ 用户表
admin_uid 管理员openluat用户id:仅对代理商管理员有效, 0表示不是代理商
"""
class WechatUser(db.Model):
    """Wechat identity attached to a ``mafu_user`` account.

    Per the module note above, ``admin_uid`` is the OpenLuat user id of
    the agent administrator and is only meaningful for agent admins
    (0 means "not an agent").  ``privilege`` stores the raw wechat
    privilege value; lists are serialized to a JSON string.
    """
    __tablename__ = 'wechat_user'
    id = db.Column(db.Integer, primary_key=True)
    user_id = db.Column(db.ForeignKey(u'mafu_user.id', ondelete=u'CASCADE', onupdate=u'CASCADE'), nullable=False)
    admin_uid = db.Column(db.Integer, nullable=False, server_default='0')
    openid = db.Column(db.String(256), index=True, nullable=False, server_default="")
    unionid = db.Column(db.String(256), index=True, nullable=False, server_default="")
    nickname = db.Column(db.String(128), nullable=True, server_default="")
    sex = db.Column(db.String(1), nullable=False, server_default="0")
    province = db.Column(db.String(64), nullable=False, server_default="")
    city = db.Column(db.String(64), nullable=False, server_default="")
    country = db.Column(db.String(64), nullable=False, server_default="")
    headimgurl = db.Column(db.String(512), nullable=False, server_default="")
    privilege = db.Column(db.Text, nullable=False)
    ctime = db.Column(db.DateTime, nullable=False)
    utime = db.Column(db.DateTime, nullable=False)
    status = db.Column(db.Integer, nullable=False, server_default="0")
    def __init__(self, body):
        """Build a record from a wechat userinfo dict.

        Required keys: ``user_id``, ``admin_uid``, ``openid``; all other
        profile fields fall back to empty strings.  *body* is not modified.
        """
        self.user_id = body['user_id']
        self.admin_uid = body['admin_uid']
        self.openid = body['openid']
        self.unionid = body.get('unionid', '')
        self.nickname = body.get('nickname', '')
        self.sex = body.get('sex', '')
        self.province = body.get('province', '')
        self.city = body.get('city', '')
        self.country = body.get('country', '')
        self.headimgurl = body.get('headimgurl', '')
        # Serialize list privileges into a local variable instead of
        # writing the JSON string back into the caller's dict — the old
        # code mutated `body`, a surprising side effect on the argument.
        privilege = body.get('privilege', '')
        if isinstance(privilege, list):
            privilege = json.dumps(privilege)
        self.privilege = privilege
        # creation and last-update stamps start out equal
        now = datetime.now()
        self.ctime = now
        self.utime = now
        self.status = 0
    def __repr__(self):
        return '<WechatUser %r>' % self.nickname
class AplyAgent(db.Model):
    """Pending application to become an agent ("Aply" = "Apply",
    misspelling kept for schema compatibility).

    ``hookid`` references the existing agent the applicant wants to be
    attached under.
    """
    __tablename__ = 'aply_agent'
    id = db.Column(db.Integer, primary_key=True)
    user_id = db.Column(db.ForeignKey(u'mafu_user.id', ondelete=u'CASCADE', onupdate=u'CASCADE'), nullable=False)
    hookid = db.Column(db.ForeignKey(u'agent.id', ondelete=u'CASCADE', onupdate=u'CASCADE'), nullable=False)
    name = db.Column(db.String(32), nullable=False)
    phone = db.Column(db.String(24), nullable=False)
    desc = db.Column(db.String(128), nullable=False)
    ctime = db.Column(db.DateTime, nullable=False)
    utime = db.Column(db.DateTime, nullable=False)
    status = db.Column(db.Integer, nullable=False, server_default="0")
    def __init__(self, user_id, hookid, name, phone, desc):
        self.user_id = user_id
        self.hookid = hookid
        self.name = name
        self.phone = phone
        self.desc = desc
        # creation and last-update stamps start out equal
        now = datetime.now()
        self.ctime = now
        self.utime = now
        self.status = 0
    def __repr__(self):
        return '<AplyAgent %r>' % self.name
"""
@ 特殊字段说明
salesman 业务员
0非业务员 1业务员
openluat_user_id openluat总表的user_id
存储在aliyun-mysql, openluat_user.user
hook_agent_id 上级代理商的id
0表示自己是总代理
level 代理等级
1商家 2市级代理 3省级代理 4总代理
slevel 业务员等级
1一级业务员 2二级业务员 3三级业务员
expandable 是否可设置下级
0不可以 1可以
withdrawable 充值金额是否可直接体现
0不可以 1可以
"""
class Agent(db.Model):
    """Agent / salesman node in the distribution hierarchy.

    Field semantics (from the module note above):
      * ``salesman``         -- 0 regular agent, 1 salesman
      * ``openluat_user_id`` -- id in the central OpenLuat ``user`` table
      * ``hook_agent_id``    -- parent agent id; 0 means top-level agent
      * ``level``            -- agent level: 1 merchant, 2 city, 3 province, 4 top
      * ``slevel``           -- salesman level: 1, 2 or 3
      * ``expandable``       -- 1 if allowed to create sub-agents
      * ``withdrawable``     -- 1 if recharge income can be withdrawn directly
    """
    __tablename__ = 'agent'
    id = db.Column(db.Integer, primary_key=True)
    salesman = db.Column(db.Integer, nullable=False, server_default='0')
    openluat_user_id = db.Column(db.Integer, nullable=False)
    hook_agent_id = db.Column(db.Integer, nullable=False, server_default="1")
    level = db.Column(db.Integer, nullable=False, server_default='0')
    slevel = db.Column(db.Integer, nullable=False, server_default='0')
    expandable = db.Column(db.Integer, nullable=False, server_default="0")
    withdrawable = db.Column(db.Integer, nullable=False, server_default="0")
    name = db.Column(db.String(32), nullable=False)
    phone = db.Column(db.String(24), nullable=False)
    email = db.Column(db.String(64), nullable=False)
    desc = db.Column(db.String(128), nullable=False)
    address = db.Column(db.String(128), nullable=False)
    remark = db.Column(db.String(128), nullable=False)
    ctime = db.Column(db.DateTime, nullable=False)
    utime = db.Column(db.DateTime, nullable=False)
    status = db.Column(db.Integer, nullable=False, server_default="0")
    def __init__(self, salesman, user_id, hook_agent_id, level, slevel, expandable, withdrawable, name, phone, email, desc, address, remark):
        self.salesman = salesman
        # note: the ``user_id`` parameter maps to ``openluat_user_id``
        self.openluat_user_id = user_id
        self.hook_agent_id = hook_agent_id
        self.level = level
        self.slevel = slevel
        self.expandable = expandable
        self.withdrawable = withdrawable
        self.name = name
        self.phone = phone
        self.email = email
        self.desc = desc
        self.address = address
        self.remark = remark
        # creation and last-update stamps start out equal
        now = datetime.now()
        self.ctime = now
        self.utime = now
        self.status = 0
    def __repr__(self):
        return '<Agent %r>' % self.name
class AgentInfo(db.Model):
    """Branding settings for an agent's portal (logo image and page title).

    Unlike the other tables in this module, timestamps here are handled
    by the database (``server_default=func.now()``) and ``utime`` is
    refreshed automatically via ``onupdate``.
    """
    __tablename__ = 'agent_info'
    id = db.Column(db.Integer, primary_key=True)
    agent_id = db.Column(db.ForeignKey(u'agent.id', ondelete=u'CASCADE', onupdate=u'CASCADE'), nullable=False)
    logo = db.Column(db.String(128), nullable=False, server_default=config.DEFAULT_LOGO)
    title = db.Column(db.String(32), nullable=False, server_default='码夫支付管理系统')
    ctime = db.Column(db.DateTime, nullable=False, server_default=func.now())
    utime = db.Column(db.DateTime, nullable=False, server_default=func.now(), onupdate=func.now())
    status = db.Column(db.Integer, nullable=False, server_default='0')
    def __init__(self, agent_id, logo, title):
        self.agent_id = agent_id
        self.logo = logo
        self.title = title
    def __repr__(self):
        return '<AgentInfo for agent: %r>' % self.agent_id
"""
@ 商品表
cat 类别:0按摩椅,1娃娃机,2洗衣机,99充值套餐, 100广告优惠券
"""
# Product categories for Product.cat (meanings from the module note above):
PRODUCT_CAT_RELAY = 0      # relay-controlled device (massage chair)
PRODUCT_CAT_PULSE = 1      # pulse-controlled device (claw/doll machine)
PRODUCT_CAT_WASH = 2       # washing machine
PRODUCT_CAT_RECHARGE = 99  # recharge package
PRODUCT_CAT_COUPON = 100   # advertising coupon
class Product(db.Model):
    """Purchasable item offered by an agent.

    ``cat`` is one of the ``PRODUCT_CAT_*`` constants above.  ``price``
    and ``value`` are integers — presumably the price is in cents and
    ``value`` is the delivered quantity/duration; confirm with callers.
    ``deleted`` is a soft-delete flag.
    """
    __tablename__ = 'product'
    id = db.Column(db.Integer, primary_key=True)
    agent_id = db.Column(db.ForeignKey(u'agent.id', ondelete=u'CASCADE', onupdate=u'CASCADE'), nullable=False)
    title = db.Column(db.String(64), nullable=False)
    body = db.Column(db.String(128), nullable=False)
    value = db.Column(db.Integer, nullable=False, server_default='0')
    cat = db.Column(db.Integer, nullable=False, server_default='0')
    price = db.Column(db.Integer, nullable=False, server_default='1')
    inventory = db.Column(db.Integer, nullable=False, server_default='99999')
    deleted = db.Column(db.Integer, nullable=False, server_default="0")
    ctime = db.Column(db.DateTime, nullable=False)
    utime = db.Column(db.DateTime, nullable=False)
    status = db.Column(db.Integer, nullable=False, server_default="0")
    def __init__(self, agent_id, title, body, value, cat, price, inventory):
        self.agent_id = agent_id
        self.title = title
        self.body = body
        self.value = value
        self.cat = cat
        self.price = price
        self.inventory = inventory
        # creation and last-update stamps start out equal
        now = datetime.now()
        self.ctime = now
        self.utime = now
        self.status = 0
    def __repr__(self):
        return '<Product %r>' % self.title
class Pay(db.Model):
    """Payment order for a product, tied to a device by ``imei``.

    ``trade_no`` is the unique, indexed merchant order number.
    ``prepay_id``/``qrcode``/``ali_trade_no`` hold gateway-specific
    handles; ``pay_mode`` defaults to 1 (gateway mapping not visible in
    this module — confirm against the payment code).  ``ali_trade_no``,
    ``nofity_res`` (sic, "notify") and ``cash_fee`` start empty/zero and
    are filled in by the asynchronous payment notification.
    """
    __tablename__ = 'pay'
    id = db.Column(db.Integer, primary_key=True)
    user_id = db.Column(db.Integer, nullable=False)
    agent_id = db.Column(db.Integer, nullable=False)
    imei = db.Column(db.String(32), nullable=False)
    pay_mode = db.Column(db.Integer, nullable=False, server_default="1")
    trade_no = db.Column(db.String(24), nullable=False, unique=True, index=True)
    product_id = db.Column(db.ForeignKey(u'product.id', ondelete=u'CASCADE', onupdate=u'CASCADE'), nullable=False)
    title = db.Column(db.String(64), nullable=False)
    body = db.Column(db.String(128), nullable=False)
    cat = db.Column(db.Integer, nullable=False, server_default='0')
    total_fee = db.Column(db.Integer, nullable=False, server_default='1')
    prepay_id = db.Column(db.String(64), nullable=False, server_default="")
    qrcode = db.Column(db.String(128), nullable=False, server_default="")
    ali_trade_no = db.Column(db.String(64), nullable=False, server_default="")
    nofity_res = db.Column(db.Text, nullable=False)
    cash_fee = db.Column(db.Integer, nullable=False, server_default="0")
    deleted = db.Column(db.Integer, nullable=False, server_default="0")
    ctime = db.Column(db.DateTime, nullable=False)
    utime = db.Column(db.DateTime, nullable=False)
    status = db.Column(db.Integer, nullable=False, server_default="0")
    def __init__(self, user_id, agent_id, imei, trade_no, product_id, title, body, cat, total_fee, prepay_id, qrcode, pay_mode=1):
        self.user_id = user_id
        self.agent_id = agent_id
        self.imei = imei
        self.trade_no = trade_no
        self.pay_mode = pay_mode
        self.product_id = product_id
        self.title = title
        self.body = body
        self.cat = cat
        self.total_fee = total_fee
        self.prepay_id = prepay_id
        # gateway-result fields start empty; filled on payment callback
        self.ali_trade_no = ""
        self.qrcode = qrcode
        self.nofity_res = ''
        self.cash_fee = 0
        # creation and last-update stamps start out equal
        now = datetime.now()
        self.ctime = now
        self.utime = now
        self.status = 0
    def __repr__(self):
        return '<Pay %r>' % self.trade_no
class Refund(db.Model):
    """Refund request against a ``Pay`` order.

    ``refund_no`` is the unique refund number; ``nofity_res`` (sic)
    stores the raw gateway notification.  Unlike its sibling models,
    ``__init__`` takes the initial ``status`` explicitly instead of
    hard-coding 0.
    """
    __tablename__ = 'refund'
    id = db.Column(db.Integer, primary_key=True)
    user_id = db.Column(db.Integer, nullable=False)
    pay_id = db.Column(db.ForeignKey(u'pay.id', ondelete=u'CASCADE', onupdate=u'CASCADE'), nullable=False)
    refund_no = db.Column(db.String(24), nullable=False, unique=True)
    total_fee = db.Column(db.Integer, nullable=False, server_default='1')
    refund_fee = db.Column(db.Integer, nullable=False, server_default='1')
    nofity_res = db.Column(db.Text, nullable=False)
    deleted = db.Column(db.Integer, nullable=False, server_default="0")
    ctime = db.Column(db.DateTime, nullable=False)
    utime = db.Column(db.DateTime, nullable=False)
    status = db.Column(db.Integer, nullable=False, server_default="0")
    def __init__(self, user_id, pay_id, refund_no, total_fee, refund_fee, status):
        self.user_id = user_id
        self.pay_id = pay_id
        self.refund_no = refund_no
        self.total_fee = total_fee
        self.refund_fee = refund_fee
        self.nofity_res = ''
        # creation and last-update stamps start out equal
        now = datetime.now()
        self.ctime = now
        self.utime = now
        self.status = status
    def __repr__(self):
        return '<Refund %r>' % self.refund_no
class Record(db.Model):
    """Usage record tied to a paid order (one per ``pay_id``).

    ``stime``/``etime`` are presumably the service start/end window for
    the purchase — confirm against the device-control code.
    """
    __tablename__ = 'record'
    id = db.Column(db.Integer, primary_key=True)
    pay_id = db.Column(db.ForeignKey(u'pay.id', ondelete=u'CASCADE', onupdate=u'CASCADE'), nullable=False, unique=True)
    pay_way = db.Column(db.Integer, nullable=False, server_default="0")
    user_id = db.Column(db.Integer, nullable=False)
    agent_id = db.Column(db.ForeignKey(u'agent.id', ondelete=u'CASCADE', onupdate=u'CASCADE'), nullable=False)
    product_id = db.Column(db.ForeignKey(u'product.id', ondelete=u'CASCADE', onupdate=u'CASCADE'), nullable=False)
    stime = db.Column(db.DateTime, nullable=False)
    etime = db.Column(db.DateTime, nullable=False)
    ctime = db.Column(db.DateTime, nullable=False)
    utime = db.Column(db.DateTime, nullable=False)
    status = db.Column(db.Integer, nullable=False, server_default="0")
    def __init__(self, pay_id, pay_way, user_id, agent_id, product_id, stime, etime):
        self.pay_id = pay_id
        self.pay_way = pay_way
        self.user_id = user_id
        self.agent_id = agent_id
        self.product_id = product_id
        self.stime = stime
        self.etime = etime
        # creation and last-update stamps start out equal
        now = datetime.now()
        self.ctime = now
        self.utime = now
        self.status = 0
    def __repr__(self):
        return '<Record %r>' % self.id
class RecordNoPay(db.Model):
    """Usage record with no associated payment (table ``record_no_pay``),
    keyed by device ``imei`` instead of an order id.
    """
    __tablename__ = 'record_no_pay'
    id = db.Column(db.Integer, primary_key=True)
    imei = db.Column(db.String(32), nullable=False)
    user_id = db.Column(db.Integer, nullable=False)
    agent_id = db.Column(db.ForeignKey(u'agent.id', ondelete=u'CASCADE', onupdate=u'CASCADE'), nullable=False)
    stime = db.Column(db.DateTime, nullable=False)
    etime = db.Column(db.DateTime, nullable=False)
    ctime = db.Column(db.DateTime, nullable=False)
    utime = db.Column(db.DateTime, nullable=False)
    status = db.Column(db.Integer, nullable=False, server_default="0")
    def __init__(self, imei, user_id, agent_id, stime, etime):
        self.imei = imei
        self.user_id = user_id
        self.agent_id = agent_id
        self.stime = stime
        self.etime = etime
        # creation and last-update stamps start out equal
        now = datetime.now()
        self.ctime = now
        self.utime = now
        self.status = 0
    def __repr__(self):
        return '<RecordNoPay %r>' % self.id
class RecordCouponNopay(db.Model):
    """Non-payment usage record redeemed via coupon (table
    ``record_coupon_no_pay``) — structurally identical to
    ``RecordNoPay``; coupon semantics inferred from the name, confirm
    against callers.
    """
    __tablename__ = 'record_coupon_no_pay'
    id = db.Column(db.Integer, primary_key=True)
    imei = db.Column(db.String(32), nullable=False)
    user_id = db.Column(db.Integer, nullable=False)
    agent_id = db.Column(db.ForeignKey(u'agent.id', ondelete=u'CASCADE', onupdate=u'CASCADE'), nullable=False)
    stime = db.Column(db.DateTime, nullable=False)
    etime = db.Column(db.DateTime, nullable=False)
    ctime = db.Column(db.DateTime, nullable=False)
    utime = db.Column(db.DateTime, nullable=False)
    status = db.Column(db.Integer, nullable=False, server_default="0")
    def __init__(self, imei, user_id, agent_id, stime, etime):
        self.imei = imei
        self.user_id = user_id
        self.agent_id = agent_id
        self.stime = stime
        self.etime = etime
        # creation and last-update stamps start out equal
        now = datetime.now()
        self.ctime = now
        self.utime = now
        self.status = 0
    def __repr__(self):
        return '<RecordCouponNopay %r>' % self.id
class GameCoin(db.Model):
    """Game-coin balance attached to a mafu_user.

    coin defaults to 0 via server_default; __init__ leaves it to the
    database default.
    """
    __tablename__ = 'game_coin'
    id = db.Column(db.Integer, primary_key=True)
    user_id = db.Column(db.ForeignKey(u'mafu_user.id', ondelete=u'CASCADE', onupdate=u'CASCADE'), nullable=False)
    coin = db.Column(db.Integer, nullable=False, server_default="0")
    ctime = db.Column(db.DateTime, nullable=False)
    utime = db.Column(db.DateTime, nullable=False)
    status = db.Column(db.Integer, nullable=False, server_default="0")

    def __init__(self, user_id):
        self.user_id = user_id
        now = datetime.now()
        self.ctime = now
        self.utime = now

    def __repr__(self):
        return '<GameCoin user: %r -- coin: %r>' % (self.user_id, self.coin)
"""
@ 游戏币消费记录表
in_out 类别:1收入 2支出
"""
class CoinRecord(db.Model):
    """Game-coin transaction record.

    in_out flags the direction of the transaction: 1 = income,
    2 = expenditure.  coin is the amount of game coins moved.
    """
    __tablename__ = 'coin_record'
    id = db.Column(db.Integer, primary_key=True)
    user_id = db.Column(db.ForeignKey(u'mafu_user.id', ondelete=u'CASCADE', onupdate=u'CASCADE'), nullable=False)
    in_out = db.Column(db.Integer, nullable=False, server_default='0')
    coin = db.Column(db.Integer, nullable=False, server_default="0")
    ctime = db.Column(db.DateTime, nullable=False)
    utime = db.Column(db.DateTime, nullable=False)
    status = db.Column(db.Integer, nullable=False, server_default="0")

    def __init__(self, user_id, in_out, coin):
        self.user_id = user_id
        self.in_out = in_out
        self.coin = coin
        now = datetime.now()
        self.ctime = now
        self.utime = now

    def __repr__(self):
        # Added for consistency: every sibling model in this module
        # defines a debug representation, but this one did not.
        return '<CoinRecord: %r>' % self.id
class DeviceAddress(db.Model):
    """A physical location (region + address string) owned by an agent,
    referenced by Device.address_id."""
    __tablename__ = 'device_address'
    id = db.Column(db.Integer, primary_key=True)
    agent_id = db.Column(db.ForeignKey(u'agent.id', ondelete=u'CASCADE', onupdate=u'CASCADE'), nullable=False)
    region = db.Column(db.String(128), nullable=False, server_default="")
    address = db.Column(db.String(128), nullable=False, server_default="")
    ctime = db.Column(db.DateTime, nullable=False)
    utime = db.Column(db.DateTime, nullable=False)
    status = db.Column(db.Integer, nullable=False, server_default="0")

    def __init__(self, agent_id, region, address):
        self.agent_id = agent_id
        self.region = region
        self.address = address
        now = datetime.now()
        self.ctime = now
        self.utime = now

    def __repr__(self):
        return '<DeviceAddress: %r %r>' % (self.region, self.address)
"""
@ 设备表
agent_id 设备的起始代理id
owner_agent_id 设备此时拥有者的代理id
salesman_agent_id 设备此时的业务员代理id
l4 总代理提成比例
l3 省级代理提成比例
l2 市级代理提成比例
l1 经销商提成比例
sl1 一级业务员提成比例
sl2 二级业务员提成比例
sl3 三级业务员提成比例
nopay 无需支付
product_unit_price 产品的单位价格
product_min_money 产品最小投币金额
product_unit 产品的单位
"""
class Device(db.Model):
    """A pay-per-use device identified by its IMEI.

    agent_id is the agent the device started under, owner_agent_id its
    current owner, salesman_agent_id its current salesman agent.
    l4/l3/l2/l1 are the commission ratios for the four agent levels and
    sl1..sl3 for the three salesman levels; __init__ gives the whole
    share to the top level (l4 = 1).  nopay marks devices usable without
    payment, coupon marks coupon support.  product_* columns describe the
    unit price (in fen -- TODO confirm), minimum coin amount and unit label.
    """
    __tablename__ = 'device'
    id = db.Column(db.Integer, primary_key=True)
    imei = db.Column(db.String(32), nullable=False, unique=True, index=True)
    cat = db.Column(db.Integer, nullable=False, server_default="0")  # device category
    agent_id = db.Column(db.Integer, nullable=False, server_default="0")
    owner_agent_id = db.Column(db.Integer, nullable=False)
    salesman_agent_id = db.Column(db.Integer, nullable=False)
    address_id = db.Column(db.Integer, nullable=False, server_default="0")
    map_display = db.Column(db.Integer, nullable=False, server_default="0")
    l4 = db.Column(db.Float, nullable=False)
    l3 = db.Column(db.Float, nullable=False)
    l2 = db.Column(db.Float, nullable=False)
    l1 = db.Column(db.Float, nullable=False)
    sl1 = db.Column(db.Float, nullable=False)
    sl2 = db.Column(db.Float, nullable=False)
    sl3 = db.Column(db.Float, nullable=False)
    remark = db.Column(db.String(64), nullable=False, server_default="")
    nopay = db.Column(db.Integer, nullable=False, server_default="0")
    coupon = db.Column(db.Integer, nullable=False, server_default="0")
    product_unit_price = db.Column(db.Integer, nullable=False, server_default="100")
    product_min_money = db.Column(db.Integer, nullable=False, server_default="100")
    product_unit = db.Column(db.String(10), nullable=False, server_default="个")
    product_unit_pluse = db.Column(db.Integer, nullable=False, server_default="1")
    low = db.Column(db.Integer, nullable=False, server_default='50')
    high = db.Column(db.Integer, nullable=False, server_default='50')
    ctime = db.Column(db.DateTime, nullable=False)
    utime = db.Column(db.DateTime, nullable=False)
    status = db.Column(db.Integer, nullable=False, server_default="0")

    def __init__(self, imei, cat, agent_id, owner_agent_id, address_id):
        self.imei = imei
        self.agent_id = agent_id
        self.owner_agent_id = owner_agent_id
        self.salesman_agent_id = 0  # no salesman assigned yet
        self.address_id = address_id
        self.cat = cat
        # commission split: everything to the top-level agent by default
        self.l1 = 0
        self.l2 = 0
        self.l3 = 0
        self.l4 = 1
        self.sl1 = 0
        self.sl2 = 0
        self.sl3 = 0
        self.remark = ''
        now = datetime.now()
        self.ctime = now
        self.utime = now

    def __repr__(self):
        return '<Device: %r>' % self.imei
class DeviceDistribution(db.Model):
    """Audit row for a device being handed from one agent to another,
    together with the commission rate granted to the receiver."""
    __tablename__ = 'device_distribution'
    id = db.Column(db.Integer, primary_key=True)
    device_id = db.Column(db.Integer, nullable=False)
    imei = db.Column(db.String(32), nullable=False)  # denormalized device IMEI
    from_agent = db.Column(db.Integer, nullable=False)
    to_agent = db.Column(db.Integer, nullable=False)
    rate = db.Column(db.Float, nullable=False, server_default='0.0')
    ctime = db.Column(db.DateTime, nullable=False)
    status = db.Column(db.Integer, nullable=False, server_default="0")

    def __init__(self, device_id, imei, from_agent, to_agent, rate):
        self.device_id = device_id
        self.imei = imei
        self.from_agent = from_agent
        self.to_agent = to_agent
        self.rate = rate
        now = datetime.now()
        self.ctime = now
        self.status = 0

    def __repr__(self):
        return '<DeviceDistribution: %r>' % self.id
class DeviceDistributionSalesman(db.Model):
    """Audit row for a device being handed between salesman agents;
    like DeviceDistribution but without a commission rate."""
    __tablename__ = 'device_distribution_salesman'
    id = db.Column(db.Integer, primary_key=True)
    device_id = db.Column(db.Integer, nullable=False)
    imei = db.Column(db.String(32), nullable=False)  # denormalized device IMEI
    from_agent = db.Column(db.Integer, nullable=False)
    to_agent = db.Column(db.Integer, nullable=False)
    ctime = db.Column(db.DateTime, nullable=False)
    status = db.Column(db.Integer, nullable=False, server_default="0")

    def __init__(self, device_id, imei, from_agent, to_agent):
        self.device_id = device_id
        self.imei = imei
        self.from_agent = from_agent
        self.to_agent = to_agent
        now = datetime.now()
        self.ctime = now
        self.status = 0

    def __repr__(self):
        return '<DeviceDistributionSalesman: %r>' % self.id
class DeviceProduct(db.Model):
    """Association row linking a device to a product it offers."""
    __tablename__ = "device_product"
    id = db.Column(db.Integer, primary_key=True)
    device_id = db.Column(db.ForeignKey(u"device.id", ondelete=u"CASCADE", onupdate=u"CASCADE"), nullable=False)
    product_id = db.Column(db.ForeignKey(u"product.id", ondelete=u"CASCADE", onupdate=u"CASCADE"), nullable=False)
    ctime = db.Column(db.DateTime, nullable=False)
    utime = db.Column(db.DateTime, nullable=False)
    status = db.Column(db.Integer, nullable=False, server_default="0")

    def __init__(self, device_id, product_id):
        self.device_id, self.product_id = device_id, product_id
        # both timestamps share one creation instant
        self.ctime = self.utime = datetime.now()

    def __repr__(self):
        return "<DeviceProduct: %r>" % self.id
class DailyIncome(db.Model):
    """Per-agent daily aggregate: online/offline income, total consumption
    and total balance for one calendar date."""
    __tablename__ = 'daily_income'
    id = db.Column(db.Integer, primary_key=True)
    agent_id = db.Column(db.Integer, nullable=False)
    online_income = db.Column(db.Integer, nullable=False)
    offline_income = db.Column(db.Integer, nullable=False)
    total_consume = db.Column(db.Integer, nullable=False)
    total_balance = db.Column(db.Integer, nullable=False)
    date = db.Column(db.Date, nullable=False)  # the day the figures cover
    ctime = db.Column(db.DateTime, nullable=False)
    utime = db.Column(db.DateTime, nullable=False)
    status = db.Column(db.Integer, nullable=False, server_default="0")

    def __init__(self, agent_id, online_income, offline_income, total_consume, total_balance, date):
        self.agent_id = agent_id
        self.online_income = online_income
        self.offline_income = offline_income
        self.total_consume = total_consume
        self.total_balance = total_balance
        self.date = date
        now = datetime.now()
        self.ctime = now
        self.utime = now

    def __repr__(self):
        return '<DailyIncome: %r>' % self.id
"""
@ 代理设置表
min_withdraw 最小取现金额
l1,l2,l3 分别对应一级,二级,三级代理商的提成
withdraw_fee 取现手续费
wallet_pay_enable 是否支持钱包支付
trans_url 透传回调地址
"""
class AgentSetting(db.Model):
    """Per-agent configuration.

    min_withdraw is the minimum withdrawal amount; l1/l2/l3 are the
    commission ratios for level-1..3 sub-agents; withdraw_fee is the
    withdrawal fee rate; wallet_pay_enable toggles wallet payment;
    trans_url is the pass-through callback URL (empty by default).
    """
    __tablename__ = 'agent_setting'
    id = db.Column(db.Integer, primary_key=True)
    agent_id = db.Column(db.ForeignKey(u'agent.id', ondelete=u'CASCADE', onupdate=u'CASCADE'), nullable=False)
    min_withdraw = db.Column(db.Integer, nullable=False)
    l1 = db.Column(db.Float, nullable=False, server_default="0.0")
    l2 = db.Column(db.Float, nullable=False, server_default="0.0")
    l3 = db.Column(db.Float, nullable=False, server_default="0.0")
    withdraw_fee = db.Column(db.Float, nullable=False, server_default="0.0")
    wallet_pay_enable = db.Column(db.Integer, nullable=False, server_default="1")
    trans_url = db.Column(db.String(128), nullable=False, server_default='')
    ctime = db.Column(db.DateTime, nullable=False)
    utime = db.Column(db.DateTime, nullable=False)
    status = db.Column(db.Integer, nullable=False, server_default="0")

    def __init__(self, agent_id, min_withdraw, l1, l2, l3, withdraw_fee, wallet_pay_enable):
        self.agent_id = agent_id
        self.min_withdraw = min_withdraw
        self.l1 = l1
        self.l2 = l2
        self.l3 = l3
        self.withdraw_fee = withdraw_fee
        self.wallet_pay_enable = wallet_pay_enable
        self.trans_url = ''
        now = datetime.now()
        self.ctime = now
        self.utime = now

    def __repr__(self):
        return '<AgentSetting: %r>' % self.id
"""
@ 钱包表
role 钱包所有者的角色
0表示普通微信用户 1表示代理商用户
withdrawable_balance 可提现金额
"""
class Wallet(db.Model):
    """A money wallet.

    role identifies the owner kind: 0 = ordinary WeChat user,
    1 = agent user.  balance is the total; withdrawable_balance is the
    portion that may be withdrawn.
    """
    __tablename__ = 'wallet'
    id = db.Column(db.Integer, primary_key=True)
    role = db.Column(db.Integer, nullable=False, server_default='0')
    user_id = db.Column(db.Integer, nullable=False)
    agent_id = db.Column(db.Integer, nullable=False)
    balance = db.Column(db.Float, nullable=False, server_default='0')
    withdrawable_balance = db.Column(db.Float, nullable=False, server_default='0')
    ctime = db.Column(db.DateTime, nullable=False)
    utime = db.Column(db.DateTime, nullable=False)
    status = db.Column(db.Integer, nullable=False, server_default="0")

    def __init__(self, user_id, role, agent_id):
        self.user_id = user_id
        self.role = role
        self.agent_id = agent_id
        now = datetime.now()
        self.ctime = now
        self.utime = now

    def __repr__(self):
        return '<Wallet: %r>' % self.id
"""
@ 钱包交易收据表
role 钱包所有者的角色
0表示普通微信用户 1表示代理商用户
trade_type 交易类型(奇数收入,偶数支出)
1. 代理提成收入
2. 代理微信取现
3. 用户充值
4. 用户消费
5. 转账转入
6. 转账转出
7. 用户退款
8. 代理提成退款
payment_no 微信取现单号
默认为'--',当trade_type为2时,该值为微信返回值
receipt 收据,进入钱包余额
withdrawable_receipt 可提现收据,进入钱包可提现余额
"""
# Wallet trade types (odd values are income, even values are expenditure):
WALLET_TRADE_TYPE_TICHENG = 1          # agent commission income
WALLET_TRADE_TYPE_WECHAT_WITHDRAW = 2  # agent WeChat withdrawal
WALLET_TRADE_TYPE_DEPOSIT = 3          # user top-up
WALLET_TRADE_TYPE_EXPENDITURE = 4      # user consumption
WALLET_TRADE_TYPE_TRANSFER_IN = 5      # transfer in
WALLET_TRADE_TYPE_TRANSFER_OUT = 6     # transfer out
WALLET_TRADE_TYPE_REFUND = 7           # user refund
WALLET_TRADE_TYPE_REFUND_TICHENG = 8   # agent commission refund (reversal)
class WalletReceipt(db.Model):
    """A wallet transaction receipt.

    role: 0 = ordinary WeChat user, 1 = agent user.  trade_type is one of
    the WALLET_TRADE_TYPE_* constants (odd income, even expenditure).
    receipt hits the wallet balance; withdrawable_receipt hits the
    withdrawable balance.  payment_no carries the WeChat withdrawal
    number when trade_type is WECHAT_WITHDRAW.
    """
    __tablename__ = 'wallet_receipt'
    id = db.Column(db.Integer, primary_key=True)
    role = db.Column(db.Integer, nullable=False, server_default='0')
    user_id = db.Column(db.Integer, nullable=False)
    agent_id = db.Column(db.Integer, nullable=False)
    wallet_id = db.Column(db.ForeignKey(u'wallet.id', ondelete=u'CASCADE', onupdate=u'CASCADE'), nullable=False)
    trade_type = db.Column(db.Integer, nullable=False)
    receipt = db.Column(db.Float, nullable=False)
    withdrawable_receipt = db.Column(db.Float, nullable=False)
    payment_no = db.Column(db.String(64), nullable=False)
    remark = db.Column(db.String(128), nullable=False)
    ctime = db.Column(db.DateTime, nullable=False)
    utime = db.Column(db.DateTime, nullable=False)
    status = db.Column(db.Integer, nullable=False, server_default="0")

    def __init__(self, role, user_id, agent_id, wallet_id, trade_type, receipt, withdrawable_receipt, remark, payment_no):
        self.role = role
        self.user_id = user_id
        self.agent_id = agent_id
        self.wallet_id = wallet_id
        self.trade_type = trade_type
        self.receipt = receipt
        self.withdrawable_receipt = withdrawable_receipt
        self.remark = remark
        self.payment_no = payment_no
        now = datetime.now()
        self.ctime = now
        self.utime = now

    def __repr__(self):
        return '<WalletReceipt: id,%r receipt,%r>' % (self.id, self.receipt)
"""
@ 代理提现表
wechat_agent_id 对接的公众号的代理id
"""
class PayToUser(db.Model):
    """An agent withdrawal paid out to a WeChat user.

    wechat_agent_id is the agent id of the linked WeChat official
    account.  payment_no and notify_res start empty and are filled in
    from the WeChat payout response later.  The fee_rate column/parameter
    has been commented out but is kept for reference.
    """
    __tablename__ = 'pay_to_user'
    id = db.Column(db.Integer, primary_key=True)
    wechat_agent_id = db.Column(db.Integer, nullable=False)
    openluat_user_id = db.Column(db.Integer, nullable=False)
    to_openid = db.Column(db.String(128), nullable=False)
    to_nickname = db.Column(db.String(64), nullable=False)
    trade_no = db.Column(db.String(24), nullable=False, unique=True, index=True)
    total_fee = db.Column(db.Integer, nullable=False)
    # fee_rate = db.Column(db.Float, nullable=False, server_default='0.006')
    desc = db.Column(db.String(128), nullable=False)
    remark = db.Column(db.String(128), nullable=False)
    payment_no = db.Column(db.String(64), nullable=False)
    notify_res = db.Column(db.Text, nullable=False)
    ctime = db.Column(db.DateTime, nullable=False)
    utime = db.Column(db.DateTime, nullable=False)
    status = db.Column(db.Integer, nullable=False, server_default="0")

    def __init__(self, wechat_agent_id, user_id, to_openid, to_nickname, trade_no,
                 total_fee, desc, remark):
        # def __init__(self, wechat_agent_id, user_id, to_openid, to_nickname, trade_no,
        #              total_fee, fee_rate, desc, remark):
        self.wechat_agent_id = wechat_agent_id
        self.openluat_user_id = user_id
        self.to_openid = to_openid
        self.to_nickname = to_nickname
        self.trade_no = trade_no
        self.total_fee = total_fee
        # self.fee_rate = fee_rate
        self.desc = desc
        self.remark = remark
        self.payment_no = ''   # filled when WeChat confirms the payout
        self.notify_res = ''   # raw notify payload, filled on callback
        now = datetime.now()
        self.ctime = now
        self.utime = now

    def __repr__(self):
        return '<PayToUser: %r>' % self.id
"""
线下投币记录表
time 投币时间
coin 投币金额,单位分
"""
class OfflineCoin(db.Model):
    """Offline coin-insertion record for a device.

    time is when the coins were inserted; coin is the amount in fen
    (1/100 yuan).
    """
    __tablename__ = 'offline_coin'
    id = db.Column(db.Integer, primary_key=True)
    imei = db.Column(db.String(32), nullable=False, index=True)
    time = db.Column(db.DateTime, nullable=False)
    coin = db.Column(db.Integer, nullable=False)
    ctime = db.Column(db.DateTime, nullable=False)
    utime = db.Column(db.DateTime, nullable=False)
    status = db.Column(db.Integer, nullable=False, server_default="0")

    def __init__(self, imei, time, coin):
        self.imei = imei
        self.time = time
        self.coin = coin
        now = datetime.now()
        self.ctime = now
        self.utime = now

    def __repr__(self):
        return '<OfflineCoin: %r>' % self.id
# """
# mqtt异步缓存表
# status 状态
# 0 mqtt未返回
# 1 mqtt返回成功,http返回成功
# 2 mqtt返回失败
# 3 http返回失败
# """
# class MqttAsync(db.Model):
# __tablename__ = ''
# id = db.Column(db.Integer, primary_key=True)
# greenlet_id = db.Column(db.Integer, nullable=False, index=True)
# http_url = db.Column(db.String(128), nullable=False)
# mqtt_res = db.Column(db.Text, nullable=False)
# http_res = db.Column(db.Text, nullable=False)
# ctime = db.Column(db.DateTime, nullable=False)
# utime = db.Column(db.DateTime, nullable=False)
# status = db.Column(db.Integer, nullable=False, server_default="0")
# def __init__(self, greenlet_id, http_url):
# self.greenlet_id = greenlet_id
# self.http_url = http_url
# self.mqtt_res = ''
# self.http_res = ''
# now = datetime.now()
# self.ctime = now
# self.utime = now
# def __repr__(self):
# return '<MqttAsync: %r>' % self.id
"""
@ 管理员用户表
role 1管理员,2超级管理员,唯一
"""
class God(db.Model):
    """Administrator account.

    role: 1 = administrator, 2 = super administrator (unique).
    phone and email are globally unique.
    """
    __tablename__ = 'god'
    id = db.Column(db.Integer, primary_key=True)
    openluat_user_id = db.Column(db.Integer, nullable=False)
    role = db.Column(db.Integer, nullable=False)
    name = db.Column(db.String(16), nullable=False)
    phone = db.Column(db.String(32), nullable=False, unique=True)
    email = db.Column(db.String(64), nullable=False, unique=True)
    remark = db.Column(db.String(128), nullable=False, server_default="")
    ctime = db.Column(db.DateTime, nullable=False)
    utime = db.Column(db.DateTime, nullable=False)
    status = db.Column(db.Integer, nullable=False, server_default="0")

    def __init__(self, openluat_user_id, role, name, phone, email, remark):
        self.openluat_user_id = openluat_user_id
        self.role = role
        self.name = name
        self.phone = phone
        self.email = email
        self.remark = remark
        now = datetime.now()
        self.ctime = now
        self.utime = now

    def __repr__(self):
        return '<God: %r>' % self.id
class GodInfo(db.Model):
    """UI branding for an administrator: logo image (defaults to
    config.DEFAULT_LOGO) and page title.  Timestamps are maintained by
    the database via func.now(), so __init__ does not set them."""
    __tablename__ = 'god_info'
    id = db.Column(db.Integer, primary_key=True)
    god_id = db.Column(db.ForeignKey(u'god.id', ondelete=u'CASCADE', onupdate=u'CASCADE'), nullable=False)
    logo = db.Column(db.String(128), nullable=False, server_default=config.DEFAULT_LOGO)
    title = db.Column(db.String(32), nullable=False, server_default='码夫支付管理系统')
    ctime = db.Column(db.DateTime, nullable=False, server_default=func.now())
    utime = db.Column(db.DateTime, nullable=False, server_default=func.now(), onupdate=func.now())
    status = db.Column(db.Integer, nullable=False, server_default='0')

    def __init__(self, god_id, logo, title):
        self.god_id = god_id
        self.logo = logo
        self.title = title

    def __repr__(self):
        return '<GodInfo for god: %r>' % self.god_id
class GodAgent(db.Model):
    """Association row linking an administrator (god) to an agent."""
    __tablename__ = "god_agent"
    id = db.Column(db.Integer, primary_key=True)
    god_id = db.Column(db.Integer, nullable=False)
    agent_id = db.Column(db.Integer, nullable=False)
    ctime = db.Column(db.DateTime, nullable=False)
    utime = db.Column(db.DateTime, nullable=False)
    status = db.Column(db.Integer, nullable=False, server_default="0")

    def __init__(self, god_id, agent_id):
        self.god_id, self.agent_id = god_id, agent_id
        # both timestamps share one creation instant
        self.ctime = self.utime = datetime.now()

    def __repr__(self):
        return "<GodAgent: %r>" % self.id
class WechatConfig(db.Model):
    """Credentials and endpoints for one WeChat official account /
    merchant: app id/secret, merchant id/key, OAuth redirect URLs,
    API-cert key path and account QR-code image."""
    __tablename__ = 'wechat_config'
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(16), nullable=False)
    appid = db.Column(db.String(32), nullable=False)
    appsecret = db.Column(db.String(64), nullable=False)
    mchid = db.Column(db.String(32), nullable=False)
    mchkey = db.Column(db.String(64), nullable=False)
    redirecturl = db.Column(db.String(128), nullable=False)
    redirect_bind_url = db.Column(db.String(128), nullable=False)
    key_path = db.Column(db.String(128), nullable=False)
    qrcode_img = db.Column(db.String(128), nullable=False)
    ctime = db.Column(db.DateTime, nullable=False)
    utime = db.Column(db.DateTime, nullable=False)
    status = db.Column(db.Integer, nullable=False, server_default="0")

    def __init__(self, name, appid, appsecret, mchid, mchkey, redirecturl, redirect_bind_url, key_path, qrcode_img):
        self.name = name
        self.appid = appid
        self.appsecret = appsecret
        self.mchid = mchid
        self.mchkey = mchkey
        self.redirecturl = redirecturl
        self.redirect_bind_url = redirect_bind_url
        self.key_path = key_path
        self.qrcode_img = qrcode_img
        now = datetime.now()
        self.ctime = now
        self.utime = now

    def __repr__(self):
        return '<WechatConfig: %r>' % self.id
class AgentWechat(db.Model):
    """Association row linking an agent to a WeChat configuration."""
    __tablename__ = "agent_wechat"
    id = db.Column(db.Integer, primary_key=True)
    agent_id = db.Column(db.ForeignKey(u"agent.id", ondelete=u"CASCADE", onupdate=u"CASCADE"), nullable=False)
    wechat_config_id = db.Column(db.ForeignKey(u"wechat_config.id", ondelete=u"CASCADE", onupdate=u"CASCADE"), nullable=False)
    ctime = db.Column(db.DateTime, nullable=False)
    utime = db.Column(db.DateTime, nullable=False)
    status = db.Column(db.Integer, nullable=False, server_default="0")

    def __init__(self, agent_id, wechat_config_id):
        self.agent_id, self.wechat_config_id = agent_id, wechat_config_id
        # both timestamps share one creation instant
        self.ctime = self.utime = datetime.now()

    def __repr__(self):
        return "<AgentWechat: %r>" % self.id
class WechatBind(db.Model):
    """A time-limited WeChat binding for an admin user (one row per
    admin_uid); expires_at marks when the binding lapses."""
    __tablename__ = 'wechat_bind'
    id = db.Column(db.Integer, primary_key=True)
    admin_uid = db.Column(db.Integer, unique=True, nullable=False)
    expires_at = db.Column(db.DateTime, nullable=False)
    ctime = db.Column(db.DateTime, nullable=False)
    utime = db.Column(db.DateTime, nullable=False)
    status = db.Column(db.Integer, nullable=False, server_default="0")

    def __init__(self, admin_uid, expires_at):
        self.admin_uid = admin_uid
        self.expires_at = expires_at
        now = datetime.now()
        self.ctime = now
        self.utime = now

    def __repr__(self):
        return '<WechatBind: %r>' % self.id
class AliUser(db.Model):
    """Alipay identity mapping for a mafu_user; both user_id_a and
    ali_user_id are unique Alipay-side identifiers."""
    __tablename__ = 'ali_user'
    id = db.Column(db.Integer, primary_key=True)
    user_id = db.Column(db.ForeignKey(u'mafu_user.id', ondelete=u'CASCADE', onupdate=u'CASCADE'), nullable=False)
    user_id_a = db.Column(db.String(64), nullable=False, unique=True)
    ali_user_id = db.Column(db.String(64), nullable=False, unique=True)
    ctime = db.Column(db.DateTime, nullable=False)
    utime = db.Column(db.DateTime, nullable=False)
    status = db.Column(db.Integer, nullable=False, server_default="0")

    def __init__(self, user_id, user_id_a, ali_user_id):
        self.user_id = user_id
        self.user_id_a = user_id_a
        self.ali_user_id = ali_user_id
        now = datetime.now()
        self.ctime = now
        self.utime = now

    def __repr__(self):
        return '<AliUser: %r>' % self.id
class AliConfig(db.Model):
    """Alipay app credentials: app id plus filesystem paths to the
    private and public key files."""
    __tablename__ = 'ali_config'
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(16), nullable=False)
    appid = db.Column(db.String(32), nullable=False)
    priv_path = db.Column(db.String(128), nullable=False)
    pub_path = db.Column(db.String(128), nullable=False)
    ctime = db.Column(db.DateTime, nullable=False)
    utime = db.Column(db.DateTime, nullable=False)
    status = db.Column(db.Integer, nullable=False, server_default="0")

    def __init__(self, name, appid, priv_path, pub_path):
        self.name = name
        self.appid = appid
        self.priv_path = priv_path
        self.pub_path = pub_path
        now = datetime.now()
        self.ctime = now
        self.utime = now

    def __repr__(self):
        return '<AliConfig: %r>' % self.id
class AgentAli(db.Model):
    """Association row linking an agent to an Alipay configuration."""
    __tablename__ = "agent_ali"
    id = db.Column(db.Integer, primary_key=True)
    agent_id = db.Column(db.ForeignKey(u"agent.id", ondelete=u"CASCADE", onupdate=u"CASCADE"), nullable=False)
    ali_config_id = db.Column(db.ForeignKey(u"ali_config.id", ondelete=u"CASCADE", onupdate=u"CASCADE"), nullable=False)
    ctime = db.Column(db.DateTime, nullable=False)
    utime = db.Column(db.DateTime, nullable=False)
    status = db.Column(db.Integer, nullable=False, server_default="0")

    def __init__(self, agent_id, ali_config_id):
        self.agent_id, self.ali_config_id = agent_id, ali_config_id
        # both timestamps share one creation instant
        self.ctime = self.utime = datetime.now()

    def __repr__(self):
        return "<AgentAli: %r>" % self.id
class UserStartCounter(db.Model):
    """Counter of device starts per (user, device IMEI) pair;
    count defaults to 0 via server_default."""
    __tablename__ = 'user_start_counter'
    id = db.Column(db.Integer, primary_key=True)
    user_id = db.Column(db.ForeignKey(u'mafu_user.id', ondelete=u'CASCADE', onupdate=u'CASCADE'), nullable=False)
    imei = db.Column(db.String(32), nullable=False)
    count = db.Column(db.Integer, nullable=False, server_default="0")
    ctime = db.Column(db.DateTime, nullable=False)
    utime = db.Column(db.DateTime, nullable=False)
    status = db.Column(db.Integer, nullable=False, server_default="0")

    def __init__(self, user_id, imei):
        self.user_id = user_id
        self.imei = imei
        now = datetime.now()
        self.ctime = now
        self.utime = now

    def __repr__(self):
        return '<UserStartCounter: %r>' % self.id
class Advertisement(db.Model):
    """An advertisement owned by an agent: image plus click-through URL.

    using and deleted are 0/1 flags (both default 0); they are separate
    from the generic status column.
    """
    __tablename__ = 'advertisement'
    id = db.Column(db.Integer, primary_key=True)
    agent_id = db.Column(db.ForeignKey(u'agent.id', ondelete=u'CASCADE', onupdate=u'CASCADE'), nullable=False)
    name = db.Column(db.String(16), nullable=False)
    desc = db.Column(db.String(64), nullable=False)
    img = db.Column(db.String(128), nullable=False)
    url = db.Column(db.String(128), nullable=False)
    using = db.Column(db.Integer, nullable=False, server_default="0")
    deleted = db.Column(db.Integer, nullable=False, server_default='0')
    ctime = db.Column(db.DateTime, nullable=False)
    utime = db.Column(db.DateTime, nullable=False)
    status = db.Column(db.Integer, nullable=False, server_default="0")

    def __init__(self, agent_id, name, desc, img, url):
        self.agent_id = agent_id
        self.name = name
        self.desc = desc
        self.img = img
        self.url = url
        now = datetime.now()
        self.ctime = now
        self.utime = now

    def __repr__(self):
        return '<Advertisement: %r>' % self.id
class DeviceAd(db.Model):
    """Association row linking an advertisement to a device it shows on."""
    __tablename__ = "device_advertisement"
    id = db.Column(db.Integer, primary_key=True)
    device_id = db.Column(db.ForeignKey(u"device.id", ondelete=u"CASCADE", onupdate=u"CASCADE"), nullable=False)
    ad_id = db.Column(db.ForeignKey(u"advertisement.id", ondelete=u"CASCADE", onupdate=u"CASCADE"), nullable=False)
    ctime = db.Column(db.DateTime, nullable=False)
    utime = db.Column(db.DateTime, nullable=False)
    status = db.Column(db.Integer, nullable=False, server_default="0")

    def __init__(self, device_id, ad_id):
        self.device_id, self.ad_id = device_id, ad_id
        # both timestamps share one creation instant
        self.ctime = self.utime = datetime.now()

    def __repr__(self):
        return "<DeviceAd: %r>" % self.id
class Advertiser(db.Model):
    """An advertiser account hooked to an agent (hook_agent_id).

    ctime/utime are maintained by the database via func.now(), so
    __init__ does not set them.
    """
    __tablename__ = 'advertiser'
    id = db.Column(db.Integer, primary_key=True)
    openluat_user_id = db.Column(db.Integer, nullable=False)
    hook_agent_id = db.Column(db.Integer, nullable=False)
    name = db.Column(db.String(32), nullable=False)
    phone = db.Column(db.String(24), nullable=False)
    email = db.Column(db.String(64), nullable=False)
    desc = db.Column(db.String(128), nullable=False)
    address = db.Column(db.String(128), nullable=False)
    remark = db.Column(db.String(128), nullable=False)
    ctime = db.Column(db.DateTime, nullable=False, server_default=func.now())
    utime = db.Column(db.DateTime, nullable=False, server_default=func.now(), onupdate=func.now())
    status = db.Column(db.Integer, nullable=False, server_default="0")

    def __init__(self, openluat_user_id, hook_agent_id, name, phone, email, desc, address, remark):
        self.openluat_user_id = openluat_user_id
        self.hook_agent_id = hook_agent_id
        self.name = name
        self.phone = phone
        self.email = email
        self.desc = desc
        self.address = address
        self.remark = remark

    def __repr__(self):
        return '<Advertiser: %r>' % self.id
class Coupon(db.Model):
    """A batch of coupons bought by an advertiser.

    total is the number of codes in the batch, prefix the unique code
    prefix, pay_id the pay record that bought the batch, and paid a flag
    (default 0).  Timestamps are maintained by the database via
    func.now().
    """
    __tablename__ = 'coupon'
    id = db.Column(db.Integer, primary_key=True)
    agent_id = db.Column(db.Integer, nullable=False)
    advertiser_id = db.Column(db.ForeignKey('advertiser.id', ondelete='CASCADE', onupdate='CASCADE'), nullable=False)
    product_id = db.Column(db.ForeignKey('product.id', ondelete='CASCADE', onupdate='CASCADE'))
    pay_id = db.Column(db.ForeignKey('pay.id', ondelete='CASCADE', onupdate='CASCADE'), nullable=False)
    title = db.Column(db.String(32), nullable=False)
    desc = db.Column(db.String(64), nullable=False)
    total = db.Column(db.Integer, nullable=False)
    prefix = db.Column(db.String(10), nullable=False, unique=True)
    img = db.Column(db.String(128), nullable=False)
    paid = db.Column(db.Integer, nullable=False, server_default="0")
    ctime = db.Column(db.DateTime, nullable=False, server_default=func.now())
    utime = db.Column(db.DateTime, nullable=False, server_default=func.now(), onupdate=func.now())
    status = db.Column(db.Integer, nullable=False, server_default='0')

    def __init__(self, agent_id, advertiser_id, product_id, pay_id, title, desc, prefix, total, img):
        self.agent_id = agent_id
        self.advertiser_id = advertiser_id
        self.product_id = product_id
        self.pay_id = pay_id
        self.title = title
        self.desc = desc
        self.prefix = prefix
        self.total = total
        self.img = img

    def __repr__(self):
        return '<Coupon: %r>' % self.id
class DeviceCoupon(db.Model):
    """Association row linking a coupon batch to a device it is valid on.

    Timestamps are maintained entirely by the database (func.now()).
    """
    __tablename__ = "device_coupon"
    id = db.Column(db.Integer, primary_key=True)
    coupon_id = db.Column(db.ForeignKey("coupon.id", ondelete="CASCADE", onupdate="CASCADE"), nullable=False)
    device_id = db.Column(db.ForeignKey("device.id", ondelete="CASCADE", onupdate="CASCADE"), nullable=False)
    ctime = db.Column(db.DateTime, nullable=False, server_default=func.now())
    utime = db.Column(db.DateTime, nullable=False, server_default=func.now(), onupdate=func.now())
    status = db.Column(db.Integer, nullable=False, server_default="0")

    def __init__(self, coupon_id, device_id):
        self.coupon_id, self.device_id = coupon_id, device_id

    def __repr__(self):
        return "<DeviceCoupon: %r>" % self.id
class CouponReceipt(db.Model):
    """An individual coupon code issued from a coupon batch.

    code is globally unique and indexed; started/used are 0/1 progress
    flags (default 0).  user_id defaults to 0 until a user claims the
    code.  Timestamps are maintained by the database via func.now().
    """
    __tablename__ = 'coupon_receipt'
    id = db.Column(db.Integer, primary_key=True)
    coupon_id = db.Column(db.ForeignKey("coupon.id", ondelete="CASCADE", onupdate="CASCADE"), nullable=False)
    user_id = db.Column(db.Integer, nullable=False, server_default='0')
    code = db.Column(db.String(32), nullable=False, index=True, unique=True)
    started = db.Column(db.Integer, nullable=False, server_default="0")
    used = db.Column(db.Integer, nullable=False, server_default="0")
    ctime = db.Column(db.DateTime, nullable=False, server_default=func.now())
    utime = db.Column(db.DateTime, nullable=False, server_default=func.now(), onupdate=func.now())
    status = db.Column(db.Integer, nullable=False, server_default='0')

    def __init__(self, coupon_id, user_id, code):
        self.coupon_id = coupon_id
        self.user_id = user_id
        self.code = code

    def __repr__(self):
        return '<CouponReceipt: %r>' % self.code
| [
"zacon365@gmail.com"
] | zacon365@gmail.com |
ce79c51c61434ab88a8e781669db0c84c32eb6de | 253fcf11f4c5d78d99123eac929a36e397270f48 | /day5/day-5-2.py | f0032d17b9dc6850b74aaea6b4761909411c14e8 | [] | no_license | TheShrug/Advent-of-Code | 1074a24f3dba861fde0727d420b79428267ad067 | 62c3d035f24560ed14fde877d5aeff2a38abf3dd | refs/heads/master | 2020-04-09T15:13:13.258338 | 2019-02-28T16:56:38 | 2019-02-28T16:56:38 | 160,416,087 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,478 | py | def fully_react_polymer(polymer_string):
string = polymer_string
current_index = 0
while current_index < string.__len__():
if current_index != string.__len__() - 1:
if string[current_index].islower():
if string[current_index + 1].isupper() and string[current_index + 1] == string[current_index].upper():
string = string[:current_index] + string[current_index + 2:]
current_index -= 2
if string[current_index].isupper():
if string[current_index + 1].islower() and string[current_index + 1] == string[current_index].lower():
string = string[:current_index] + string[current_index + 2:]
current_index -= 2
current_index += 1
return string
full_polymer = open("input.txt").read()
alphabet = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z']
char_list = []
for index, letter in enumerate(alphabet):
polymer = full_polymer.replace(letter.lower(), '')
polymer = polymer.replace(letter.upper(), '')
fully_reacted_polymer = fully_react_polymer(polymer)
char_list.append([letter, fully_reacted_polymer.__len__()])
shortest_polymer = ['', 999999999]
for character in char_list:
if character[1] < shortest_polymer[1]:
shortest_polymer = character
print("length of the shortest polymer", shortest_polymer[1])
| [
"stewart.m.gordon@gmail.com"
] | stewart.m.gordon@gmail.com |
6db6663f7cee2d709649d0b6d0edd779bd94baec | 6ae8c2695d2f1c001af4bd7f7b61a1532e765a45 | /home/migrations/0001_initial.py | 8f978831311e500a686d5822330e1b0e810f925d | [] | no_license | sandro-pasquali/nrmp-django | 6fa1962d9107788fbd17c6cfc03fa19e0b7c79bc | 91f4b495bd6ed1024788bc36c7f18023f3559f8a | refs/heads/master | 2021-01-25T08:54:33.094578 | 2012-08-31T04:49:30 | 2012-08-31T04:49:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,923 | py | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Candidate'
db.create_table('home_candidate', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=200)),
))
db.send_create_signal('home', ['Candidate'])
# Adding model 'Program'
db.create_table('home_program', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=200)),
))
db.send_create_signal('home', ['Program'])
# Adding model 'CandidateRanklist'
db.create_table('home_candidateranklist', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('candidate', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['home.Candidate'])),
('program_rank', self.gf('django.db.models.fields.IntegerField')()),
))
db.send_create_signal('home', ['CandidateRanklist'])
# Adding M2M table for field program on 'CandidateRanklist'
db.create_table('home_candidateranklist_program', (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('candidateranklist', models.ForeignKey(orm['home.candidateranklist'], null=False)),
('program', models.ForeignKey(orm['home.program'], null=False))
))
db.create_unique('home_candidateranklist_program', ['candidateranklist_id', 'program_id'])
# Adding model 'ProgramRanklist'
db.create_table('home_programranklist', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('program', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['home.Program'])),
('candidate_rank', self.gf('django.db.models.fields.IntegerField')()),
))
db.send_create_signal('home', ['ProgramRanklist'])
# Adding M2M table for field candidate on 'ProgramRanklist'
db.create_table('home_programranklist_candidate', (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('programranklist', models.ForeignKey(orm['home.programranklist'], null=False)),
('candidate', models.ForeignKey(orm['home.candidate'], null=False))
))
db.create_unique('home_programranklist_candidate', ['programranklist_id', 'candidate_id'])
    def backwards(self, orm):
        """Reverse this migration: drop every table created by forwards()."""
        # Deleting model 'Candidate'
        db.delete_table('home_candidate')
        # Deleting model 'Program'
        db.delete_table('home_program')
        # Deleting model 'CandidateRanklist'
        db.delete_table('home_candidateranklist')
        # Removing M2M table for field program on 'CandidateRanklist'
        db.delete_table('home_candidateranklist_program')
        # Deleting model 'ProgramRanklist'
        db.delete_table('home_programranklist')
        # Removing M2M table for field candidate on 'ProgramRanklist'
        db.delete_table('home_programranklist_candidate')
models = {
'home.candidate': {
'Meta': {'object_name': 'Candidate'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'home.candidateranklist': {
'Meta': {'object_name': 'CandidateRanklist'},
'candidate': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['home.Candidate']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'program': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['home.Program']", 'symmetrical': 'False'}),
'program_rank': ('django.db.models.fields.IntegerField', [], {})
},
'home.program': {
'Meta': {'object_name': 'Program'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'home.programranklist': {
'Meta': {'object_name': 'ProgramRanklist'},
'candidate': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['home.Candidate']", 'symmetrical': 'False'}),
'candidate_rank': ('django.db.models.fields.IntegerField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'program': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['home.Program']"})
}
}
complete_apps = ['home'] | [
"ozandlb@gmail.com"
] | ozandlb@gmail.com |
655ff647c8559dc41793aa823380c655f5c10e9b | ebd3106842ac51a500898c2618e1c292125b57c3 | /models/shakeshake.py | d404871c21949a776be080587a760516f82acfe9 | [
"MIT"
] | permissive | SURF-2021/ups | 9593489a3b5194f45e1013bcdfbd1eafbf6a99b3 | b8a1d33326d3b65f2ae9b3554d1b42505b263ae7 | refs/heads/main | 2023-07-07T14:48:54.590409 | 2021-08-13T13:06:46 | 2021-08-13T13:06:46 | 398,540,312 | 0 | 0 | MIT | 2021-08-21T11:27:17 | 2021-08-21T11:27:16 | null | UTF-8 | Python | false | false | 11,443 | py | # Copyright (c) 2018, Curious AI Ltd. All rights reserved.
#
# This work is licensed under the Creative Commons Attribution-NonCommercial
# 4.0 International License. To view a copy of this license, visit
# http://creativecommons.org/licenses/by-nc/4.0/ or send a letter to
# Creative Commons, PO Box 1866, Mountain View, CA 94042, USA.
import sys
import math
import itertools
import torch
from torch import nn
from torch.nn import functional as F
from torch.autograd import Variable, Function
# import torch.utils.model_zoo as model_zoo
class ResNet224x224(nn.Module):
    """ResNet backbone for 224x224 inputs (ImageNet-style stem + 4 stages).

    `block` supplies the residual unit and must expose an
    `out_channels(planes, groups)` classmethod; `layers` gives the block
    count per stage; `downsample` selects the shortcut style ('basic'
    1x1 conv+BN, or 'shift_conv' for ShiftConvDownsample).
    """

    def __init__(self, block, layers, groups=1, num_classes=1000, downsample='basic'):
        super().__init__()
        assert len(layers) == 4
        self.downsample_mode = downsample
        self.inplanes = 64
        # Stem: 7x7 stride-2 conv, BN, ReLU, stride-2 max pool (ImageNet layout).
        self.conv1 = nn.Conv2d(3, self.inplanes, kernel_size=7, stride=2, padding=3,
                               bias=False)
        self.bn1 = nn.BatchNorm2d(self.inplanes)
        self.relu = nn.ReLU(inplace=True)
        self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
        # Four residual stages; width doubles and resolution halves per stage.
        self.layer1 = self._make_layer(block, channels, groups, layers[0])
        self.layer2 = self._make_layer(
            block, channels * 2, groups, layers[1], stride=2)
        self.layer3 = self._make_layer(
            block, channels * 4, groups, layers[2], stride=2)
        self.layer4 = self._make_layer(
            block, channels * 8, groups, layers[3], stride=2)
        self.avgpool = nn.AvgPool2d(7)
        self.fc = nn.Linear(block.out_channels(
            channels * 8, groups), num_classes)
        # He-style init for convs; BN starts as identity (weight 1, bias 0).
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
                m.weight.data.normal_(0, math.sqrt(2. / n))
            elif isinstance(m, nn.BatchNorm2d):
                m.weight.data.fill_(1)
                m.bias.data.zero_()

    def _make_layer(self, block, planes, groups, blocks, stride=1):
        """Stack `blocks` residual units; the first may downsample/widen."""
        downsample = None
        # A shortcut projection is needed whenever resolution or width changes.
        if stride != 1 or self.inplanes != block.out_channels(planes, groups):
            if self.downsample_mode == 'basic' or stride == 1:
                downsample = nn.Sequential(
                    nn.Conv2d(self.inplanes, block.out_channels(planes, groups),
                              kernel_size=1, stride=stride, bias=False),
                    nn.BatchNorm2d(block.out_channels(planes, groups)),
                )
            elif self.downsample_mode == 'shift_conv':
                downsample = ShiftConvDownsample(in_channels=self.inplanes,
                                                 out_channels=block.out_channels(planes, groups))
            else:
                assert False
        layers = []
        layers.append(block(self.inplanes, planes, groups, stride, downsample))
        self.inplanes = block.out_channels(planes, groups)
        for i in range(1, blocks):
            layers.append(block(self.inplanes, planes, groups))
        return nn.Sequential(*layers)

    def forward(self, x):
        # Stem -> 4 stages -> global average pool -> flatten -> classifier logits.
        x = self.conv1(x)
        x = self.bn1(x)
        x = self.relu(x)
        x = self.maxpool(x)
        x = self.layer1(x)
        x = self.layer2(x)
        x = self.layer3(x)
        x = self.layer4(x)
        x = self.avgpool(x)
        x = x.view(x.size(0), -1)
        return self.fc(x)
class ResNet32x32(nn.Module):
    """ResNet backbone for 32x32 inputs (CIFAR-style stem + 3 stages).

    Same contract as ResNet224x224 but with a 3x3 stride-1 stem, three
    stages and an 8x8 final average pool.  `dropout` is forwarded to every
    residual block.
    """

    def __init__(self, block, layers, channels, groups=1, downsample='basic', num_classes=10, dropout=0.3):
        super().__init__()
        assert len(layers) == 3
        self.downsample_mode = downsample
        self.inplanes = 16
        self.dropout = dropout
        # CIFAR stem: a single 3x3 conv, no pooling (input is already small).
        self.conv1 = nn.Conv2d(3, 16, kernel_size=3, stride=1,
                               padding=1, bias=False)
        self.layer1 = self._make_layer(block, channels, groups, layers[0])
        self.layer2 = self._make_layer(
            block, channels * 2, groups, layers[1], stride=2)
        self.layer3 = self._make_layer(
            block, channels * 4, groups, layers[2], stride=2)
        self.avgpool = nn.AvgPool2d(8)
        self.fc = nn.Linear(block.out_channels(
            channels * 4, groups), num_classes)
        # He-style init for convs; BN starts as identity (weight 1, bias 0).
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
                m.weight.data.normal_(0, math.sqrt(2. / n))
            elif isinstance(m, nn.BatchNorm2d):
                m.weight.data.fill_(1)
                m.bias.data.zero_()

    def _make_layer(self, block, planes, groups, blocks, stride=1):
        """Stack `blocks` residual units; the first may downsample/widen."""
        downsample = None
        if stride != 1 or self.inplanes != block.out_channels(planes, groups):
            if self.downsample_mode == 'basic' or stride == 1:
                downsample = nn.Sequential(
                    nn.Conv2d(self.inplanes, block.out_channels(planes, groups),
                              kernel_size=1, stride=stride, bias=False),
                    nn.BatchNorm2d(block.out_channels(planes, groups)),
                )
            elif self.downsample_mode == 'shift_conv':
                downsample = ShiftConvDownsample(in_channels=self.inplanes,
                                                 out_channels=block.out_channels(planes, groups))
            else:
                assert False
        layers = []
        layers.append(block(self.inplanes, planes, groups, stride, downsample, dropout=self.dropout))
        self.inplanes = block.out_channels(planes, groups)
        for i in range(1, blocks):
            layers.append(block(self.inplanes, planes, groups, dropout=self.dropout))
        return nn.Sequential(*layers)

    def forward(self, x, randomness=0, mini_batch=1):
        # NOTE: `randomness` and `mini_batch` are accepted but unused here;
        # they are kept for call-site compatibility.
        x = self.conv1(x)
        x = self.layer1(x)
        x = self.layer2(x)
        x = self.layer3(x)
        x = self.avgpool(x)
        x = x.view(x.size(0), -1)
        return self.fc(x)
def conv3x3(in_planes, out_planes, stride=1):
    """Build a 3x3 convolution with 1-pixel padding and no bias."""
    conv = nn.Conv2d(
        in_planes,
        out_planes,
        kernel_size=3,
        stride=stride,
        padding=1,
        bias=False,
    )
    return conv
class BottleneckBlock(nn.Module):
    """Post-activation bottleneck residual unit: 1x1 -> 3x3 (grouped) -> 1x1."""

    @classmethod
    def out_channels(cls, planes, groups):
        """Output width: 2x planes for grouped blocks, otherwise 4x planes."""
        return 2 * planes if groups > 1 else 4 * planes

    def __init__(self, inplanes, planes, groups, stride=1, downsample=None):
        super().__init__()
        self.relu = nn.ReLU(inplace=True)
        self.conv_a1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False)
        self.bn_a1 = nn.BatchNorm2d(planes)
        self.conv_a2 = nn.Conv2d(
            planes, planes, kernel_size=3, stride=stride, padding=1, bias=False, groups=groups)
        self.bn_a2 = nn.BatchNorm2d(planes)
        self.conv_a3 = nn.Conv2d(planes, self.out_channels(
            planes, groups), kernel_size=1, bias=False)
        self.bn_a3 = nn.BatchNorm2d(self.out_channels(planes, groups))
        self.downsample = downsample
        self.stride = stride

    def forward(self, x):
        # Bottleneck branch: squeeze, spatial (possibly grouped) conv, expand.
        out = self.relu(self.bn_a1(self.conv_a1(x)))
        out = self.relu(self.bn_a2(self.conv_a2(out)))
        out = self.bn_a3(self.conv_a3(out))
        # Identity shortcut unless a projection was supplied.
        shortcut = x if self.downsample is None else self.downsample(x)
        return self.relu(shortcut + out)
class ShakeShakeBlock(nn.Module):
    """Residual block with two parallel branches mixed by the shake op.

    Each branch is ReLU-Conv-BN-ReLU-[Dropout]-Conv-BN; the two outputs
    are blended by `shake` (random convex mix in training, 0.5/0.5 in
    eval) and added to the shortcut.
    """

    @classmethod
    def out_channels(cls, planes, groups):
        # Grouped convolutions are not supported by this block.
        assert groups == 1
        return planes

    def __init__(self, inplanes, planes, groups, stride=1, downsample=None, dropout=0.3):
        super().__init__()
        assert groups == 1
        # Branch a.
        self.conv_a1 = conv3x3(inplanes, planes, stride)
        self.bn_a1 = nn.BatchNorm2d(planes)
        self.conv_a2 = conv3x3(planes, planes)
        self.bn_a2 = nn.BatchNorm2d(planes)
        # Branch b: same topology, independent weights.
        self.conv_b1 = conv3x3(inplanes, planes, stride)
        self.bn_b1 = nn.BatchNorm2d(planes)
        self.conv_b2 = conv3x3(planes, planes)
        self.bn_b2 = nn.BatchNorm2d(planes)
        self.downsample = downsample
        self.stride = stride
        self.dropout = dropout
        if self.dropout > 0:
            self.drop_a = nn.Dropout(p=self.dropout)
            self.drop_b = nn.Dropout(p=self.dropout)
            # self.drop = nn.Dropout2d(self.dropout)

    def forward(self, x):
        a, b, residual = x, x, x
        # First ReLU must NOT be in-place: `x` is still needed by the other
        # branch and by the shortcut.
        a = F.relu(a, inplace=False)
        a = self.conv_a1(a)
        a = self.bn_a1(a)
        a = F.relu(a, inplace=True)
        if self.dropout > 0:
            a = self.drop_a(a)
        a = self.conv_a2(a)
        a = self.bn_a2(a)
        b = F.relu(b, inplace=False)
        b = self.conv_b1(b)
        b = self.bn_b1(b)
        b = F.relu(b, inplace=True)
        if self.dropout > 0:
            b = self.drop_b(b)
        b = self.conv_b2(b)
        b = self.bn_b2(b)
        # Random convex combination of the two branch outputs.
        ab = shake(a, b, training=self.training)
        if self.downsample is not None:
            residual = self.downsample(x)
        return residual + ab
class Shake(Function):
    """Stochastic blend of two equal-shaped tensors (shake-shake op).

    forward() mixes the inputs with one random convex weight per sample
    (fixed 0.5/0.5 at eval time).  backward() deliberately draws a *new*
    random weight for the gradients instead of reusing the forward gate;
    the forward and backward mixes are therefore independent.
    """

    @classmethod
    def forward(cls, ctx, inp1, inp2, training):
        assert inp1.size() == inp2.size()
        # One gate value per sample, broadcast over all remaining dims.
        gate_size = [inp1.size()[0], *itertools.repeat(1, inp1.dim() - 1)]
        gate = inp1.new(*gate_size)
        if training:
            gate.uniform_(0, 1)
        else:
            # Deterministic 50/50 average at evaluation time.
            gate.fill_(0.5)
        return inp1 * gate + inp2 * (1. - gate)

    @classmethod
    def backward(cls, ctx, grad_output):
        grad_inp1 = grad_inp2 = grad_training = None
        gate_size = [grad_output.size()[0], *itertools.repeat(1,
                                                              grad_output.dim() - 1)]
        # Fresh random gate, independent of the one drawn in forward().
        gate = Variable(grad_output.data.new(*gate_size).uniform_(0, 1))
        if ctx.needs_input_grad[0]:
            grad_inp1 = grad_output * gate
        if ctx.needs_input_grad[1]:
            grad_inp2 = grad_output * (1 - gate)
        # `training` is a plain bool and must never require a gradient.
        assert not ctx.needs_input_grad[2]
        return grad_inp1, grad_inp2, grad_training
def shake(inp1, inp2, training=False):
    """Functional wrapper around the Shake autograd op."""
    mixed = Shake.apply(inp1, inp2, training)
    return mixed
class ShiftConvDownsample(nn.Module):
    """Halve spatial resolution by stacking the two diagonal sub-grids of
    the input along channels, then mixing them with a grouped 1x1 conv + BN."""

    def __init__(self, in_channels, out_channels):
        super().__init__()
        self.relu = nn.ReLU(inplace=True)
        self.conv = nn.Conv2d(in_channels=2 * in_channels,
                              out_channels=out_channels,
                              kernel_size=1,
                              groups=2)
        self.bn = nn.BatchNorm2d(out_channels)

    def forward(self, x):
        # Even-aligned and odd-aligned 2x2 sub-grids of the feature map.
        even_grid = x[:, :, 0::2, 0::2]
        odd_grid = x[:, :, 1::2, 1::2]
        stacked = torch.cat((even_grid, odd_grid), dim=1)
        return self.bn(self.conv(self.relu(stacked)))
def shakeshake_resnet(num_classes=10, dropout=0.3, pretrained=False, progress=True, **kwargs):
    """Build the shake-shake ResNet for 32x32 inputs.

    `pretrained`, `progress` and any extra keyword arguments are accepted
    only for API compatibility with torchvision-style factories; no weights
    are downloaded and the extras are ignored.
    """
    model = ResNet32x32(
        ShakeShakeBlock,
        layers=[4, 4, 4],
        channels=96,
        downsample='shift_conv',
        num_classes=num_classes,
        dropout=dropout,
    )
    return model
if __name__ == "__main__":
    # Smoke test: build the 32x32 shake-shake network and push one batch.
    model = ResNet32x32(ShakeShakeBlock,
                        layers=[4, 4, 4],
                        channels=96,
                        downsample='shift_conv')
    x = torch.randn(1, 3, 32, 32)
    # forward() returns only the logits tensor of shape [1, 10]; the old
    # `y, feature = model(x)` two-value unpack raised ValueError at runtime.
    y = model(x)
    print(y.size())
| [
"nayeemrizve@gmail.com"
] | nayeemrizve@gmail.com |
c8050899cf89ee9924b87c4ca03e3b2303d99a97 | e73b062ddf811fb1758f58dd8da4b50868e8b85d | /shaktimaan/bashmenu/menu_decrypt | 5db5be318f44a69e24246b196634da0f752fd217 | [] | no_license | coflin/Intrusion-Detection-System | aaf046a4545e628dc4bbd8b7bfdf5150a40f34f7 | 76c2f0be80ccf0d80a57185f1a30ed57133d0b6d | refs/heads/master | 2020-04-27T18:29:08.719180 | 2019-03-11T14:30:34 | 2019-03-11T14:30:34 | 174,572,474 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,029 | #!/usr/bin/python
import os
print "*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-"
print "Note that only the .bsv files will be decrypted"
print "*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-"
def decrypt(filename):
    """Decrypt one ``.bsv`` file in place.

    The cipher stores the text reversed with every code point shifted up by
    23 (a space is stored as ``chr(145)``).  This routine undoes both steps,
    writes the plaintext next to the input (same name minus the ``.bsv``
    suffix), marks it executable and deletes the encrypted original.

    Fixes: the previous reversal loop ran ``while i > 0`` and silently
    dropped ``message[0]`` (the last plaintext character), and it shelled
    out to ``rm``/``chmod`` (injection-prone for odd filenames) before
    closing the files.
    """
    plain_name = filename[:-4]  # strip the ".bsv" suffix
    with open(filename, "r") as src:
        encrypted = src.read()
    with open(plain_name, "w") as dst:
        # Undo the reversal over the WHOLE text, index 0 included.
        for ch in reversed(encrypted):
            if ord(ch) == 145:
                dst.write(' ')
            else:
                dst.write(chr(ord(ch) - 23))
    os.chmod(plain_name, 0o755)  # mirror the original `chmod 755`
    os.remove(filename)          # safer than os.system("rm " + filename)
def sh(path):
    """Walk *path* recursively and decrypt every file ending in ".bsv"."""
    for dirpath, _dirnames, filenames in os.walk(path, topdown=True):
        for name in filenames:
            if name.endswith(".bsv"):
                decrypt(os.path.abspath(os.path.join(dirpath, name)))
path= raw_input("Enter the path : ")
if os.path.exists(path):
sh(path)
print "All files in "+path+" has been decrypted"
else:
print "File doesnt exists"
| [
"noreply@github.com"
] | noreply@github.com | |
5dbc661f755795eb4062633fde62f23ccdd77609 | 2a6b3f08c637e2c0599c1b3542f5cc3c4720e1e7 | /py/Djangooo/products/models.py | 1dbba5d69e4c456680bfbbf470c05398655078aa | [] | no_license | TheSleepingAssassin/CodeFolder | 481dfb75368c9366e29a1b9084d3a171deece333 | e7058f8b757e924995da359f99e702a9c8adf56f | refs/heads/main | 2023-06-20T14:39:41.249833 | 2021-07-12T11:15:26 | 2021-07-12T11:15:26 | 385,210,625 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 219 | py | from django.db import models
class Product(models.Model):
    """Catalog item: display name, unit price, stock level and image link."""

    # Display name shown to customers.
    name = models.CharField(max_length=255)
    # Unit price.  FloatField mirrors the existing schema; DecimalField is
    # the usual choice for money, but switching would require a migration.
    price = models.FloatField()
    # Units currently in inventory.
    stock = models.IntegerField()
    # Absolute URL of the product image (2083 = legacy IE max URL length).
    imageUrl = models.CharField(max_length=2083)
| [
"premmilindnaik2007@gmail.com"
] | premmilindnaik2007@gmail.com |
9c3c6cf93bb864ea24eb88b611b0e18ec6f8254f | cd09c484dab7fd835ec7da37355ca18796f7d04b | /bd6final.py | 0ecb1038f9cb805d3cb88c8403516a299100d6db | [] | no_license | Jcgo3003/Funtions-Python | 01f16444169772d68a313f7463c1cc9e81c64c48 | 2e29abe7f1b3a050e06299a5224dfd90fe06be34 | refs/heads/master | 2020-05-14T15:56:08.019616 | 2019-04-17T12:49:25 | 2019-04-17T12:49:25 | 181,863,319 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 17,055 | py | # Librerias
import sqlite3
import time
import os.path
from datetime import datetime, timedelta
# Creando nombre para la BD apartir de la fecha
nombrebd = time.strftime('%d%m%y',time.localtime()) + ".db"
# Lista que se encarga de contar el numero de rondas registradas en la BD
ron_a = [ 0 , 0 , 0 ]
ron_b = [ 0 , 0 , 0 ]
# --------------------------------------------------------------------- #
################ Comienzo - Creando/Leyendo archivo de BD ###############
# --------------------------------------------------------------------- #
if (os.path.isfile(nombrebd)):
print("BD existente")
# Conectando la BD
conexion = sqlite3.connect(nombrebd)
print ("Leyendo BD: ", nombrebd)
# Cursor
c = conexion.cursor()
### Recuperando datos de la DB direccion A #####
# sen1 - selecciona la columna sen1_a - asigna bd toda esa columna que se guarda como lista - asigna el # de longitud de esa lista
c.execute("SELECT sen1_a FROM a")
bd = c.fetchall()
ron_a[0] = len(bd)
# sen2 - hace los mismo para la columna de sen2 con algunas diferencias para eliminar las entradas con 'NONE'
c.execute("SELECT sen2_a FROM a")
bd = c.fetchall()
# filtrando las entradas para quitar "NONE" de la lista de entradas
cuenta = 0
for i in range (len(bd)):
if ( str(bd[i][0]) != 'None'):
cuenta += 1
# asigna cuenta a ron_a[1]
ron_a[1] = cuenta
# sen3 - Utiliza el mismo metodo para contar en esta columna
c.execute("SELECT sen3_a FROM a")
bd = c.fetchall()
cuenta = 0
for i in range (len(bd)):
if ( str(bd[i][0]) != 'None'):
cuenta += 1
ron_a[2] = cuenta
### Recuperando datos de la DB para la vuelta direccion B ##### Hace exactamente lo mismo pero para las columnas en B
# sen3
c.execute("SELECT sen3_b FROM b")
bd = c.fetchall()
ron_b[2] = len(bd)
# sen2
c.execute("SELECT sen2_b FROM b")
bd = c.fetchall()
cuenta = 0
for i in range (len(bd)):
if ( str(bd[i][0]) != 'None'):
cuenta += 1
ron_b[1] = cuenta
# sen1
c.execute("SELECT sen1_b FROM b")
bd = c.fetchall()
cuenta = 0
for i in range (len(bd)):
if ( str(bd[i][0]) != 'None'):
cuenta += 1
ron_b[0] = cuenta
# En este punto ya tenemos las listas de rondas con la cuenta exacta de la ultima entrada
# De esta manera se reanundan los registros en donde se quedaron
else:
# En caso de que no exista la base de datos crea una completamente vacia
print("Creando BD ...")
# Creando BD
conexion = sqlite3.connect(nombrebd)
print ("Base de datos creada: ", nombrebd)
# Cursor para la bd
c = conexion.cursor()
# creando tablas
# Crea tabla 'a' con 'num_a' - registro de rondas, Sen1_a - hora de registro, Sen2_a - hora de registro, Sen3_a - hora de registro.
c.execute(''' CREATE TABLE "a" ('num_a' INTEGER PRIMARY KEY , 'sen1_a' TIME, 'sen2_a' TIME, 'sen3_a' TIME )''')
# Crea tabla 'b' con 'num_b' - registro de rondas, Sen1_b - hora de registro, Sen2_b - hora de registro, Sen3_b - hora de registro.
c.execute(''' CREATE TABLE "b" ('num_b' INTEGER PRIMARY KEY , 'sen3_b' TIME, 'sen2_b' TIME, 'sen1_b' TIME )''')
# --------------------------------------------------------------------- #
################ MENU de introduccion de datos #########################
# --------------------------------------------------------------------- #
# Var. para guardar respuesta de usuario
res = input('¿Comenzar?...')
# Muestra lo que sera el menu inicial de la BD
while ( res == 'y'):
print('Las vueltas comienzan asi A', ron_a)
print('Las vueltas comienzan asi B', ron_b)
print(" Menu principal ")
# Direccion se guarda en dir
dir = input("Direccion: ")
## Sensor guarda el nombre completo del sensor sen1_a
sensor = input("Sensor numero: ")# ejem... sen1_a
# num_sen guarda el numero de ese sensor
num_sen = int(input("Numero: "))
print("...")
# --------------------------------------------------------------------- #
################ Introduccion de datos a la DB# #########################
# --------------------------------------------------------------------- #
# Seccion para insertar los datos en la bd 'a' ...
if ( dir == 'a'):
# Crendo un nueva linea en la BD, por ser el sensor 1 se crea una nueva linea en la bd
if (num_sen == 1 ):
# Insertando tiempos a sen1
print("Guardando sen1 Direccion A")
# sumando +1 al contador de rondas
ron_a[0] += 1
# obteniendo la hora exacta de ese momento para guardarla en hora_now
hora_now = (time.strftime('%H:%M:%S',time.localtime()))
# introduciendo el # de ronda en 'num_a', en la en columna sen1_a, en las demas 'NONE'
c.execute('''INSERT INTO a ("num_a","sen1_a","sen2_a","sen3_a") VALUES ( ? , ? ,NULL,NULL)''', (ron_a[0], hora_now) )
else:
# Si num_sen dif. de 1, se tiene que actualizar una linea nueva
print("Guardando sen", num_sen)
print("Direccion A")
# se resta 1 la numero del sensor para hace que corresponda con numero de sitio de la lista de rondas
num_sen -= 1
ron_a[num_sen] += 1
# Se obtiene la hora actual y se guarda en hora_now
hora_now = (time.strftime('%H:%M:%S',time.localtime()))
# Actualiza los datos, donde estaba 'NONE' ahora estara el valor de hora_now
# Utiliza sensor - columna, hora_now para el registro, ron_a[num_sen] para colocarlo en la linea correcta
c.execute('''UPDATE a SET {} = ? where num_a = ? '''.format( sensor), (hora_now, ron_a[num_sen] ) )
# Seccion para insertar los datos en la bd 'b' ...
else:
# Se utiliza el mismo metodo diferente orden al ser el sensor 3 el que empieza, porque el sensor estara
# Colocado en la ultima estacion de dir. 'a', haciendo que sensor 3 sea la primera en activarse y crea la linea
if (num_sen == 3 ):
print("Guardando sen3 Direccion B")
# Agrega +1 a su cuenta de rondas de nuevo al ser sen3 el primero de dir. 'b' la lista se actulizara de derecha a izquierda
ron_b[2] += 1
# obteniendo la hora actual y guardandola en hora_now
hora_now = (time.strftime('%H:%M:%S',time.localtime()))
# Creando la nueva linea
c.execute('''INSERT INTO b ("num_b","sen3_b","sen2_b","sen1_b") VALUES ( ? , ? ,NULL,NULL)''', (ron_b[2], hora_now) )
else:
# Al igual que en 'a' aqui actualizara los datos de esas columnas pasando los 'NONE's a registros de tiempo
print("Guardando sen", num_sen)
print("Direccion B")
# para que las rondas se guarden correctamente
num_sen -= 1
# Sumando +1 a la lista de rondas 'b'
ron_b[num_sen] += 1
# obteniendo la hora acutal
hora_now = (time.strftime('%H:%M:%S',time.localtime()))
# Actulizando los datos utilizando el num. de rondas para colocarlo en su sitio correctamente, hora_now para hacer el registro y num_sen para su columna
c.execute('''UPDATE b SET {} = ? where num_b = ? '''.format(sensor), (hora_now, ron_b[num_sen] ) )
# Con esta linea se guardan los registros definitivamente el la bd
conexion.commit()
# Regresa de nuevo hasta arriva para comenzar de nuevo con otro registro
res = input("Quieres continuar: ")
# Imprime el estado de la vueltas
print()
print('Las vueltas terminan asi A', ron_a)
print('Las vueltas terminan asi B', ron_b)
print()
#-----------------------------------------------------------#
############ Promedios de los tiempos por columna ###########
#-----------------------------------------------------------#
# Limit the sample to the last (at most 5) fully-registered rounds; ron_a[2]
# belongs to sensor 3, the last to fire outbound, so it marks complete rows.
if (ron_a[2] > 5):
    ini_a = ron_a[2] - 4
else:
    # Fewer than 5 complete rounds: start from the first row.
    ini_a = 1
# Same windowing for direction B (sensor 1 is the last to fire there).
if (ron_b[0] > 5):
    # Subtract 4 so the window covers exactly 5 rounds.
    ini_b = ron_b[0] - 4
else:
    ini_b = 1
# Column names to iterate over, one list per direction.
l_sen_a = [ 'sen1_a', 'sen2_a', 'sen3_a']
l_sen_b = [ 'sen1_b', 'sen2_b', 'sen3_b']
# Timestamps fetched from the column currently being processed.
lista_col = []
# Successive differences t2-t1, t3-t2, ... in seconds.
lista_t = []
# Per-column averages (seconds between consecutive rounds at one station).
pro_a = []
pro_b = []
# Timestamp parse format: hour, minute, second.
FMT = '%H:%M:%S'
### Per-sensor averages, direction A ###
for n in range (len(l_sen_a)):
    # Collect the timestamps of column l_sen_a[n] over the sample window
    # (the +1 makes the range inclusive of the last round).
    for i in range ( ini_a ,ron_a[2] +1 ):
        # Fetch round i's timestamp from this sensor's column.
        c.execute('SELECT {} FROM a WHERE num_a = ?'.format( l_sen_a[n]), ( i , ))
        # fetchone returns a 1-tuple...
        bd = c.fetchone()
        # ...so take element 0.
        lista_col.append( bd[0] )
    # Average the gaps between consecutive timestamps (k values -> k-1 gaps).
    for y in range (len(lista_col) -1):
        # Elapsed time between round y and round y+1 at this station.
        tdel = datetime.strptime(lista_col[y+1], FMT) - datetime.strptime(lista_col[y], FMT)
        # Expressed in seconds.
        t_float = tdel.total_seconds()
        # Accumulate for the average.
        lista_t.append( t_float )
    # Store this column's average.
    pro_a.append( sum(lista_t)/ len(lista_t) )
    # Reset the scratch lists for the next column.
    lista_col.clear()
    lista_t.clear()
# Print the per-column averages for direction A.
print('Promedios finales pro_a',pro_a)
### Per-sensor averages, direction B: identical procedure on table b ###
for n in range (len(l_sen_b)):
    # Collect the timestamps of column l_sen_b[n] over the sample window.
    for i in range ( ini_b ,ron_b[0] +1 ):
        # Fetch round i's timestamp from this sensor's column.
        c.execute('SELECT {} FROM b WHERE num_b = ?'.format( l_sen_b[n]), ( i , ))
        # fetchone returns a 1-tuple.
        bd = c.fetchone()
        # Append the timestamp itself.
        lista_col.append( bd[0] )
    # Average the gaps between consecutive timestamps.
    for y in range (len(lista_col) -1):
        # Elapsed time between consecutive rounds at this station.
        tdel = datetime.strptime(lista_col[y+1], FMT) - datetime.strptime(lista_col[y], FMT)
        # Expressed in seconds.
        t_float = tdel.total_seconds()
        # Accumulate for the average.
        lista_t.append( t_float )
    # Store this column's average.
    pro_b.append( sum(lista_t)/ len(lista_t) )
    # Reset the scratch lists for the next column.
    lista_col.clear()
    lista_t.clear()
print('Promedios finales pro_b',pro_b)
print()
# --------------------------------------------------------------------- #
################ Obtener tiempo promedio entre estaciones ###############
# ------------Promedio de tiempo de arrivo entre estaciones------------ #
# Average travel time between consecutive stations, per direction.
t_e1 = []  # first station pair gaps (seconds)
t_e2 = []  # second station pair gaps (seconds)
# Direction A: rows are (num_a, sen1_a, sen2_a, sen3_a).
for i in range ( ini_a ,ron_a[2] +1 ):
    c.execute('SELECT * FROM a WHERE num_a =?', (i,))
    bd = c.fetchone()
    # Station 1 -> 2.
    tdel = datetime.strptime(bd[2], FMT) - datetime.strptime(bd[1], FMT)
    t_e1.append( tdel.total_seconds() )
    # Station 2 -> 3.
    tdel = datetime.strptime(bd[3], FMT) - datetime.strptime(bd[2], FMT)
    t_e2.append( tdel.total_seconds() )
print('Los tiempos promedio direccion a son: ')
pro_e1 = sum(t_e1)/len(t_e1)
print("El promedio entre estaciones 1 y 2 es", pro_e1)
pro_e2 = sum(t_e2)/len(t_e2)
print("El promedio entre estaciones 2 y 3 es", pro_e2)
print()
# Reset the scratch lists for direction B.
t_e1.clear()
t_e2.clear()
# Direction B: rows are (num_b, sen3_b, sen2_b, sen1_b) -- sensor 3 first.
# BUG FIX: this loop previously queried 'SELECT * FROM a WHERE num_a =?',
# so the direction-B averages were silently computed from direction-A data;
# it must read table "b" keyed by num_b.
for i in range ( ini_b ,ron_b[0] +1 ):
    c.execute('SELECT * FROM b WHERE num_b =?', (i,))
    bd = c.fetchone()
    # Station 3 -> 2 (sensor 3 fires first on the way back).
    tdel = datetime.strptime(bd[2], FMT) - datetime.strptime(bd[1], FMT)
    t_e1.append( tdel.total_seconds() )
    # Station 2 -> 1.
    tdel = datetime.strptime(bd[3], FMT) - datetime.strptime(bd[2], FMT)
    t_e2.append( tdel.total_seconds() )
print("El promedio entre estaciones B:")
pro_e1 = sum(t_e1)/len(t_e1)
print("El promedio entre estaciones es", pro_e1)
pro_e2 = sum(t_e2)/len(t_e2)
print("El promedio entre estaciones es", pro_e2)
t_e1.clear()
t_e2.clear()
# Done with today's database.
conexion.close()
# Error de vueltas - Las vueltas comienzan asi A [1, 2, 1] -
# Debo hacer que la maquina no adminta entradas si por ejemplo # vueltas sen1 es menor que sen2
# Si admite entradas entonces la funcion de sumar y restar saldra muy mal
# Pero si por ejemplo hago los calculos con esos numeros tambien habra problemas
# QUE HAGO????????????
# Si pasa un caso asi lo mejor sera borrar toda esa linea de vueltas algo asi de que si recibo un
# respoesta de un sensor que no espero lo mejor sera borrar ya toda esa FILA entera
# if #_v_sen2 > #_v_sen1 or #_v_sen3 > #_v_sen2 delete - no hacer la entrada en la bd y borrar
# La ultima vuelta y poner un WARNING para que el resto de entradas que se puedan a llegar a hacer
# de los siguientes entradas de los siguientes sensores no se registren hasta terminar tal error de vuelta
# mientras tanto se reinicia un vuelta desde el primer sensor resgitros que si se llevan acabo
# Tareas para manana:
# Revisar ultimo for en la seccion para obtener tiempo entre estaciones
# Revisar los ultimos comentarios para evitar que la bd obtenga datos erroneos
# tengo que hacer que el sistema este en espera algo asi como que espera la llegada de entradas de#
# Tengo que hacer un sistema de trafico de llegadas es decir, una variable que tome control de los registros
# Esta variable una vez accionada por el primer sensor esperaria que
# Llgara el se envie una solicitud de registro para llenar o sensor 1 o para llenar sensor dos es decir
# Solo espera llegadas de por ejemplo tren que sigue en la siguiente estacion o de una anterior
# Pero que permita que haya registros que cualquiera de las anteriores
# patron queda como que los numero a la izquierda siempre deben ser mayores a los de la derecha SIEMPRE
# simple cada que haya un intento de registro hacer que compare el registro que se quiere hacer
# 1 Dejar que el registro modifique las rondas
# 2 Revisar como quedan las rondas despues de ese registros
# 3 si el registro que se hizo cumple con la condicion de mantener todos los numeros de la izquierda iguales o mayoyes
# Accepta el registro
# Si no accepta el registro entonces ...
# Significara que hubo un error de recepcion en los datos y que toda esa linea estara mal
# pero como saber que linea eliminar es ahi el problema quiza aqui si aga falta revisar los datos
# de los ultimos registros, pero ...
# solo no aceptara ese registro | [
"noreply@github.com"
] | noreply@github.com |
67d0a2a954213d42ddd71266366f19adab9b7138 | 71678f708e7bb80577b560ab660af2d965f7fa88 | /test.py | 23bb3cfd232d7d25db37f4d52705132a55b38aeb | [] | no_license | natepill/CS-1.2-Tweet-Generator | 7c09b396f37b56c5be45edfa603821389848853f | 64736b69a3701c34ba5f36153af1fa4ad0fef84c | refs/heads/master | 2020-04-02T18:25:45.848092 | 2019-03-13T17:05:47 | 2019-03-13T17:05:47 | 154,699,785 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 52 | py | from string import ascii_lowercase as al
print(al)
| [
"natepill@gmail.com"
] | natepill@gmail.com |
41f288bd19aa9b05ca5271135fc73d166ec9a820 | d83b4b27c9abc634dac38315269dcf3de06bbe51 | /tests/spaces/test_utils.py | 9443e711a6cb4da6700c07a6f5b7d3d615f70d9d | [
"MIT"
] | permissive | ivannz/gym | 3817ecf8de9f22ea8f8b4d20ff309acbddd4d3bd | a7e1861f5c4aba1bf2f95d1d7de03e5fc5cbd530 | refs/heads/master | 2022-06-29T23:58:49.385077 | 2022-06-16T16:29:50 | 2022-06-16T16:29:50 | 241,954,848 | 0 | 0 | NOASSERTION | 2020-02-20T18:06:02 | 2020-02-20T18:06:01 | null | UTF-8 | Python | false | false | 9,801 | py | from collections import OrderedDict
import numpy as np
import pytest
from gym.spaces import (
Box,
Dict,
Discrete,
Graph,
MultiBinary,
MultiDiscrete,
Tuple,
utils,
)
# Fixed-size (non-graph) spaces exercised by the flatten/flatdim tests below;
# kept in one-to-one order with `flatdims`.
homogeneous_spaces = [
    Discrete(3),
    Box(low=0.0, high=np.inf, shape=(2, 2)),
    Box(low=0.0, high=np.inf, shape=(2, 2), dtype=np.float16),
    Tuple([Discrete(5), Discrete(10)]),
    Tuple(
        [
            Discrete(5),
            Box(low=np.array([0.0, 0.0]), high=np.array([1.0, 5.0]), dtype=np.float64),
        ]
    ),
    Tuple((Discrete(5), Discrete(2), Discrete(2))),
    MultiDiscrete([2, 2, 10]),
    MultiBinary(10),
    Dict(
        {
            "position": Discrete(5),
            "velocity": Box(
                low=np.array([0.0, 0.0]), high=np.array([1.0, 5.0]), dtype=np.float64
            ),
        }
    ),
    Discrete(3, start=2),
    Discrete(8, start=-5),
]
# Expected flattened dimension of each space above (same order).
flatdims = [3, 4, 4, 15, 7, 9, 14, 10, 7, 3, 8]
# Variable-size spaces: flattenable per-sample but with no fixed flatdim.
graph_spaces = [
    Graph(node_space=Box(low=-100, high=100, shape=(3, 4)), edge_space=Discrete(5)),
    Graph(node_space=Discrete(5), edge_space=Box(low=-100, high=100, shape=(3, 4))),
    Graph(node_space=Discrete(5), edge_space=None),
]
@pytest.mark.parametrize(["space", "flatdim"], zip(homogeneous_spaces, flatdims))
def test_flatdim(space, flatdim):
    """utils.flatdim must report the expected flattened size for each space."""
    dim = utils.flatdim(space)
    assert dim == flatdim, f"Expected {dim} to equal {flatdim}"
@pytest.mark.parametrize("space", homogeneous_spaces)
def test_flatten_space_boxes(space):
    """flatten_space must yield a 1-D Box whose length equals flatdim."""
    flat_space = utils.flatten_space(space)
    assert isinstance(flat_space, Box), f"Expected {type(flat_space)} to equal {Box}"
    (single_dim,) = flat_space.shape
    flatdim = utils.flatdim(space)
    assert single_dim == flatdim, f"Expected {single_dim} to equal {flatdim}"
@pytest.mark.parametrize("space", homogeneous_spaces + graph_spaces)
def test_flat_space_contains_flat_points(space):
    """Every flattened random sample must be a member of the flattened space."""
    flattened_samples = [utils.flatten(space, space.sample()) for _ in range(10)]
    flat_space = utils.flatten_space(space)
    for i, flat_sample in enumerate(flattened_samples):
        assert flat_space.contains(
            flat_sample
        ), f"Expected sample #{i} {flat_sample} to be in {flat_space}"
@pytest.mark.parametrize("space", homogeneous_spaces)
def test_flatten_dim(space):
    """flatten() must return a 1-D array of exactly flatdim() entries."""
    flatdim = utils.flatdim(space)
    (single_dim,) = utils.flatten(space, space.sample()).shape
    assert single_dim == flatdim, f"Expected {single_dim} to equal {flatdim}"
@pytest.mark.parametrize("space", homogeneous_spaces + graph_spaces)
def test_flatten_roundtripping(space):
    """unflatten(flatten(x)) must reproduce x for random samples."""
    some_samples = [space.sample() for _ in range(10)]
    roundtripped_samples = [
        utils.unflatten(space, utils.flatten(space, sample))
        for sample in some_samples
    ]
    for i, (original, roundtripped) in enumerate(
        zip(some_samples, roundtripped_samples)
    ):
        assert compare_nested(
            original, roundtripped
        ), f"Expected sample #{i} {original} to equal {roundtripped}"
def compare_nested(left, right):
    """Recursively compare two (possibly nested) samples for equality.

    Arrays are compared with ``np.allclose``, OrderedDicts key-by-key in
    insertion order, tuples/lists element-wise, everything else with ``==``.
    """
    if isinstance(left, np.ndarray) and isinstance(right, np.ndarray):
        return np.allclose(left, right)
    elif isinstance(left, OrderedDict) and isinstance(right, OrderedDict):
        if len(left) != len(right):
            return False
        return all(
            l_key == r_key and compare_nested(l_val, r_val)
            for (l_key, l_val), (r_key, r_val) in zip(left.items(), right.items())
        )
    elif isinstance(left, (tuple, list)) and isinstance(right, (tuple, list)):
        if len(left) != len(right):
            return False
        return all(compare_nested(x, y) for x, y in zip(left, right))
    else:
        return left == right
"""
Expected flattened types are based on:
1. The type that the space is hardcoded as (i.e. multi_discrete=np.int64, discrete=np.int64, multi_binary=np.int8)
2. The type that the space is instantiated with (i.e. box=np.float32 by default unless instantiated with a different type)
3. The smallest type that the composite space (tuple, dict) can be represented as. In flatten, this is determined
internally by numpy when np.concatenate is called.
"""
# Expected dtype of flatten_space(space) for each homogeneous space, in
# order (see the explanation in the module docstring directly above).
expected_flattened_dtypes = [
    np.int64,
    np.float32,
    np.float16,
    np.int64,
    np.float64,
    np.int64,
    np.int64,
    np.int8,
    np.float64,
    np.int64,
    np.int64,
]
@pytest.mark.parametrize(
    ["original_space", "expected_flattened_dtype"],
    zip(homogeneous_spaces, expected_flattened_dtypes),
)
def test_dtypes(original_space, expected_flattened_dtype):
    """Check flattened dtypes and that unflattening restores sample types."""
    flattened_space = utils.flatten_space(original_space)
    original_sample = original_space.sample()
    flattened_sample = utils.flatten(original_space, original_sample)
    unflattened_sample = utils.unflatten(original_space, flattened_sample)
    assert flattened_space.contains(
        flattened_sample
    ), "Expected flattened_space to contain flattened_sample"
    assert (
        flattened_space.dtype == expected_flattened_dtype
    ), f"Expected flattened_space's dtype to equal {expected_flattened_dtype}"
    assert (
        flattened_sample.dtype == flattened_space.dtype
    ), "Expected flattened_space's dtype to equal flattened_sample's dtype "
    compare_sample_types(original_space, original_sample, unflattened_sample)
def compare_sample_types(original_space, original_sample, unflattened_sample):
    """Recursively assert that unflattening restored the original sample's types."""
    if isinstance(original_space, Discrete):
        assert isinstance(unflattened_sample, int), (
            "Expected unflattened_sample to be an int. unflattened_sample: "
            "{} original_sample: {}".format(unflattened_sample, original_sample)
        )
    elif isinstance(original_space, Tuple):
        for index, subspace in enumerate(original_space.spaces):
            compare_sample_types(
                subspace, original_sample[index], unflattened_sample[index]
            )
    elif isinstance(original_space, Dict):
        for key, subspace in original_space.spaces.items():
            compare_sample_types(subspace, original_sample[key], unflattened_sample[key])
    else:
        assert unflattened_sample.dtype == original_sample.dtype, (
            "Expected unflattened_sample's dtype to equal "
            "original_sample's dtype. unflattened_sample: "
            "{} original_sample: {}".format(unflattened_sample, original_sample)
        )
# One hand-written sample per homogeneous space, in the same order.
samples = [
    2,
    np.array([[1.0, 3.0], [5.0, 8.0]], dtype=np.float32),
    np.array([[1.0, 3.0], [5.0, 8.0]], dtype=np.float16),
    (3, 7),
    (2, np.array([0.5, 3.5], dtype=np.float32)),
    (3, 0, 1),
    np.array([0, 1, 7], dtype=np.int64),
    np.array([0, 1, 1, 0, 0, 0, 1, 1, 1, 1], dtype=np.int8),
    OrderedDict(
        [("position", 3), ("velocity", np.array([0.5, 3.5], dtype=np.float32))]
    ),
    3,
    -2,
]
# The exact array flatten() is expected to produce for each sample above
# (discrete entries become one-hot vectors, boxes are raveled).
expected_flattened_samples = [
    np.array([0, 0, 1], dtype=np.int64),
    np.array([1.0, 3.0, 5.0, 8.0], dtype=np.float32),
    np.array([1.0, 3.0, 5.0, 8.0], dtype=np.float16),
    np.array([0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0], dtype=np.int64),
    np.array([0, 0, 1, 0, 0, 0.5, 3.5], dtype=np.float64),
    np.array([0, 0, 0, 1, 0, 1, 0, 0, 1], dtype=np.int64),
    np.array([1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0], dtype=np.int64),
    np.array([0, 1, 1, 0, 0, 0, 1, 1, 1, 1], dtype=np.int8),
    np.array([0, 0, 0, 1, 0, 0.5, 3.5], dtype=np.float64),
    np.array([0, 1, 0], dtype=np.int64),
    np.array([0, 0, 0, 1, 0, 0, 0, 0], dtype=np.int64),
]
@pytest.mark.parametrize(
    ["space", "sample", "expected_flattened_sample"],
    zip(homogeneous_spaces, samples, expected_flattened_samples),
)
def test_flatten(space, sample, expected_flattened_sample):
    """flatten() must produce exactly the expected array (shape, dtype, values)."""
    assert sample in space
    flattened_sample = utils.flatten(space, sample)
    assert flattened_sample.shape == expected_flattened_sample.shape
    assert flattened_sample.dtype == expected_flattened_sample.dtype
    assert (flattened_sample == expected_flattened_sample).all()
@pytest.mark.parametrize(
    ["space", "flattened_sample", "expected_sample"],
    zip(homogeneous_spaces, expected_flattened_samples, samples),
)
def test_unflatten(space, flattened_sample, expected_sample):
    """unflatten() must invert flatten() for the hand-written flat samples."""
    unflattened = utils.unflatten(space, flattened_sample)
    assert compare_nested(unflattened, expected_sample)
# The exact Box that flatten_space() is expected to return for each
# homogeneous space, in order.
expected_flattened_spaces = [
    Box(low=0, high=1, shape=(3,), dtype=np.int64),
    Box(low=0.0, high=np.inf, shape=(4,), dtype=np.float32),
    Box(low=0.0, high=np.inf, shape=(4,), dtype=np.float16),
    Box(low=0, high=1, shape=(15,), dtype=np.int64),
    Box(
        low=np.array([0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], dtype=np.float64),
        high=np.array([1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 5.0], dtype=np.float64),
        dtype=np.float64,
    ),
    Box(low=0, high=1, shape=(9,), dtype=np.int64),
    Box(low=0, high=1, shape=(14,), dtype=np.int64),
    Box(low=0, high=1, shape=(10,), dtype=np.int8),
    Box(
        low=np.array([0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], dtype=np.float64),
        high=np.array([1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 5.0], dtype=np.float64),
        dtype=np.float64,
    ),
    Box(low=0, high=1, shape=(3,), dtype=np.int64),
    Box(low=0, high=1, shape=(8,), dtype=np.int64),
]
@pytest.mark.parametrize(
    ["space", "expected_flattened_space"],
    zip(homogeneous_spaces, expected_flattened_spaces),
)
def test_flatten_space(space, expected_flattened_space):
    """flatten_space() must produce exactly the expected Box space."""
    assert utils.flatten_space(space) == expected_flattened_space
| [
"noreply@github.com"
] | noreply@github.com |
9447b7349c9b860843a4f913b233ccf07a5219f4 | 16102a426b34dc71573cf056ef5fdf2285fa9266 | /myschool/details/admin.py | 5ea6fc10b8b8fc81757a25ddbe52e989fac74475 | [] | no_license | shreyajaggi/Django-LoginSystem | ae15b439c16ca505b5dc0ed815e5ac110369f5df | 2462e1c78365614f7a4748d11beb96bb0dd5e7ca | refs/heads/master | 2020-04-06T09:47:41.195334 | 2018-01-12T09:35:43 | 2018-01-12T09:35:43 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 304 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib import admin
from details.models import Teacher , Student , Class, Homework
# Register your models here.
# Expose the school models through Django's auto-generated admin interface.
admin.site.register(Class)
admin.site.register(Teacher)
admin.site.register(Student)
admin.site.register(Homework)
"dishamendiratta@gmail.com"
] | dishamendiratta@gmail.com |
9a34d006505c0eb7e0cea42b26ab77fca7370299 | b927f3d572a33d4528efb1bd5315fd53a5905660 | /audioname.py | 6a18d3618f7af0a9b07eed5c144d3486ab7a3e4d | [] | no_license | OSU-Mapper/osu_file_process | 5de00440ccc952108171e82fb380165c47e7f5c0 | d77ef4060f4e6e6db6ac280c26a8d3458441bdad | refs/heads/master | 2021-01-19T10:39:24.483680 | 2017-04-28T02:26:00 | 2017-04-28T02:26:00 | 87,887,451 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 949 | py | import sys
import argparse
def searchSublevel(x, y):
    """Print the value of key ``y`` inside section ``[x]`` of the file named
    by ``sys.argv[1]`` (an osu!-style ini file with ``key: value`` lines).

    Prints a diagnostic message when the section or the key is missing.
    """
    with open(sys.argv[1], 'r') as my_file:
        lines = my_file.readlines()
    line_iter = iter(lines)
    target = "[" + x + "]"
    for line in line_iter:
        if target in line:
            break
    else:
        # The section header was never seen.
        print( "No such infomation in file")
        return
    # Bug fix: scan the whole section.  The original only inspected the first
    # non-empty line after the header, so keys further down were never found.
    for line in line_iter:
        stripped = line.strip()
        if not stripped:
            continue
        if stripped.startswith('['):
            # Reached the next section without finding the key.
            break
        if y in line:
            # Bug fix: split on the first ':' only, so values that themselves
            # contain ':' (e.g. Windows paths) are preserved intact.
            l, r = line.split(':', 1)
            print(r.strip())
            return
    print("No such infomation in " + x)
if __name__ == "__main__":
    # Require exactly one command-line argument: the path to the .osu file.
    if len(sys.argv) != 2:
        # Bug fix: the original followed this raise with an unreachable
        # ``exit(-1)``; the raise alone already aborts with a traceback.
        raise argparse.ArgumentTypeError('the number of argument has to be 2')
    searchSublevel("General", "AudioFilename")
| [
"ly976@nyu.edu"
] | ly976@nyu.edu |
eee558f9a6af4b9e43f71b98cab0b09ae33c1e81 | f4087f2c8dbcc250e67aa6f1ed0a52d33ef10f1c | /BOTS/node_modules/nodegit/binding.gyp | a499bf2dfd055d72dbc5e7fe3693fbc96f5fe0dc | [
"MIT"
] | permissive | Grey-Wolf-PO/Fudsonstead-Main | 32cc560e9205fd93213ffd19d6fb445e68f14690 | ee91d7bb3f4fa52f0f2c9862a4544dab1fc901eb | refs/heads/master | 2021-01-10T17:41:23.903881 | 2016-02-29T18:43:45 | 2016-02-29T18:43:45 | 55,607,993 | 0 | 1 | null | 2016-04-06T13:24:47 | 2016-04-06T13:24:47 | null | UTF-8 | Python | false | false | 5,249 | gyp | # // This is a generated file, modify: generate/templates/binding.gyp.
{
"targets": [{
"target_name": "nodegit",
"dependencies": [
"vendor/libgit2.gyp:libgit2"
],
"variables": {
"coverage%": 0
},
"sources": [
"src/lock_master.cc",
"src/nodegit.cc",
"src/init_ssh2.cc",
"src/promise_completion.cc",
"src/wrapper.cc",
"src/functions/copy.cc",
"src/functions/sleep_for_ms.cc",
"src/convenient_patch.cc",
"src/convenient_hunk.cc",
"src/str_array_converter.cc",
"src/annotated_commit.cc",
"src/attr.cc",
"src/blame.cc",
"src/blame_hunk.cc",
"src/blame_options.cc",
"src/blob.cc",
"src/branch.cc",
"src/branch_iterator.cc",
"src/buf.cc",
"src/cert.cc",
"src/cert_hostkey.cc",
"src/cert_x509.cc",
"src/checkout.cc",
"src/checkout_options.cc",
"src/cherrypick.cc",
"src/cherrypick_options.cc",
"src/clone.cc",
"src/clone_options.cc",
"src/commit.cc",
"src/config.cc",
"src/config_entry.cc",
"src/config_entry.cc",
"src/cred.cc",
"src/cred_default.cc",
"src/cred_username.cc",
"src/cred_userpass_payload.cc",
"src/cvar_map.cc",
"src/describe_format_options.cc",
"src/describe_options.cc",
"src/diff.cc",
"src/diff_binary.cc",
"src/diff_binary_file.cc",
"src/diff_delta.cc",
"src/diff_file.cc",
"src/diff_find_options.cc",
"src/diff_hunk.cc",
"src/diff_line.cc",
"src/diff_options.cc",
"src/diff_options.cc",
"src/diff_perfdata.cc",
"src/diff_perfdata.cc",
"src/diff_stats.cc",
"src/error.cc",
"src/fetch.cc",
"src/fetch_options.cc",
"src/fetch_options.cc",
"src/filter.cc",
"src/filter.cc",
"src/filter_list.cc",
"src/giterr.cc",
"src/graph.cc",
"src/hashsig.cc",
"src/ignore.cc",
"src/index.cc",
"src/index_conflict_iterator.cc",
"src/index_entry.cc",
"src/index_time.cc",
"src/indexer.cc",
"src/libgit2.cc",
"src/mempack.cc",
"src/merge.cc",
"src/merge_file_input.cc",
"src/merge_file_options.cc",
"src/merge_file_result.cc",
"src/merge_options.cc",
"src/merge_options.cc",
"src/merge_result.cc",
"src/message.cc",
"src/note.cc",
"src/note_iterator.cc",
"src/object.cc",
"src/odb.cc",
"src/odb_object.cc",
"src/oid.cc",
"src/oid_shorten.cc",
"src/oidarray.cc",
"src/openssl.cc",
"src/packbuilder.cc",
"src/patch.cc",
"src/pathspec.cc",
"src/pathspec_match_list.cc",
"src/push.cc",
"src/push_options.cc",
"src/push_update.cc",
"src/rebase.cc",
"src/rebase_operation.cc",
"src/rebase_options.cc",
"src/refdb.cc",
"src/reference.cc",
"src/reflog.cc",
"src/reflog_entry.cc",
"src/refspec.cc",
"src/remote.cc",
"src/remote_callbacks.cc",
"src/remote_callbacks.cc",
"src/repository.cc",
"src/repository_init_options.cc",
"src/reset.cc",
"src/revert.cc",
"src/revert_options.cc",
"src/revparse.cc",
"src/revwalk.cc",
"src/signature.cc",
"src/smart.cc",
"src/stash.cc",
"src/stash_apply_options.cc",
"src/status.cc",
"src/status_entry.cc",
"src/status_list.cc",
"src/status_options.cc",
"src/strarray.cc",
"src/submodule.cc",
"src/submodule_update_options.cc",
"src/tag.cc",
"src/time.cc",
"src/trace.cc",
"src/transaction.cc",
"src/transfer_progress.cc",
"src/transport.cc",
"src/tree.cc",
"src/tree_entry.cc",
"src/treebuilder.cc",
"src/writestream.cc",
],
"include_dirs": [
"vendor/libv8-convert",
"vendor/libssh2/include",
"vendor/openssl/openssl/include",
"<!(node -e \"require('nan')\")"
],
"cflags": [
"-Wall"
],
"conditions": [
[
"coverage==1", {
"cflags": [
"-ftest-coverage",
"-fprofile-arcs"
],
"link_settings": {
"libraries": [
"-lgcov"
]
},
}
],
[
"OS=='mac'", {
"xcode_settings": {
"GCC_ENABLE_CPP_EXCEPTIONS": "YES",
"MACOSX_DEPLOYMENT_TARGET": "10.7",
"WARNING_CFLAGS": [
"-Wno-unused-variable",
"-Wint-conversions",
"-Wmissing-field-initializers",
"-Wno-c++11-extensions"
]
}
}
],
[
"OS=='win'", {
"cflags": [
"/EHsc"
],
"defines": [
"_HAS_EXCEPTIONS=1"
]
}
],
[
"OS=='linux' and '<!(echo \"$CXX\")'=='clang++'", {
"cflags": [
"-Wno-c++11-extensions"
]
}
],
[
"OS=='linux' and '<!(echo \"$CXX\")'!='clang++'", {
"cflags": [
"-std=c++0x"
]
}
]
]
}]
} | [
"littlelnu2@yahoo.com"
] | littlelnu2@yahoo.com |
df0732ff6db9870819104b3cb75ff8aa2279e786 | bf235b33e11013f42cc585eb5919e8b835bdf12c | /musical/audio/source.py | f842be347aa71cc229025a45c013bd97035d1802 | [] | no_license | Slater-Victoroff/BerkleeMusicHack | 98c3b4758d4bdd5ba0acc4f2c8fa96837935c099 | f8ff7d4e35132917d335aa37880bebb9de8fad4f | refs/heads/master | 2016-09-06T05:14:47.220950 | 2015-02-06T22:56:48 | 2015-02-06T22:56:48 | 18,229,937 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,870 | py | from collections import deque
import math
import random
import numpy
from scipy.signal import waveforms
def silence(length, rate=44100):
    ''' Return 'length' seconds of digital silence (all zeros) sampled at 'rate'
    '''
    sample_count = int(length * rate)
    return numpy.zeros(sample_count)
def pygamesound(sound):
    ''' Create numpy array from pygame sound object
    rate is determined by pygame.mixer settings
    '''
    # Imported lazily so the module works without pygame installed.
    import pygame
    pygame.sndarray.use_arraytype('numpy')
    array = pygame.sndarray.array(sound)
    # ``format`` is the sample format from pygame.mixer.init(); a negative
    # value denotes signed samples of that many bits.
    rate, format, channels = pygame.mixer.get_init()
    data = numpy.zeros(len(array))
    # Mix all channels of each frame down to one value by summing them.
    # NOTE(review): the sum is not divided by the channel count, so
    # multi-channel sounds can exceed the [-1, 1] range -- confirm intended.
    for i, sample in enumerate(array):
        data[i] = sum(sample)
    if format < 0:
        # Signed samples: scale by half the full signed range.
        data /= (2 ** -format) / 2
    else:
        # Unsigned samples: map [0, 2**format) onto roughly [-1, 1).
        data = (data / (2 ** format)) * 2 - 1
    return data
def generate_wave_input(freq, length, rate=44100, phase=0.0):
    ''' Build the phase-argument array ``2*pi*freq*t + 2*pi*phase`` fed to the
    waveform generators below.  'phase' is the fraction (0.0 to 1.0) of a full
    cycle at which the wave starts.
    Courtesy of threepineapples:
    https://code.google.com/p/python-musical/issues/detail?id=2
    '''
    n_samples = int(length * rate)
    times = numpy.arange(n_samples) / float(rate)
    angular_freq = float(freq) * 2 * math.pi
    start_phase = phase * 2 * math.pi
    return angular_freq * times + start_phase
def sine(freq, length, rate=44100, phase=0.0):
    ''' Generate a sine wave at 'freq' Hz lasting 'length' seconds sampled at
    'rate'; 'phase' is the fraction of a cycle at which the wave starts.
    '''
    return numpy.sin(generate_wave_input(freq, length, rate, phase))
def sawtooth(freq, length, rate=44100, phase=0.0):
    ''' Generate a sawtooth wave; parameters as for `sine`. '''
    return waveforms.sawtooth(generate_wave_input(freq, length, rate, phase))
def square(freq, length, rate=44100, phase=0.0):
    ''' Generate a square wave; parameters as for `sine`. '''
    return waveforms.square(generate_wave_input(freq, length, rate, phase))
def ringbuffer(data, length, decay=1.0, rate=44100):
    ''' Repeat data for 'length' seconds, smoothing adjacent samples to damp
    higher-frequency oscillation.  'decay' is the per-sample amplitude factor
    (1.0 keeps the amplitude; values below 1.0 let the sound die away).
    '''
    phase = len(data)
    length = int(rate * length)
    out = numpy.resize(data, length)
    # Each new sample is the decayed average of the two samples one period
    # (``phase`` samples) earlier -- the Karplus-Strong update rule.
    # Bug fix: ``xrange`` only exists on Python 2; ``range`` works on both.
    for i in range(phase, length):
        index = i - phase
        out[i] = (out[index] + out[index + 1]) * 0.5 * decay
    return out
def pluck(freq, length, decay=0.998, rate=44100):
    ''' Create a pluck noise at 'freq' by feeding white noise through a ring
    buffer (Karplus-Strong string synthesis).
    http://en.wikipedia.org/wiki/Karplus-Strong_algorithm
    '''
    period = int(rate / float(freq))
    noise = numpy.random.random(period) * 2 - 1
    return ringbuffer(noise, length, decay, rate)
| [
"ezra.varady@students.olin.edu"
] | ezra.varady@students.olin.edu |
2d441b942de17b1981ea070088659addc116d4ac | 4f3a4c194451eae32f1ff7cf3b0db947e3892365 | /142/main.py | 7dd2d69286c4280a2dc6408e5232b45fffb6d8a6 | [] | no_license | szhongren/leetcode | 84dd848edbfd728b344927f4f3c376b89b6a81f4 | 8cda0518440488992d7e2c70cb8555ec7b34083f | refs/heads/master | 2021-12-01T01:34:54.639508 | 2021-11-30T05:54:45 | 2021-11-30T05:54:45 | 83,624,410 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,177 | py | """
Given a linked list, return the node where the cycle begins. If there is no cycle, return null.
Note: Do not modify the linked list.
Follow up:
Can you solve it without using extra space?
"""
# Definition for singly-linked list.
class ListNode(object):
    """One node of a singly linked list."""
    def __init__(self, x):
        self.val = x
        self.next = None
def make_list(ls):
    """Build a linked list from the values in ``ls``; return its head node,
    or None when ``ls`` is empty."""
    if not ls:
        return None
    nodes = [ListNode(value) for value in ls]
    for left, right in zip(nodes, nodes[1:]):
        left.next = right
    return nodes[0]
class Solution(object):
    """LeetCode 142: locate the node where a linked list's cycle begins."""
    def detectCycle(self, head):
        """
        :type head: ListNode
        :rtype: ListNode
        """
        if head is None:
            return None
        slow = fast = head
        while slow.next and fast.next and fast.next.next:
            slow = slow.next
            fast = fast.next.next
            if slow is fast:
                # Floyd's tortoise-and-hare: once the pointers meet, a third
                # pointer restarted from the head meets ``slow`` exactly at
                # the cycle's entry node.
                entry = head
                while entry is not slow:
                    entry = entry.next
                    slow = slow.next
                return slow
        return None
| [
"shao.zhongren@gmail.com"
] | shao.zhongren@gmail.com |
f22619865182d3c103c4581589755fccfd1cdb48 | d835c2969b79d28dc034313ec2799deffdbf0f80 | /Products/models.py | 8fff49ed6b8744ed7a0fec113fbf5b2f0e850c18 | [] | no_license | RoodrigoRoot/Agroquimicos | 139e087cf4e14f369704f1d2ab8cd4293f22ca1f | 431d553c24b92c5f3cd5305944035ec01f723f15 | refs/heads/master | 2021-03-03T17:20:41.328778 | 2020-03-09T08:01:02 | 2020-03-09T08:01:02 | 245,975,823 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,016 | py | from django.db import models
from django.contrib.auth.models import User
from django.utils.text import slugify
from django.db.models.signals import pre_save
# Create your models here.
class Category(models.Model):
    """Product category describing the physical form of the agrochemical."""
    CATEGORY = [
        ("Liquido", "Liquido"),
        ("Polvo", "Polvo"),
    ]
    name = models.CharField("Nombre", max_length=100, choices=CATEGORY)
    description = models.TextField("Descripción", null=True, blank=True)
    created_at = models.DateTimeField(auto_now_add=True, auto_now=False)
    modified_at = models.DateTimeField(auto_now_add=False, auto_now=True)
    class Meta:
        verbose_name = "Categoria"
        verbose_name_plural = "Categorias"
    def __str__(self):
        return self.name
class Product(models.Model):
    """An agrochemical product; its slug is derived from name and category."""
    name = models.CharField("Producto", max_length=100, blank=False, null=False)
    description = models.TextField("Descripción", blank=True, null=True)
    category = models.ForeignKey(Category, verbose_name="Categoria", on_delete=models.CASCADE)
    slug = models.SlugField(unique=True)
    user = models.ForeignKey(User, verbose_name="Usuario", on_delete=models.CASCADE)
    created_at = models.DateTimeField(auto_now_add=True, auto_now=False)
    modified_at = models.DateTimeField(auto_now_add=False, auto_now=True)
    class Meta:
        verbose_name = "Producto"
        verbose_name_plural = "Productos"
    def __str__(self):
        return self.name
def create_slug_product(sender, instance, *args, **kwargs):
    # pre_save hook: derive the slug from the product's name and category.
    # NOTE(review): slugs are not de-duplicated here, so two products sharing
    # name and category would collide with SlugField(unique=True) -- confirm.
    instance.slug = slugify("{}-{}".format(instance.name, instance.category))
pre_save.connect(create_slug_product, sender=Product)
class DetailProduct(models.Model):
    """Commercial details (expiry date and price) for a single product."""
    product = models.OneToOneField(Product, on_delete=models.CASCADE)
    expiration = models.DateField("Expiración", auto_now=False, auto_now_add=False)
    price = models.FloatField("Precio")
    created_at = models.DateTimeField(auto_now_add=True, auto_now=False)
    modified_at = models.DateTimeField(auto_now_add=False, auto_now=True)
    user = models.ForeignKey(User, verbose_name="Usuario", on_delete=models.CASCADE)
    class Meta:
        verbose_name = "Detalle del Producto"
        verbose_name_plural = "Detalles del Producto"
    def __str__(self):
        return "Detalles de {}".format(self.product)
class Warehouse(models.Model):
    """Stock level for a single product.

    Bug fix: the original declared the ``user`` foreign key twice; the first
    declaration was dead code (the second assignment silently overrode it),
    so it has been removed.
    """
    product = models.OneToOneField(Product, on_delete=models.CASCADE)
    quantity = models.IntegerField("Cantidad", default=0, blank=False, null=False)
    user = models.ForeignKey(User, verbose_name="Usuario", on_delete=models.CASCADE)
    created_at = models.DateTimeField(auto_now_add=True, auto_now=False)
    modified_at = models.DateTimeField(auto_now_add=False, auto_now=True)
    class Meta:
        verbose_name = "Almacen"
        verbose_name_plural = "Almacen"
    def __str__(self):
        return "{} : {}".format(self.product, self.quantity)
"roodrigo@gmail.com"
] | roodrigo@gmail.com |
ca30c1b795a7461a9b22a0129327db6d5ad19be3 | 92cdfaf5e5b134845ba20b08bfd08c33b1c6ed61 | /logging.py | 00db9be4ddbec52558c0433158ac25453239dd1f | [] | no_license | NETsharing/LOG_coloring | d1bce7eb31a0bd2ee859922ed255c907061626b7 | 8d3eb5bf15ad897d446c2d7f2ee24bff32254a6c | refs/heads/master | 2022-10-28T00:04:02.843176 | 2020-06-10T08:03:49 | 2020-06-10T08:03:49 | 271,216,341 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 976 | py | logging.addLevelName(logging.DEBUG, "\033[1;34m{}\033[1;0m".format(logging.getLevelName(logging.DEBUG)))
# Wrap the remaining level names in ANSI colour codes so console output is
# easy to scan (green INFO, yellow WARNING, red ERROR, red-background CRITICAL).
logging.addLevelName(logging.INFO, "\033[1;32m{}\033[1;0m".format(logging.getLevelName(logging.INFO)))
logging.addLevelName(logging.WARNING, "\033[1;33m{}\033[1;0m".format(logging.getLevelName(logging.WARNING)))
logging.addLevelName(logging.ERROR, "\033[1;31m{}\033[1;0m".format(logging.getLevelName(logging.ERROR)))
logging.addLevelName(logging.CRITICAL, "\033[1;41m{}\033[1;0m".format(logging.getLevelName(logging.CRITICAL)))
# NOTE(review): ``config`` is not defined anywhere in this file; it must be
# provided (e.g. a parsed config mapping) before this module runs -- confirm.
LOG_LEVEL = config['logging']['log_level']
LOG_PATH = config['logging']['log_path']
FORMATTER = logging.Formatter(config['logging']['log_format'])
# Console handler.
stream_handler = logging.StreamHandler()
stream_handler.setLevel(LOG_LEVEL)
stream_handler.setFormatter(FORMATTER)
logger = logging.getLogger()
logger.addHandler(stream_handler)
# File handler.
file_handler = logging.FileHandler(LOG_PATH)
file_handler.setLevel(LOG_LEVEL)
file_handler.setFormatter(FORMATTER)
# Bug fix: the file handler was fully configured but never attached to the
# logger, so nothing was ever written to LOG_PATH.
# NOTE(review): the root logger's own level is left at the default (WARNING);
# if LOG_LEVEL is lower, ``logger.setLevel(LOG_LEVEL)`` is also needed.
logger.addHandler(file_handler)
| [
"noreply@github.com"
] | noreply@github.com |
f2b84c17b5f2dc1d5d4926d53f21fd58c01a5549 | 8049ed737af21d9a4696b6a403f549393c099088 | /TP4/tp4-parte4.py | 7d1f0287d0fcf069b53e7794bf6d2c301f8f2e41 | [] | no_license | Rcastagnola/PDS | ed122c6d9182cd53d48b477da490a1baecb85abc | 22e7e502f867a7f5766d29464fa69060f0e2569c | refs/heads/master | 2020-03-30T06:45:04.165112 | 2018-12-21T19:51:19 | 2018-12-21T19:51:19 | 150,881,913 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,751 | py | import warnings
warnings.filterwarnings('ignore')
import scipy.signal as sig
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
import scipy.io as sio
def vertical_flaten(a):
    """Reshape a 1-D array of n samples into an (n, 1) column vector."""
    n_rows = a.shape[0]
    return a.reshape(n_rows, 1)
# --- Load the ECG record from the MATLAB file and flatten it to a 1-D signal.
mpl.rcParams['figure.figsize'] = (5,4)
sio.whosmat('ECG_TP4.mat')
mat_struct = sio.loadmat('ECG_TP4.mat')
señal = mat_struct['ecg_lead']
# NOTE(review): flatten(1) relies on the old numeric 'order' argument; newer
# numpy expects flatten('F') -- confirm the installed numpy accepts it.
señal = señal.flatten(1)
N = len(señal)
fs = 1000 # Hz
nyq_frec = fs / 2
#hb_1 = vertical_flaten(mat_struct['heartbeat_pattern1'])
#hb_2 = vertical_flaten(mat_struct['heartbeat_pattern2'])
#plt.figure(1)
#plt.plot(señal[0:12000])
#sp = np.fft.fft(señal[0:10000])
#plt.figure(2)
#plt.plot(np.absolute(sp[0:100]))
######################################################################
# --- Band-pass design template: pass band 2-40 Hz, stop bands below 1 Hz and
# above 45 Hz, with 0.5 dB pass-band ripple and 40 dB stop-band attenuation.
ripple = 0.5 # dB
atenuacion = 40 # dB
ws1 = 1.0 #Hz
wp1 = 2.0 #Hz
wp2 = 40.0 #Hz
ws2 = 45.0 #Hz
# Band-edge frequencies normalized to the Nyquist frequency.
frecs = np.array([0.0, ws1, wp1, wp2, ws2, nyq_frec ]) / nyq_frec
gains = np.array([-atenuacion, -atenuacion, -ripple, -ripple, -atenuacion, -atenuacion])
# Convert the gain template from dB to a linear scale.
gains = 10**(gains/20)
#######################################################################
# --- Design four band-pass filters (two FIR, two IIR) and apply each one to
# the ECG with zero-phase filtering (filtfilt/sosfiltfilt).
cant_coef = 501
# FIR via frequency sampling with a Hamming window.
FiltroWin = sig.firwin2(cant_coef, frecs, gains , window='hamming' )
_, spWin = sig.freqz(FiltroWin)
señalWin = sig.filtfilt(FiltroWin,1, señal)
#######################################################################
# FIR via Parks-McClellan (remez); band edges here are normalized to fs = 1.
FiltroRemez = sig.remez(501, [0, 0.001, 0.002, 0.04, 0.045,0.5], [0, 1, 0])
_, spRemez = sig.freqz(FiltroRemez)
señalRemez = sig.filtfilt(FiltroRemez,1, señal)
########################################################################
# IIR Butterworth, second-order sections for numerical stability.
FiltroButter = sig.iirdesign(wp=np.array([wp1, wp2]) / nyq_frec, ws=np.array([ws1, ws2]) / nyq_frec, gpass=0.5, gstop=40., analog=False, ftype='butter', output='sos')
_, spButter = sig.sosfreqz(FiltroButter)
señalButter = sig.sosfiltfilt(FiltroButter, señal)
###########################################################################
# IIR Chebyshev type II, second-order sections.
FiltroCheby = sig.iirdesign(wp=np.array([wp1, wp2]) / nyq_frec, ws=np.array([ws1, ws2]) / nyq_frec, gpass=0.5, gstop=40., analog=False, ftype='cheby2', output='sos')
_, spCheby = sig.sosfreqz(FiltroCheby)
señalCheby = sig.sosfiltfilt(FiltroCheby, señal)
########################################################################
# --- Sample-index windows used for the comparison plots below.  A and B are
# regions plotted under the title "Zona con interferencia"; C, D and E under
# "Zona sin interferencia".
LimitInfA = int(12*60*fs)
LimitSupA = int(12.4*60*fs)
LimitInfB = int(15*60*fs)
LimitSupB = int(15.2*60*fs)
LimitInfC = int(5*60*fs)
LimitSupC = int(5.2*60*fs)
LimitInfD = int(4000)
LimitSupD = int(5500)
LimitInfE = int(10000)
LimitSupE = int(11000)
# Figure 1: magnitude response (dB) of the four filters over the first 100
# freqz bins.  NOTE(review): the x-axis is the bin index, not Hz -- confirm.
plt.figure(1)
plt.title('Respuesta de los filtros')
plt.plot(20 * np.log10(abs(spWin[0:100])),label='Hamming')
plt.plot(20 * np.log10(abs(spButter[0:100])),label='Butter')
plt.plot(20 * np.log10(abs(spRemez[0:100])),label='Remez')
plt.plot(20 * np.log10(abs(spCheby[0:100])),label='Cheby')
axes_hdl = plt.gca()
axes_hdl.legend()
# Figures 2-3: raw ECG vs. the four filtered versions in the two windows
# labelled as containing interference.
plt.figure(2)
plt.title('Zona con interferencia')
plt.plot(señal[LimitInfA:LimitSupA],label='ECG')
plt.plot(señalWin[LimitInfA:LimitSupA],label='Hamming')
plt.plot(señalButter[LimitInfA:LimitSupA],label='Butter')
plt.plot(señalRemez[LimitInfA:LimitSupA],label='Remez')
plt.plot(señalCheby[LimitInfA:LimitSupA],label='Cheby')
axes_hdl = plt.gca()
axes_hdl.legend()
plt.figure(3)
plt.title('Zona con interferencia')
plt.plot(señal[LimitInfB:LimitSupB],label='ECG')
plt.plot(señalWin[LimitInfB:LimitSupB],label='Hamming')
plt.plot(señalButter[LimitInfB:LimitSupB],label='Butter')
plt.plot(señalRemez[LimitInfB:LimitSupB],label='Remez')
plt.plot(señalCheby[LimitInfB:LimitSupB],label='Cheby')
axes_hdl = plt.gca()
axes_hdl.legend()
# Figures 4-6: the same comparison in windows labelled interference-free.
# NOTE(review): plt.show() is never called here; presumably the script runs
# in an interactive backend -- confirm.
plt.figure(4)
plt.title('Zona sin interferencia')
plt.plot(señal[LimitInfC:LimitSupC],label='ECG')
plt.plot(señalWin[LimitInfC:LimitSupC],label='Hamming')
plt.plot(señalButter[LimitInfC:LimitSupC],label='Butter')
plt.plot(señalRemez[LimitInfC:LimitSupC],label='Remez')
plt.plot(señalCheby[LimitInfC:LimitSupC],label='Cheby')
axes_hdl = plt.gca()
axes_hdl.legend()
plt.figure(5)
plt.title('Zona sin interferencia')
plt.plot(señal[LimitInfD:LimitSupD],label='ECG')
plt.plot(señalWin[LimitInfD:LimitSupD],label='Hamming')
plt.plot(señalButter[LimitInfD:LimitSupD],label='Butter')
plt.plot(señalRemez[LimitInfD:LimitSupD],label='Remez')
plt.plot(señalCheby[LimitInfD:LimitSupD],label='Cheby')
axes_hdl = plt.gca()
axes_hdl.legend()
plt.figure(6)
plt.title('Zona sin interferencia')
plt.plot(señal[LimitInfE:LimitSupE],label='ECG')
plt.plot(señalWin[LimitInfE:LimitSupE],label='Hamming')
plt.plot(señalButter[LimitInfE:LimitSupE],label='Butter')
plt.plot(señalRemez[LimitInfE:LimitSupE],label='Remez')
plt.plot(señalCheby[LimitInfE:LimitSupE],label='Cheby')
axes_hdl = plt.gca()
axes_hdl.legend()
| [
"r.castagnola00@gmail.com"
] | r.castagnola00@gmail.com |
023f79199e787217370b883af1d2f8132672ba3d | 2107ca4708056eca65427748269d4fc810202840 | /examples/sites/holoviews/holoviews/core/data/pandas.py | c3898e37805556e1a6cc3a5d2969806d849581b5 | [] | permissive | python411/nbsite | 0fd98dbf889a3851df53ab920ae45f50d79c6c43 | 8977cce7cf54c73a687fc0ff705eee4644bc2684 | refs/heads/master | 2022-12-21T14:21:52.444689 | 2020-10-01T17:48:36 | 2020-10-01T17:48:36 | 300,369,678 | 0 | 0 | BSD-3-Clause | 2020-10-01T17:48:37 | 2020-10-01T17:44:31 | null | UTF-8 | Python | false | false | 11,330 | py | from __future__ import absolute_import
from distutils.version import LooseVersion
try:
import itertools.izip as zip
except ImportError:
pass
import numpy as np
import pandas as pd
from .interface import Interface, DataError
from ..dimension import Dimension
from ..element import Element
from ..dimension import OrderedDict as cyODict
from ..ndmapping import NdMapping, item_check
from .. import util
class PandasInterface(Interface):
types = (pd.DataFrame if pd else None,)
datatype = 'dataframe'
@classmethod
def dimension_type(cls, columns, dim):
name = columns.get_dimension(dim, strict=True).name
idx = list(columns.data.columns).index(name)
return columns.data.dtypes[idx].type
@classmethod
def init(cls, eltype, data, kdims, vdims):
element_params = eltype.params()
kdim_param = element_params['kdims']
vdim_param = element_params['vdims']
if util.is_dataframe(data):
if isinstance(kdim_param.bounds[1], int):
ndim = min([kdim_param.bounds[1], len(kdim_param.default)])
else:
ndim = None
nvdim = vdim_param.bounds[1] if isinstance(vdim_param.bounds[1], int) else None
if kdims and vdims is None:
vdims = [c for c in data.columns if c not in kdims]
elif vdims and kdims is None:
kdims = [c for c in data.columns if c not in vdims][:ndim]
elif kdims is None:
kdims = list(data.columns[:ndim])
if vdims is None:
vdims = [d for d in data.columns[ndim:((ndim+nvdim) if nvdim else None)]
if d not in kdims]
elif kdims == [] and vdims is None:
vdims = list(data.columns[:nvdim if nvdim else None])
if any(isinstance(d, (np.int64, int)) for d in kdims+vdims):
raise DataError("pandas DataFrame column names used as dimensions "
"must be strings not integers.", cls)
else:
# Check if data is of non-numeric type
# Then use defined data type
kdims = kdims if kdims else kdim_param.default
vdims = vdims if vdims else vdim_param.default
columns = [d.name if isinstance(d, Dimension) else d
for d in kdims+vdims]
if isinstance(data, dict) and all(c in data for c in columns):
data = cyODict(((d, data[d]) for d in columns))
elif isinstance(data, (list, dict)) and data in ([], {}):
data = None
elif (isinstance(data, dict) and not all(d in data for d in columns) and
not any(isinstance(v, np.ndarray) for v in data.values())):
column_data = sorted(data.items())
k, v = column_data[0]
if len(util.wrap_tuple(k)) != len(kdims) or len(util.wrap_tuple(v)) != len(vdims):
raise ValueError("Dictionary data not understood, should contain a column "
"per dimension or a mapping between key and value dimension "
"values.")
column_data = zip(*((util.wrap_tuple(k)+util.wrap_tuple(v))
for k, v in column_data))
data = cyODict(((c, col) for c, col in zip(columns, column_data)))
elif isinstance(data, np.ndarray):
if data.ndim == 1:
if eltype._auto_indexable_1d and len(kdims)+len(vdims)>1:
data = (np.arange(len(data)), data)
else:
data = np.atleast_2d(data).T
else:
data = tuple(data[:, i] for i in range(data.shape[1]))
if isinstance(data, tuple):
data = [np.array(d) if not isinstance(d, np.ndarray) else d for d in data]
if not cls.expanded(data):
raise ValueError('PandasInterface expects data to be of uniform shape.')
data = pd.DataFrame.from_items([(c, d) for c, d in
zip(columns, data)])
elif isinstance(data, dict) and any(c not in data for c in columns):
raise ValueError('PandasInterface could not find specified dimensions in the data.')
else:
data = pd.DataFrame(data, columns=columns)
return data, {'kdims':kdims, 'vdims':vdims}, {}
@classmethod
def isscalar(cls, dataset, dim):
name = dataset.get_dimension(dim, strict=True).name
return len(dataset.data[name].unique()) == 1
@classmethod
def validate(cls, dataset, vdims=True):
dim_types = 'key' if vdims else 'all'
dimensions = dataset.dimensions(dim_types, label='name')
not_found = [d for d in dimensions if d not in dataset.data.columns]
if not_found:
raise DataError("Supplied data does not contain specified "
"dimensions, the following dimensions were "
"not found: %s" % repr(not_found), cls)
    @classmethod
    def range(cls, columns, dimension):
        """Return the (min, max) of the column backing *dimension*."""
        column = columns.data[columns.get_dimension(dimension, strict=True).name]
        if column.dtype.kind == 'O':
            # Object dtype: min()/max() are unreliable, so sort the column
            # and take its two ends instead.
            # pandas < 0.17.0 only provides the deprecated Series.sort API.
            if (not isinstance(columns.data, pd.DataFrame) or
                LooseVersion(pd.__version__) < '0.17.0'):
                column = column.sort(inplace=False)
            else:
                column = column.sort_values()
            return column.iloc[0], column.iloc[-1]
        else:
            return (column.min(), column.max())
@classmethod
def concat(cls, columns_objs):
cast_objs = cls.cast(columns_objs)
return pd.concat([col.data for col in cast_objs])
@classmethod
def groupby(cls, columns, dimensions, container_type, group_type, **kwargs):
index_dims = [columns.get_dimension(d, strict=True) for d in dimensions]
element_dims = [kdim for kdim in columns.kdims
if kdim not in index_dims]
group_kwargs = {}
if group_type != 'raw' and issubclass(group_type, Element):
group_kwargs = dict(util.get_param_values(columns),
kdims=element_dims)
group_kwargs.update(kwargs)
group_by = [d.name for d in index_dims]
data = [(k, group_type(v, **group_kwargs)) for k, v in
columns.data.groupby(group_by, sort=False)]
if issubclass(container_type, NdMapping):
with item_check(False):
return container_type(data, kdims=index_dims)
else:
return container_type(data)
@classmethod
def aggregate(cls, columns, dimensions, function, **kwargs):
data = columns.data
cols = [d.name for d in columns.kdims if d in dimensions]
vdims = columns.dimensions('value', label='name')
reindexed = data[cols+vdims]
if function in [np.std, np.var]:
# Fix for consistency with other backend
# pandas uses ddof=1 for std and var
fn = lambda x: function(x, ddof=0)
else:
fn = function
if len(dimensions):
grouped = reindexed.groupby(cols, sort=False)
return grouped.aggregate(fn, **kwargs).reset_index()
else:
agg = reindexed.apply(fn, **kwargs)
return pd.DataFrame.from_items([(col, [v]) for col, v in
zip(agg.index, agg.values)])
@classmethod
def unpack_scalar(cls, columns, data):
"""
Given a columns object and data in the appropriate format for
the interface, return a simple scalar.
"""
if len(data) != 1 or len(data.columns) > 1:
return data
return data.iat[0,0]
@classmethod
def reindex(cls, columns, kdims=None, vdims=None):
# DataFrame based tables don't need to be reindexed
return columns.data
@classmethod
def redim(cls, dataset, dimensions):
column_renames = {k: v.name for k, v in dimensions.items()}
return dataset.data.rename(columns=column_renames)
@classmethod
def sort(cls, columns, by=[], reverse=False):
import pandas as pd
cols = [columns.get_dimension(d, strict=True).name for d in by]
if (not isinstance(columns.data, pd.DataFrame) or
LooseVersion(pd.__version__) < '0.17.0'):
return columns.data.sort(columns=cols, ascending=not reverse)
return columns.data.sort_values(by=cols, ascending=not reverse)
@classmethod
def select(cls, columns, selection_mask=None, **selection):
df = columns.data
if selection_mask is None:
selection_mask = cls.select_mask(columns, selection)
indexed = cls.indexed(columns, selection)
df = df.iloc[selection_mask]
if indexed and len(df) == 1 and len(columns.vdims) == 1:
return df[columns.vdims[0].name].iloc[0]
return df
@classmethod
def values(cls, columns, dim, expanded=True, flat=True):
dim = columns.get_dimension(dim, strict=True)
data = columns.data[dim.name]
if not expanded:
return data.unique()
return data.values
@classmethod
def sample(cls, columns, samples=[]):
data = columns.data
mask = False
for sample in samples:
sample_mask = True
if np.isscalar(sample): sample = [sample]
for i, v in enumerate(sample):
sample_mask = np.logical_and(sample_mask, data.iloc[:, i]==v)
mask |= sample_mask
return data[mask]
@classmethod
def add_dimension(cls, columns, dimension, dim_pos, values, vdim):
data = columns.data.copy()
if dimension.name not in data:
data.insert(dim_pos, dimension.name, values)
return data
@classmethod
def as_dframe(cls, dataset):
"""
Returns the data of a Dataset as a dataframe avoiding copying
if it already a dataframe type.
"""
if issubclass(dataset.interface, PandasInterface):
return dataset.data
else:
return dataset.dframe()
@classmethod
def dframe(cls, columns, dimensions):
if dimensions:
return columns.data[dimensions]
else:
return columns.data.copy()
    @classmethod
    def iloc(cls, dataset, index):
        """Positional (rows, cols) indexing; unwraps to a scalar when both
        the row and the column selection are scalars."""
        rows, cols = index
        scalar = False
        columns = list(dataset.data.columns)
        if isinstance(cols, slice):
            cols = [d.name for d in dataset.dimensions()][cols]
        elif np.isscalar(cols):
            scalar = np.isscalar(rows)
            cols = [dataset.get_dimension(cols).name]
        else:
            cols = [dataset.get_dimension(d).name for d in index[1]]
        # Map dimension names to positional indices for DataFrame.iloc.
        cols = [columns.index(c) for c in cols]
        if np.isscalar(rows):
            rows = [rows]
        if scalar:
            return dataset.data.iloc[rows[0], cols[0]]
        return dataset.data.iloc[rows, cols]
Interface.register(PandasInterface)
| [
"noreply@github.com"
] | noreply@github.com |
d38191fa73340cb4127b28ff81cd00de74e6b5bc | e292dca36b668e661dc9b4fcd99a1cc7b4ebed5f | /polls/views.py | 0746b8cde3d1b26bb63bfaa680bcd51edfa6537b | [] | no_license | luochenUmich/Polls-Django- | 768e6d3c32f9b76a70bdde7c8f06c2f7186adf02 | a12068a227bf25111329da1d9c7feceff36c1107 | refs/heads/master | 2021-01-18T13:54:12.886719 | 2015-01-05T00:09:30 | 2015-01-05T00:09:30 | 28,781,189 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,174 | py | from django.shortcuts import render, get_object_or_404
from django.core.urlresolvers import reverse
from django.http import Http404, HttpResponse, HttpResponseRedirect
from django.template import RequestContext, loader
from django.views import generic

from polls.models import Choice, Question
# Create your views here.
class IndexView(generic.ListView):
    """Landing page: lists the five most recently published questions."""
    template_name = 'polls/index.html'
    context_object_name = 'latest_question_list'
    def get_queryset(self):
        """Return the last five published questions, newest first."""
        # NOTE(review): questions with a future pub_date are included —
        # confirm whether they should be filtered (pub_date__lte=now()).
        return Question.objects.order_by('-pub_date')[:5]
class DetailView(generic.DetailView):
    """Shows a single question with its choices (the voting form)."""
    model = Question
    template_name = 'polls/details.html'
class ResultsView(generic.DetailView):
    """Shows the vote tallies for a single question."""
    model = Question
    template_name = 'polls/results.html'
def vote(request, question_id):
    """Handle a vote POST for the given question.

    Redisplays the detail page with an error when no choice was submitted,
    otherwise increments the chosen answer and redirects to the results.
    Relies on ``Choice`` being imported from polls.models (it previously
    was not, so the except clause raised NameError).
    """
    question = get_object_or_404(Question, pk=question_id)
    try:
        selected_choice = question.choice_set.get(pk=request.POST['choice'])
    except (KeyError, Choice.DoesNotExist):
        # Redisplay the voting form with an error message. Template name
        # fixed to 'polls/details.html' so it matches DetailView above
        # (was 'polls/detail.html').
        return render(request, 'polls/details.html', {
            'question': question,
            'error_message': "You didn't select a choice.",
        })
    else:
        # NOTE(review): += is read-modify-write and can drop concurrent
        # votes; consider F('votes') + 1 if contention matters.
        selected_choice.votes += 1
        selected_choice.save()
        return HttpResponseRedirect(reverse('polls:results', args=(question.id,)))
| [
"luochen@umich.edu"
] | luochen@umich.edu |
f4be1784fe13e6274c766985a165f620b822bcb1 | 930309163b930559929323647b8d82238724f392 | /abc216_e.py | b2c5d66d2e922c823160cdcb8e9ca31ca835c4d4 | [] | no_license | GINK03/atcoder-solvers | 874251dffc9f23b187faa77c439b445e53f8dfe1 | b1e7ac6e9d67938de9a85df4a2f9780fb1fbcee7 | refs/heads/master | 2021-11-07T14:16:52.138894 | 2021-09-12T13:32:29 | 2021-09-12T13:32:29 | 11,724,396 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,123 | py |
import sys
import logging
def main():
    """Greedy: repeatedly take rides at the current highest satisfaction.

    Values are processed level by level over the descending-sorted array;
    ``tri(x)`` is the sum 1 + 2 + ... + x, used to add whole value ranges
    at once.
    """
    n, k = map(int, input().split())
    a = list(map(int, input().split()))
    a = sorted(a, reverse=True) + [0]

    def tri(x):
        return x * (x + 1) // 2

    remaining = k
    total = 0
    for i, (cur, nxt) in enumerate(zip(a, a[1:])):
        if cur == nxt:
            continue
        width = i + 1
        if remaining >= width * (cur - nxt):
            # Enough budget to drain every level from cur down to nxt.
            total += (tri(cur) - tri(nxt)) * width
            remaining -= width * (cur - nxt)
        else:
            # Partially drain: full_levels complete rows plus a leftover.
            full_levels, leftover = divmod(remaining, width)
            logging.debug((full_levels, leftover))
            total += (tri(cur) - tri(cur - full_levels)) * width
            total += (cur - full_levels) * leftover
            remaining = 0
        if remaining == 0:
            break
    print(total)
if __name__ == "__main__":
    # Enable debug logging with the --debug command-line flag.
    loglevel = "DEBUG" if "--debug" in sys.argv else "WARNING"
    numeric_level = getattr(logging, loglevel, None)
    log_format = "%(levelname)s (%(asctime)s.%(msecs)d): %(message)s"
    logging.basicConfig(level=numeric_level, format=log_format, datefmt="%I:%M:%S")
    main()
| [
"gim.kobayashi@gmail.com"
] | gim.kobayashi@gmail.com |
5327fd472f360b5a2ea48a5007c7a3dbba670729 | 8a69d04987be9171ef2074249d5577cec1da4d1a | /utilities/custom_logger.py | 28d6b9692a4d320c5fbbec67fbd3cb476e3503ee | [] | no_license | ppapazov/LKI | ca3646f775c24dae6bfee7ab7926d600f1994acd | 1ab8841dcd0251f4b9a8e059f34c7be6b8089de8 | refs/heads/master | 2021-05-10T16:57:50.050963 | 2018-01-28T11:49:25 | 2018-01-28T11:49:25 | 118,594,616 | 0 | 0 | null | 2018-01-28T11:07:29 | 2018-01-23T10:25:59 | Python | UTF-8 | Python | false | false | 650 | py | import inspect
import logging
def customLogger(logLevel=logging.DEBUG):
    """Return a logger named after the calling function/method.

    Records are appended to ``automation.log``; the file handler filters
    at *logLevel* while the logger itself accepts everything.
    """
    # Gets the name of the class / method from where this method was called
    loggerName = inspect.stack()[1][3]
    logger = logging.getLogger(loggerName)
    # By default log all messages
    logger.setLevel(logging.DEBUG)
    # logging.getLogger returns the same object for the same name; only
    # attach a handler on the first call, otherwise every subsequent call
    # adds another handler and each record is written multiple times.
    if not logger.handlers:
        fileHandler = logging.FileHandler("automation.log", mode='a')
        fileHandler.setLevel(logLevel)
        formatter = logging.Formatter(
            '%(asctime)s - %(name)s - %(levelname)s - %(message)s',
            datefmt='%m%d%Y %I:%M:%S %p')
        fileHandler.setFormatter(formatter)
        logger.addHandler(fileHandler)
    return logger
"4work.tasks@gmail.com"
] | 4work.tasks@gmail.com |
d3dcbf4bdf17db168481586b6de9e68c9e6a9311 | f6242191f90e0b0f40a84c44028b3bf1a0372b68 | /src/simulator/aurora.py | 9c90ae8450c9d79aac5166e2033225df7fa1031c | [
"Apache-2.0"
] | permissive | gcgeng/PCC-RL | c4daa2507df39202c74cd01c6ec989e1b83974e0 | 43f04fe4ee1378c6c2ea67254b2e4b598254d05e | refs/heads/master | 2023-09-06T05:32:12.385796 | 2021-07-27T06:27:37 | 2021-07-27T06:27:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 21,969 | py | import csv
import logging
import multiprocessing as mp
import os
import shutil
import time
import types
from typing import List
import warnings
warnings.simplefilter(action='ignore', category=FutureWarning)
from mpi4py.MPI import COMM_WORLD
from mpi4py.futures import MPIPoolExecutor
import gym
import numpy as np
import tensorflow as tf
tf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.ERROR)
from stable_baselines import PPO1
from stable_baselines.bench import Monitor
from stable_baselines.common.callbacks import BaseCallback
from stable_baselines.common.policies import FeedForwardPolicy
from stable_baselines.results_plotter import load_results, ts2xy
from simulator import my_heuristic, network
from simulator.constants import BYTES_PER_PACKET
from simulator.trace import generate_trace, Trace, generate_traces
from common.utils import set_tf_loglevel, pcc_aurora_reward
from plot_scripts.plot_packet_log import PacketLog
from udt_plugins.testing.loaded_agent import LoadedModel
if type(tf.contrib) != types.ModuleType: # if it is LazyLoader
tf.contrib._warning = None
set_tf_loglevel(logging.FATAL)
class MyMlpPolicy(FeedForwardPolicy):
    """MLP actor-critic policy: two hidden layers (32, 16) for both the
    policy (pi) and the value-function (vf) networks."""
    def __init__(self, sess, ob_space, ac_space, n_env, n_steps, n_batch,
                 reuse=False, **_kwargs):
        super(MyMlpPolicy, self).__init__(sess, ob_space, ac_space, n_env,
                                          n_steps, n_batch, reuse, net_arch=[
                                              {"pi": [32, 16], "vf": [32, 16]}],
                                          feature_extraction="mlp", **_kwargs)
class SaveOnBestTrainingRewardCallback(BaseCallback):
    """
    Callback run during PPO1 training: every ``check_freq`` steps it
    checkpoints the model and evaluates it on the validation traces
    (MPI rank 0 only), appending one row to validation_log.csv.

    :param aurora: the owning Aurora agent (used for MPI rank and test()).
    :param check_freq: (int) steps between checkpoint/validation rounds.
    :param log_dir: (str) folder for checkpoints and the validation log.
    :param val_traces: traces used for validation (defaults to none).
    :param verbose: (int)
    """
    def __init__(self, aurora, check_freq: int, log_dir: str, val_traces=None,
                 verbose=0, patience=10, steps_trained=0, config_file=None,
                 tot_trace_cnt=100, update_training_traces_freq=5):
        super(SaveOnBestTrainingRewardCallback, self).__init__(verbose)
        self.aurora = aurora
        self.check_freq = check_freq
        self.log_dir = log_dir
        self.save_path = log_dir
        self.best_mean_reward = -np.inf
        # Avoid the previous mutable default argument ([]): each instance
        # now gets its own list.
        self.val_traces = val_traces if val_traces is not None else []
        self.config_file = config_file
        self.tot_trace_cnt = tot_trace_cnt
        self.update_training_traces_freq = update_training_traces_freq
        # Only MPI rank 0 writes the validation log.
        if self.aurora.comm.Get_rank() == 0:
            self.val_log_writer = csv.writer(
                open(os.path.join(log_dir, 'validation_log.csv'), 'w', 1),
                delimiter='\t', lineterminator='\n')
            self.val_log_writer.writerow(
                ['n_calls', 'num_timesteps', 'mean_validation_reward', 'loss',
                 'throughput', 'latency', 'sending_rate', 'tot_t_used(min)'])
        else:
            self.val_log_writer = None
        self.best_val_reward = -np.inf
        self.patience = patience
        self.val_times = 0
        self.t_start = time.time()
        self.steps_trained = steps_trained

    def _init_callback(self) -> None:
        # Create folder if needed
        if self.save_path is not None:
            os.makedirs(self.save_path, exist_ok=True)

    def _on_step(self) -> bool:
        """Checkpoint and validate every check_freq calls (rank 0 only)."""
        if self.n_calls % self.check_freq == 0:
            if self.aurora.comm.Get_rank() == 0:
                with self.model.graph.as_default():
                    saver = tf.train.Saver()
                    saver.save(
                        self.model.sess, os.path.join(
                            self.save_path, "model_step_{}.ckpt".format(
                                self.n_calls)))
                avg_rewards = []
                avg_losses = []
                avg_tputs = []
                avg_delays = []
                avg_send_rates = []
                for idx, val_trace in enumerate(self.val_traces):
                    ts_list, val_rewards, loss_list, tput_list, delay_list, \
                        send_rate_list, action_list, obs_list, mi_list, pkt_log = self.aurora.test(
                            val_trace, self.log_dir)
                    avg_rewards.append(np.mean(np.array(val_rewards)))
                    avg_losses.append(np.mean(np.array(loss_list)))
                    avg_tputs.append(float(np.mean(np.array(tput_list))))
                    avg_delays.append(np.mean(np.array(delay_list)))
                    avg_send_rates.append(
                        float(np.mean(np.array(send_rate_list))))
                # One tab-separated summary row per validation round.
                self.val_log_writer.writerow(
                    map(lambda t: "%.3f" % t,
                        [float(self.n_calls), float(self.num_timesteps),
                         np.mean(np.array(avg_rewards)),
                         np.mean(np.array(avg_losses)),
                         np.mean(np.array(avg_tputs)),
                         np.mean(np.array(avg_delays)),
                         np.mean(np.array(avg_send_rates)),
                         (time.time() - self.t_start) / 60]))
        return True
def save_model_to_serve(model, export_dir):
    """Export a trained PPO1 model as a TensorFlow SavedModel for serving.

    Exposes the observation placeholder as input "ob" and both the
    deterministic ("act") and sampled ("stochastic_act") actions as
    outputs under the default serving signature.
    """
    # Start from a clean directory; SavedModelBuilder refuses to overwrite.
    if os.path.exists(export_dir):
        shutil.rmtree(export_dir)
    with model.graph.as_default():
        pol = model.policy_pi  # act_model
        obs_ph = pol.obs_ph
        act = pol.deterministic_action
        sampled_act = pol.action
        obs_input = tf.saved_model.utils.build_tensor_info(obs_ph)
        outputs_tensor_info = tf.saved_model.utils.build_tensor_info(act)
        stochastic_act_tensor_info = tf.saved_model.utils.build_tensor_info(
            sampled_act)
        signature = tf.saved_model.signature_def_utils.build_signature_def(
            inputs={"ob": obs_input},
            outputs={"act": outputs_tensor_info,
                     "stochastic_act": stochastic_act_tensor_info},
            method_name=tf.saved_model.signature_constants.PREDICT_METHOD_NAME)
        signature_map = {tf.saved_model.signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY:
                         signature}
        model_builder = tf.saved_model.builder.SavedModelBuilder(export_dir)
        model_builder.add_meta_graph_and_variables(
            model.sess, tags=[tf.saved_model.tag_constants.SERVING],
            signature_def_map=signature_map,
            clear_devices=True)
        model_builder.save(as_text=True)
class Aurora():
    """PCC-Aurora congestion-control agent backed by stable-baselines PPO1.

    Can be constructed fresh, restored from a .ckpt checkpoint, or loaded
    from an exported SavedModel (inference only).
    """

    def __init__(self, seed, log_dir, timesteps_per_actorbatch,
                 pretrained_model_path=None, gamma=0.99, tensorboard_log=None,
                 delta_scale=1):
        """Build the PPO1 graph, optionally restoring pretrained weights.

        pretrained_model_path may be a .ckpt file (restored into a fresh
        PPO1 graph) or a SavedModel directory (wrapped in LoadedModel).
        """
        self.comm = COMM_WORLD
        self.delta_scale = delta_scale
        self.seed = seed
        self.log_dir = log_dir
        self.pretrained_model_path = pretrained_model_path
        self.steps_trained = 0
        # A dummy environment is required just to construct the PPO1 graph.
        dummy_trace = generate_trace(
            (10, 10), (2, 2), (50, 50), (0, 0), (100, 100))
        env = gym.make('PccNs-v0', traces=[dummy_trace],
                       train_flag=True, delta_scale=self.delta_scale)
        if pretrained_model_path is not None:
            if pretrained_model_path.endswith('.ckpt'):
                self.model = PPO1(MyMlpPolicy, env, verbose=1, seed=seed,
                                  optim_stepsize=0.001, schedule='constant',
                                  timesteps_per_actorbatch=timesteps_per_actorbatch,
                                  optim_batchsize=int(
                                      timesteps_per_actorbatch/4),
                                  optim_epochs=4,
                                  gamma=gamma, tensorboard_log=tensorboard_log, n_cpu_tf_sess=1)
                with self.model.graph.as_default():
                    saver = tf.train.Saver()
                    saver.restore(self.model.sess, pretrained_model_path)
                # Recover the step count from names like model_step_1000.ckpt
                # (was a bare ``except:``; narrowed to the errors int() and
                # the path split can actually raise).
                try:
                    self.steps_trained = int(os.path.splitext(
                        pretrained_model_path)[0].split('_')[-1])
                except (ValueError, IndexError):
                    self.steps_trained = 0
            else:
                # model is a tensorflow model to serve
                self.model = LoadedModel(pretrained_model_path)
        else:
            self.model = PPO1(MyMlpPolicy, env, verbose=1, seed=seed,
                              optim_stepsize=0.001, schedule='constant',
                              timesteps_per_actorbatch=timesteps_per_actorbatch,
                              optim_batchsize=int(timesteps_per_actorbatch/12),
                              optim_epochs=12,
                              gamma=gamma, tensorboard_log=tensorboard_log, n_cpu_tf_sess=1)
        self.timesteps_per_actorbatch = timesteps_per_actorbatch

    def train(self, config_file, total_timesteps, tot_trace_cnt,
              tb_log_name=""):
        """Train on randomly generated traces; checkpoint/validate via callback."""
        assert isinstance(self.model, PPO1)
        training_traces = generate_traces(config_file, tot_trace_cnt,
                                          duration=30, constant_bw=False)
        # generate validation traces
        validation_traces = generate_traces(
            config_file, 100, duration=30, constant_bw=False)
        env = gym.make('PccNs-v0', traces=training_traces,
                       train_flag=True, delta_scale=self.delta_scale,
                       config_file=config_file)
        env.seed(self.seed)
        self.model.set_env(env)
        # Create the callback: check every n steps and save best model
        callback = SaveOnBestTrainingRewardCallback(
            self, check_freq=self.timesteps_per_actorbatch, log_dir=self.log_dir,
            steps_trained=self.steps_trained, val_traces=validation_traces,
            config_file=config_file, tot_trace_cnt=tot_trace_cnt,
            update_training_traces_freq=10)
        self.model.learn(total_timesteps=total_timesteps,
                         tb_log_name=tb_log_name, callback=callback)

    def test_on_traces(self, traces: List[Trace], save_dirs: List[str]):
        """Run self.test serially on each (trace, save_dir) pair.

        Returns (results, pkt_logs) where each result zips the per-MI
        statistics of one trace.
        """
        results = []
        pkt_logs = []
        for trace, save_dir in zip(traces, save_dirs):
            ts_list, reward_list, loss_list, tput_list, delay_list, \
                send_rate_list, action_list, obs_list, mi_list, pkt_log = self.test(
                    trace, save_dir)
            result = list(zip(ts_list, reward_list, send_rate_list, tput_list,
                              delay_list, loss_list, action_list, obs_list, mi_list))
            pkt_logs.append(pkt_log)
            results.append(result)
        return results, pkt_logs

    def save_model(self):
        raise NotImplementedError

    def load_model(self):
        raise NotImplementedError

    def test(self, trace: Trace, save_dir: str):
        """Roll out the policy on a single trace, logging per-MI statistics.

        Writes aurora_simulation_log.csv and aurora_packet_log.csv to
        *save_dir* and returns the per-monitor-interval lists plus the raw
        packet log.
        """
        reward_list = []
        loss_list = []
        tput_list = []
        delay_list = []
        send_rate_list = []
        ts_list = []
        action_list = []
        mi_list = []
        obs_list = []
        os.makedirs(save_dir, exist_ok=True)
        with open(os.path.join(save_dir, 'aurora_simulation_log.csv'), 'w', 1) as f:
            writer = csv.writer(f, lineterminator='\n')
            writer.writerow(['timestamp', "target_send_rate", "send_rate",
                             'recv_rate', 'max_recv_rate', 'latency',
                             'loss', 'reward', "action", "bytes_sent",
                             "bytes_acked", "bytes_lost", "MI",
                             "send_start_time",
                             "send_end_time", 'recv_start_time',
                             'recv_end_time', 'latency_increase',
                             "packet_size", 'min_lat', 'sent_latency_inflation',
                             'latency_ratio', 'send_ratio',
                             'bandwidth', "queue_delay",
                             'packet_in_queue', 'queue_size', 'cwnd',
                             'ssthresh', "rto", "recv_ratio"])
            env = gym.make(
                'PccNs-v0', traces=[trace], delta_scale=self.delta_scale)
            env.seed(self.seed)
            obs = env.reset()
            while True:
                if isinstance(self.model, LoadedModel):
                    obs = obs.reshape(1, -1)
                    action = self.model.act(obs)
                    action = action['act'][0]
                else:
                    # Hold the rate (action 0) until the sender has data.
                    if env.net.senders[0].got_data:
                        action, _states = self.model.predict(
                            obs, deterministic=True)
                    else:
                        action = np.array([0])

                # get the new MI and stats collected in the MI
                sender_mi = env.senders[0].history.back()
                max_recv_rate = env.senders[0].max_tput
                throughput = sender_mi.get("recv rate")  # bits/sec
                send_rate = sender_mi.get("send rate")  # bits/sec
                latency = sender_mi.get("avg latency")
                loss = sender_mi.get("loss ratio")
                avg_queue_delay = sender_mi.get('avg queue delay')
                sent_latency_inflation = sender_mi.get('sent latency inflation')
                latency_ratio = sender_mi.get('latency ratio')
                send_ratio = sender_mi.get('send ratio')
                recv_ratio = sender_mi.get('recv ratio')
                reward = pcc_aurora_reward(
                    throughput / 8 / BYTES_PER_PACKET, latency, loss,
                    np.mean(trace.bandwidths) * 1e6 / 8 / BYTES_PER_PACKET,
                    np.mean(trace.delays) * 2 / 1e3)
                writer.writerow([
                    env.net.get_cur_time(), round(env.senders[0].rate * BYTES_PER_PACKET * 8, 0),
                    round(send_rate, 0), round(throughput, 0), round(max_recv_rate), latency, loss,
                    reward, action.item(), sender_mi.bytes_sent, sender_mi.bytes_acked,
                    sender_mi.bytes_lost, sender_mi.send_end - sender_mi.send_start,
                    sender_mi.send_start, sender_mi.send_end,
                    sender_mi.recv_start, sender_mi.recv_end,
                    sender_mi.get('latency increase'), sender_mi.packet_size,
                    sender_mi.get('conn min latency'), sent_latency_inflation,
                    latency_ratio, send_ratio,
                    env.links[0].get_bandwidth(
                        env.net.get_cur_time()) * BYTES_PER_PACKET * 8,
                    avg_queue_delay, env.links[0].pkt_in_queue, env.links[0].queue_size,
                    env.senders[0].cwnd, env.senders[0].ssthresh, env.senders[0].rto, recv_ratio])
                reward_list.append(reward)
                loss_list.append(loss)
                delay_list.append(latency * 1000)
                tput_list.append(throughput / 1e6)
                send_rate_list.append(send_rate / 1e6)
                ts_list.append(env.net.get_cur_time())
                action_list.append(action.item())
                mi_list.append(sender_mi.send_end - sender_mi.send_start)
                obs_list.append(obs.tolist())
                obs, rewards, dones, info = env.step(action)
                if dones:
                    break
        with open(os.path.join(save_dir, "aurora_packet_log.csv"), 'w', 1) as f:
            pkt_logger = csv.writer(f, lineterminator='\n')
            pkt_logger.writerow(['timestamp', 'packet_event_id', 'event_type',
                                 'bytes', 'cur_latency', 'queue_delay',
                                 'packet_in_queue', 'sending_rate', 'bandwidth'])
            pkt_logger.writerows(env.net.pkt_log)
        return ts_list, reward_list, loss_list, tput_list, delay_list, send_rate_list, action_list, obs_list, mi_list, env.net.pkt_log
def test_model(model_path: str, trace: Trace, save_dir: str, seed: int):
    """Standalone worker: build an Aurora agent and test it on one trace."""
    model = Aurora(seed, "", 10, model_path)
    # Actually run the rollout. Previously this call was commented out and
    # the function returned the undefined name ``ret`` (NameError).
    ret = model.test(trace, save_dir)
    return ret
| [
"kevinxzx95@gmail.com"
] | kevinxzx95@gmail.com |
37d88f3dc7fd2bf9f1bf31b65ed2a7117f9dd1ae | 92c7311a8c145b2d415901991a459bf7d2734929 | /venv/bin/sphinx-apidoc | 59f39fc0d44d64080a0014d328825aa96e92b50c | [] | no_license | liuyanglxh/py-web | 3aa1043b672a034d548bce7042c8e0cf8faa24b2 | 441ed2077faeabf38f1449762a6ce692bb6a1115 | refs/heads/master | 2022-11-20T15:37:39.612580 | 2020-05-29T10:41:32 | 2020-05-29T10:41:32 | 267,832,787 | 0 | 0 | null | 2022-11-17T15:08:32 | 2020-05-29T10:40:30 | Python | UTF-8 | Python | false | false | 251 | #!/Users/liuyang/Documents/python/web/venv/bin/python
# -*- coding: utf-8 -*-
# setuptools-generated console-script shim for sphinx-apidoc.
import re
import sys
from sphinx.ext.apidoc import main
if __name__ == '__main__':
    # Strip the "-script.pyw"/".exe" wrapper suffix from argv[0] before
    # delegating to the real entry point.
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
| [
"yang.liu@mail.dealmoon.com"
] | yang.liu@mail.dealmoon.com | |
fb3c1d8faf3f4c7f4a59af63fb46a030978ecd4e | f167dffa2f767a0419aa82bf434852069a8baeb8 | /lib/youtube_dl/extractor/einthusan.py | 4e0f8bc819c70730a476ca31cd4320cecdc25b3d | [
"MIT"
] | permissive | firsttris/plugin.video.sendtokodi | d634490b55149adfdcb62c1af1eb77568b8da3f5 | 1095c58e2bc21de4ab6fcb67a70e4f0f04febbc3 | refs/heads/master | 2023-08-18T10:10:39.544848 | 2023-08-15T17:06:44 | 2023-08-15T17:06:44 | 84,665,460 | 111 | 31 | MIT | 2022-11-11T08:05:21 | 2017-03-11T16:53:06 | Python | UTF-8 | Python | false | false | 3,720 | py | # coding: utf-8
from __future__ import unicode_literals
import json
import re
from .common import InfoExtractor
from ..compat import (
compat_b64decode,
compat_str,
compat_urlparse,
)
from ..utils import (
extract_attributes,
ExtractorError,
get_elements_by_class,
urlencode_postdata,
)
class EinthusanIE(InfoExtractor):
    """youtube-dl extractor for einthusan.tv/.com/.ca movie pages."""
    _VALID_URL = r'https?://(?P<host>einthusan\.(?:tv|com|ca))/movie/watch/(?P<id>[^/?#&]+)'
    _TESTS = [{
        'url': 'https://einthusan.tv/movie/watch/9097/',
        'md5': 'ff0f7f2065031b8a2cf13a933731c035',
        'info_dict': {
            'id': '9097',
            'ext': 'mp4',
            'title': 'Ae Dil Hai Mushkil',
            'description': 'md5:33ef934c82a671a94652a9b4e54d931b',
            'thumbnail': r're:^https?://.*\.jpg$',
        }
    }, {
        'url': 'https://einthusan.tv/movie/watch/51MZ/?lang=hindi',
        'only_matching': True,
    }, {
        'url': 'https://einthusan.com/movie/watch/9097/',
        'only_matching': True,
    }, {
        'url': 'https://einthusan.ca/movie/watch/4E9n/?lang=hindi',
        'only_matching': True,
    }]
    # reversed from jsoncrypto.prototype.decrypt() in einthusan-PGMovieWatcher.js
    def _decrypt(self, encrypted_data, video_id):
        """Reorder the obfuscated base64 payload, decode and parse as JSON."""
        return self._parse_json(compat_b64decode((
            encrypted_data[:10] + encrypted_data[-1] + encrypted_data[12:-1]
        )).decode('utf-8'), video_id)
    def _real_extract(self, url):
        """Fetch the page, request the (encrypted) link JSON and build formats."""
        mobj = re.match(self._VALID_URL, url)
        host = mobj.group('host')
        video_id = mobj.group('id')
        webpage = self._download_webpage(url, video_id)
        title = self._html_search_regex(r'<h3>([^<]+)</h3>', webpage, 'title')
        player_params = extract_attributes(self._search_regex(
            r'(<section[^>]+id="UIVideoPlayer"[^>]+>)', webpage, 'player parameters'))
        page_id = self._html_search_regex(
            '<html[^>]+data-pageid="([^"]+)"', webpage, 'page ID')
        # The site's AJAX endpoint returns the stream links, encrypted.
        video_data = self._download_json(
            'https://%s/ajax/movie/watch/%s/' % (host, video_id), video_id,
            data=urlencode_postdata({
                'xEvent': 'UIVideoPlayer.PingOutcome',
                'xJson': json.dumps({
                    'EJOutcomes': player_params['data-ejpingables'],
                    'NativeHLS': False
                }),
                'arcVersion': 3,
                'appVersion': 59,
                'gorilla.csrf.Token': page_id,
            }))['Data']
        if isinstance(video_data, compat_str) and video_data.startswith('/ratelimited/'):
            raise ExtractorError(
                'Download rate reached. Please try again later.', expected=True)
        ej_links = self._decrypt(video_data['EJLinks'], video_id)
        formats = []
        m3u8_url = ej_links.get('HLSLink')
        if m3u8_url:
            formats.extend(self._extract_m3u8_formats(
                m3u8_url, video_id, ext='mp4', entry_protocol='m3u8_native'))
        mp4_url = ej_links.get('MP4Link')
        if mp4_url:
            formats.append({
                'url': mp4_url,
            })
        self._sort_formats(formats)
        description = get_elements_by_class('synopsis', webpage)[0]
        thumbnail = self._html_search_regex(
            r'''<img[^>]+src=(["'])(?P<url>(?!\1).+?/moviecovers/(?!\1).+?)\1''',
            webpage, 'thumbnail url', fatal=False, group='url')
        if thumbnail is not None:
            # Thumbnail URLs may be relative; resolve against the page URL.
            thumbnail = compat_urlparse.urljoin(url, thumbnail)
        return {
            'id': video_id,
            'title': title,
            'formats': formats,
            'thumbnail': thumbnail,
            'description': description,
        }
| [
"noreply@github.com"
] | noreply@github.com |
805f5305ff5d9047e6485ff2c23901ee79710e57 | a6b7dbdf66de11f6e04bac5b9da5cb2d71b8e111 | /vae/model.py | efdd7033b7af95ea32786a5a77e5c666a476d2dd | [] | no_license | gabrielwong159/tf | 9b9e144682daab3901c5cde703d39d5ee1a68b72 | bd506341034ecb47ea50a0b38040c1765003deb3 | refs/heads/master | 2021-07-03T23:50:22.391246 | 2019-03-15T10:29:25 | 2019-03-15T10:29:25 | 143,256,201 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,285 | py | import tensorflow as tf
import tensorflow.contrib.slim as slim
class VAE(object):
    """Convolutional variational autoencoder for 28x28x1 (MNIST) images.

    The full TF graph is built in __init__: encoder -> reparameterized
    latent sample -> decoder, with reconstruction + KL loss in self.loss.
    The two network builders are now proper @staticmethods (they take no
    self and were previously only callable as VAE.encoder_network(...)).
    """
    # Dimensionality of the latent code z.
    latent_size = 2

    def __init__(self):
        self.x = tf.placeholder(tf.float32, [None, 28, 28, 1])
        batch_size = tf.shape(self.x)[0]
        z_mean, z_log_var = VAE.encoder_network(self.x)
        # Reparameterization trick: z = mu + sigma * eps.
        eps = tf.random_normal([batch_size, VAE.latent_size], mean=0., stddev=1e-2)
        z = z_mean + tf.exp(z_log_var) * eps
        self.z = z
        logits = VAE.decoder_network(z)
        self.out = tf.nn.sigmoid(logits)
        labels_flat = tf.reshape(self.x, [batch_size, -1])
        logits_flat = tf.reshape(logits, [batch_size, -1])
        # Per-pixel Bernoulli reconstruction loss, summed over pixels.
        image_loss = tf.nn.sigmoid_cross_entropy_with_logits(labels=labels_flat, logits=logits_flat)
        image_loss = tf.reduce_sum(image_loss, axis=-1)
        # image_loss = tf.reduce_sum(tf.squared_difference(labels_flat, logits_flat), axis=-1)
        # KL(q(z|x) || N(0, I)); z_log_var holds log(sigma).
        kl_loss = -0.5 * tf.reduce_sum(1.0 + 2.0*z_log_var - tf.square(z_mean) - tf.exp(2.0*z_log_var), 1)
        self.loss = tf.reduce_mean(image_loss + kl_loss)

    @staticmethod
    def encoder_network(x):
        """Map images to the (mean, log-std) of the latent Gaussian."""
        net = slim.conv2d(x, 64, [2, 2], scope='conv1')
        net = slim.conv2d(net, 64, [2, 2], stride=2, scope='conv2')
        net = slim.conv2d(net, 64, [1, 1], scope='conv3')
        net = slim.conv2d(net, 64, [1, 1], scope='conv4')
        net = slim.flatten(net, scope='flat')
        net = slim.fully_connected(net, 128, scope='fc5')
        z_mean = slim.fully_connected(net, VAE.latent_size, scope='z_mean')
        z_log_var = slim.fully_connected(net, VAE.latent_size, scope='z_log_var')
        return z_mean, z_log_var

    @staticmethod
    def decoder_network(x):
        """Map latent codes back to 28x28x1 image logits."""
        net = slim.fully_connected(x, 128, scope='fc1')
        net = slim.fully_connected(net, 128 * 14 * 14, scope='fc2')
        net = tf.reshape(net, [-1, 14, 14, 128])
        net = slim.conv2d_transpose(net, 64, [3, 3], scope='trans_conv3')
        net = slim.conv2d_transpose(net, 64, [3, 3], scope='trans_conv4')
        net = slim.conv2d_transpose(net, 64, [2, 2], stride=2, padding='VALID', scope='trans_conv5')
        net = slim.conv2d_transpose(net, 1, [2, 2], activation_fn=None, scope='trans_conv6')
        return net
| [
"gabrielwong159@gmail.com"
] | gabrielwong159@gmail.com |
afef5e088c4a797fddf972b908f3d05308a8a5c5 | a512b8893b0d2de827d6292e810f3a98b41e132c | /Week6/Day1/Solutions/Python/prog4.py | 8f234ad7cc815e2ff244fd79557baa2595b427a1 | [] | no_license | Audarya07/Daily-Flash-Codes | d771079fd0d470e2d3e05679f17f32fb64b4f426 | cf96ca2b1676b038e243fac67be778381492ffeb | refs/heads/master | 2022-11-06T15:37:47.180729 | 2020-06-25T16:20:55 | 2020-06-25T16:20:55 | 274,960,651 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 175 | py | for i in range(5):
num = 5
for j in range(5):
if i>j:
print(" ",end=" ")
else:
print(num,end=" ")
num-=1
print()
| [
"audiuttarwar2000@gmail.com"
] | audiuttarwar2000@gmail.com |
1b2708b9fd69527e897aec7549fa95a9ed7fafd3 | 6d11eda98e529286c775942f63013619f37246c5 | /examples/potsdam/semantic_segmentation.py | a3ad3085dd6f3c7d8d3532839dfb3cf35057feda | [
"Apache-2.0"
] | permissive | Pandinosaurus/raster-vision-examples | 388438ddd58c2c0fd8a7eced5be02cc5518e80f8 | d6957a5de6d49fbe7d419da67979725eaab43ee7 | refs/heads/master | 2021-07-18T08:17:33.274224 | 2020-07-03T02:52:20 | 2020-07-03T02:52:20 | 184,796,275 | 1 | 0 | NOASSERTION | 2020-07-03T04:10:43 | 2019-05-03T17:38:55 | Jupyter Notebook | UTF-8 | Python | false | false | 5,610 | py | import os
from os.path import join
import rastervision as rv
from examples.utils import str_to_bool, save_image_crop
class PotsdamSemanticSegmentation(rv.ExperimentSet):
    # Raster Vision experiment set: semantic segmentation on ISPRS Potsdam tiles.
    def exp_main(self, raw_uri, processed_uri, root_uri, test=False, use_tf=False):
        """Run an experiment on the ISPRS Potsdam dataset.

        Uses Tensorflow Deeplab backend with Mobilenet architecture. Should get to
        F1 score of ~0.86 (including clutter class) after 6 hours of training on a P3
        instance.

        Args:
            raw_uri: (str) directory of raw data
            processed_uri: (str) directory for derived data (test-mode image crops
                are written under ``<processed_uri>/crops``)
            root_uri: (str) root directory for experiment output
            test: (bool) if True, run a very small experiment as a test and generate
                debug output
            use_tf: (bool) if True, use Tensorflow Deeplab backend; otherwise the
                PyTorch semantic segmentation backend is used.
        """
        # Flags arrive from the CLI as strings; normalize to real booleans.
        test = str_to_bool(test)
        use_tf = str_to_bool(use_tf)
        exp_id = 'potsdam-seg'
        # Potsdam tile ids ("<row>-<col>") used for training / validation.
        train_ids = ['2-10', '2-11', '3-10', '3-11', '4-10', '4-11', '4-12', '5-10',
                     '5-11', '5-12', '6-10', '6-11', '6-7', '6-9', '7-10', '7-11',
                     '7-12', '7-7', '7-8', '7-9']
        val_ids = ['2-12', '3-12', '6-12']
        # infrared, red, green
        channel_order = [3, 0, 1]

        debug = False
        if test:
            # Shrink to one tile per split so the experiment runs quickly.
            debug = True
            train_ids = train_ids[0:1]
            val_ids = val_ids[0:1]
            exp_id += '-test'

        # Class name -> (class id, display color).
        classes = {
            'Car': (1, '#ffff00'),
            'Building': (2, '#0000ff'),
            'Low Vegetation': (3, '#00ffff'),
            'Tree': (4, '#00ff00'),
            'Impervious': (5, "#ffffff"),
            'Clutter': (6, "#ff0000")
        }

        # 300px chips cut from the tiles with a non-overlapping sliding window.
        task = rv.TaskConfig.builder(rv.SEMANTIC_SEGMENTATION) \
                            .with_chip_size(300) \
                            .with_classes(classes) \
                            .with_chip_options(window_method='sliding',
                                               stride=300, debug_chip_probability=1.0) \
                            .build()

        if use_tf:
            batch_size = 8
            num_steps = 100000
            if test:
                num_steps = 1
                batch_size = 2

            model_type = rv.MOBILENET_V2
            backend = rv.BackendConfig.builder(rv.TF_DEEPLAB) \
                                      .with_task(task) \
                                      .with_model_defaults(model_type) \
                                      .with_train_options(sync_interval=600) \
                                      .with_num_steps(num_steps) \
                                      .with_batch_size(batch_size) \
                                      .with_debug(debug) \
                                      .build()
        else:
            batch_size = 8
            num_epochs = 10
            if test:
                batch_size = 2
                num_epochs = 1

            backend = rv.BackendConfig.builder(rv.PYTORCH_SEMANTIC_SEGMENTATION) \
                .with_task(task) \
                .with_train_options(
                    lr=1e-4,
                    batch_size=batch_size,
                    num_epochs=num_epochs,
                    model_arch='resnet50',
                    debug=debug) \
                .build()

        def make_scene(id):
            # Build one scene (raster + labels) for a tile id like '2-10'.
            id = id.replace('-', '_')
            raster_uri = '{}/4_Ortho_RGBIR/top_potsdam_{}_RGBIR.tif'.format(
                raw_uri, id)
            label_uri = '{}/5_Labels_for_participants/top_potsdam_{}_label.tif'.format(
                raw_uri, id)

            if test:
                # In test mode work on a small 600px crop instead of the full tile.
                crop_uri = join(
                    processed_uri, 'crops', os.path.basename(raster_uri))
                save_image_crop(raster_uri, crop_uri, size=600)
                raster_uri = crop_uri

            # Using with_rgb_class_map because label TIFFs have classes encoded as RGB colors.
            label_source = rv.LabelSourceConfig.builder(rv.SEMANTIC_SEGMENTATION) \
                .with_rgb_class_map(task.class_map) \
                .with_raster_source(label_uri) \
                .build()

            # URI will be injected by scene config.
            # Using with_rgb(True) because we want prediction TIFFs to be in RGB format.
            label_store = rv.LabelStoreConfig.builder(rv.SEMANTIC_SEGMENTATION_RASTER) \
                .with_rgb(True) \
                .build()

            scene = rv.SceneConfig.builder() \
                                  .with_task(task) \
                                  .with_id(id) \
                                  .with_raster_source(raster_uri,
                                                      channel_order=channel_order) \
                                  .with_label_source(label_source) \
                                  .with_label_store(label_store) \
                                  .build()

            return scene

        train_scenes = [make_scene(id) for id in train_ids]
        val_scenes = [make_scene(id) for id in val_ids]

        dataset = rv.DatasetConfig.builder() \
                                  .with_train_scenes(train_scenes) \
                                  .with_validation_scenes(val_scenes) \
                                  .build()

        experiment = rv.ExperimentConfig.builder() \
                                        .with_id(exp_id) \
                                        .with_task(task) \
                                        .with_backend(backend) \
                                        .with_dataset(dataset) \
                                        .with_root_uri(root_uri) \
                                        .build()

        return experiment
# Entry point: hand control to the Raster Vision CLI, which discovers the
# ExperimentSet subclass defined above.
if __name__ == '__main__':
    rv.main()
| [
"lewfish@gmail.com"
] | lewfish@gmail.com |
198f188e62ceb56ee34ba2c6a0977e5976402eb1 | 12ecf8911eeb150afee3925e1d3b02713478ed10 | /GenomeDecrypter/modeOneMeasureTime.py | ef860e9aee8dd9d1e201452de2d637aa0d196892 | [] | no_license | dimitreortt/Iniciacao-Cientifica---Biologia-Computacional | cbf1adfb766f9e7453690e6fe03f3f093b167caf | 74a933f756d2ea35f25224d5d9c957b15db50c64 | refs/heads/master | 2021-01-01T22:23:58.566605 | 2020-02-09T20:52:57 | 2020-02-09T20:52:57 | 239,368,935 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,107 | py | import sys
import os
from mpi4py import MPI
#from modeOne_paralelo_ops import *
import MOops.modeOne_paralelo_ops as ops
import MOops.results as results
# MPI benchmark script (Python 2): measures the average wall-clock time of
# myResults.update(...) per rank and appends the per-rank averages to a file.
# NOTE(review): the dump this was recovered from lost indentation; the nesting
# below was reconstructed from the break/assert structure — verify against the
# original repository before relying on it.
comm = MPI.COMM_WORLD
size = comm.Get_size()
rank = comm.Get_rank()
#rank = 0
if rank == 0:
    print ('o numero de comm.size eh: %d' % size)
    outputDir = os.getcwd().replace('drivers', 'saidas/losangos/')
    pathToInput = sys.argv[-1]
    print(pathToInput)
    #genome name
    if len(sys.argv) < 2:
        print ('expected inputFile as the last parameter, exiting...')
        exit()
    gName = pathToInput.split('/')[-1]
    # change ops.newAlphaList() - done
    # Root rank parses the input once; the result is broadcast below.
    alphaList, genoma1, genoma2 = ops.newAlphaList(pathToInput)
    struc = (alphaList, genoma1, genoma2)
else:
    struc = None
if rank%20 == 0:
    print(rank)
#exit()
# Every rank receives the parsed structures from rank 0.
struc = comm.bcast(struc, root=0)
alphaList, genoma1, genoma2 = struc
#match1, match2, match3, match4, match5, match6, match7 = [getcm(alpha) for alpha in alphaList]
alpha1, alpha2, alpha3, alpha4, alpha5, alpha6, alpha7, alpha8, alpha9 = alphaList
allFirsts = []
if rank == 0:
    startTime = ops.time.time()
    #alpha1, alpha2, alpha3, alpha4, alpha5, alpha6, alpha7 = alphaList
    #alpha1, alpha2, alpha9, alpha10 = alphaList
    #lastSix = []
    count = 0
    '''print '\n\n',alpha1.possib, '1----\n\n'
    print '\n\n',alpha2.possib, '2----\n\n'
    print '\n\n',alpha3.possib, '3----\n\n'
    print '\n\n',alpha4.possib, '4----\n\n'
    print '\n\n',alpha5.possib, '5----\n\n'''
    #exit()
    # Rank 0 enumerates every combination of the first four alphas; these
    # partial matchings are later split evenly across ranks via scatter.
    for match1 in alpha1.possib:
        #for match1 in ops.getcm(alpha1):
        #print ops.getcm(alpha1)
        for match2 in alpha2.possib:
            #for match2 in ops.getcm(alpha2):
            for match3 in alpha3.possib:
                #for match3 in ops.getcm(alpha3):
                for match4 in alpha4.possib:
                    #for match4 in ops.getcm(alpha4):
                    #matching = ops.newMatching(match1, match2, match3, match4)
                    #matching = [match1, match2, match3, match4]
                    allFirsts.append([match1, match2, match3, match4])
                    #print '\n\n%r\n\n' % allFirsts
                    #allFirsts = [[m1,m2,m3,m4], [m1,m2,m3,m4], [m1,m2,m3,m4], [m1,m2,m3,m4], [m1,m2,m3,m4].........., [m1,m2,m3,m4]] <- 6**4 desses
                    #print count
                    count += 1
    #print count, 'here'
    #exit()
    #print len(allFirsts)
    #MARKER: ATE AQUI ESTA IDILICO DE PERFEITO!
    # Repartition the flat list into `size` chunks, one per MPI rank.
    allFirsts = ops.organize(allFirsts, size)
    #for item in allFirsts:
    #    print len(item)
    #print len(allFirsts)
    assert len(allFirsts) == size
    print len(allFirsts[0])
    #assert len(allFirsts[0]) == pow(6,4)/size
    assert type(allFirsts[0]) == type([])
    print type(allFirsts[0])
    for alpha in alphaList:
        #print alpha
        pass
    print 'all asserted'
    #exit()
# Each rank receives its own chunk of partial matchings.
allFirsts = comm.scatter(allFirsts, root=0)
myResults = results.Results(rank)
count = 0
upSum = 0.0
upCalls = 0
#backlog = BackLog('proc%d' % rank)
# For every first-four matching, time exactly one update() call over the
# innermost (match9) loop; the breaks abandon the outer alpha5..alpha8
# loops after their first iteration — this is a timing probe, not a full sweep.
for curFirst in allFirsts:
    for match5 in alpha5.possib:
        for match6 in alpha6.possib:
            for match7 in alpha7.possib:
                for match8 in alpha8.possib:
                    lastSix = []
                    for match9 in alpha9.possib:
                        #print count, rank
                        count += 1
                        #current
                        cur = []
                        cur = [match5, match6, match7, match8, match9]
                        assert len(cur) == 5
                        cur.extend(curFirst)
                        assert len(cur) == 9
                        lastSix.append(cur)
                    # Measure only the update() call itself.
                    upStart = ops.time.time()
                    myResults.update(lastSix)
                    updateTime = ops.time.time() - upStart
                    upSum += updateTime
                    upCalls += 1
                    if rank %20==0:
                        print 'current average of rank%d is: %f' % (rank, upSum/upCalls)
                        print 'sum is: %f, upcalls is: %d' % (upSum, upCalls)
                        print 'nai'
                    myAverage = upSum/upCalls
                    myStruc = (myAverage, rank)
                    break
                break
            print 'here'
            #exit()
            break
        break
# Collect each rank's (average, rank) pair on the root.
bigStruc = comm.gather(myStruc, root=0)
if rank == 0:
    # Append the per-rank averages to the report file.
    outFile = open('averageTimeGetNumCIs_%dprocs' % size, 'a+')
    outFile.write('Aqui serao dispostos os tempos medios de execucao de getNumCIs para tamanho 30\n')
    outFile.write('num de processadores envolvidos: %d\n\n' % (size))
    outFile.write('avgTime\tprocNumber\t\n')
    for struc in bigStruc:
        outFile.write('%f\t%d\n' % (struc[0], struc[1]))
    print 'Mensuracao de tempo concluida com sucesso!'
| [
"dimiortt@gmail.com"
] | dimiortt@gmail.com |
c1596616ba13010400e6d2581bcc6100afca0493 | 9df2fb0bc59ab44f026b0a2f5ef50c72b2fb2ceb | /sdk/containerservice/azure-mgmt-containerservice/azure/mgmt/containerservice/v2021_10_01/operations/_maintenance_configurations_operations.py | 1ff70644846346ec7ed89b8078257c9d68991e1e | [
"MIT",
"LGPL-2.1-or-later",
"LicenseRef-scancode-generic-cla"
] | permissive | openapi-env-test/azure-sdk-for-python | b334a2b65eeabcf9b7673879a621abb9be43b0f6 | f61090e96094cfd4f43650be1a53425736bd8985 | refs/heads/main | 2023-08-30T14:22:14.300080 | 2023-06-08T02:53:04 | 2023-06-08T02:53:04 | 222,384,897 | 1 | 0 | MIT | 2023-09-08T08:38:48 | 2019-11-18T07:09:24 | Python | UTF-8 | Python | false | false | 26,102 | py | # pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from io import IOBase
from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload
import urllib.parse
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
map_error,
)
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
from ..._serialization import Serializer
from .._vendor import _convert_request, _format_url_section
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
def build_list_by_managed_cluster_request(
    resource_group_name: str, resource_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build the GET request that lists maintenance configurations of a managed cluster."""
    header_map = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    param_map = case_insensitive_dict(kwargs.pop("params", {}) or {})

    # Callers may override the API version via kwargs or the query params.
    api_version: str = kwargs.pop("api_version", param_map.pop("api-version", "2021-10-01"))
    accept = header_map.pop("Accept", "application/json")

    # Resolve the URL template with validated, URL-encoded path arguments.
    url_template = kwargs.pop(
        "template_url",
        "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/maintenanceConfigurations",
    )  # pylint: disable=line-too-long
    path_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str", min_length=1),
        "resourceName": _SERIALIZER.url(
            "resource_name",
            resource_name,
            "str",
            max_length=63,
            min_length=1,
            pattern=r"^[a-zA-Z0-9]$|^[a-zA-Z0-9][-_a-zA-Z0-9]{0,61}[a-zA-Z0-9]$",
        ),
    }
    full_url: str = _format_url_section(url_template, **path_arguments)  # type: ignore

    param_map["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
    header_map["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="GET", url=full_url, params=param_map, headers=header_map, **kwargs)
def build_get_request(
    resource_group_name: str, resource_name: str, config_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build the GET request for a single maintenance configuration."""
    header_map = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    param_map = case_insensitive_dict(kwargs.pop("params", {}) or {})

    # Callers may override the API version via kwargs or the query params.
    api_version: str = kwargs.pop("api_version", param_map.pop("api-version", "2021-10-01"))
    accept = header_map.pop("Accept", "application/json")

    # Resolve the URL template with validated, URL-encoded path arguments.
    url_template = kwargs.pop(
        "template_url",
        "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/maintenanceConfigurations/{configName}",
    )  # pylint: disable=line-too-long
    path_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str", min_length=1),
        "resourceName": _SERIALIZER.url(
            "resource_name",
            resource_name,
            "str",
            max_length=63,
            min_length=1,
            pattern=r"^[a-zA-Z0-9]$|^[a-zA-Z0-9][-_a-zA-Z0-9]{0,61}[a-zA-Z0-9]$",
        ),
        "configName": _SERIALIZER.url("config_name", config_name, "str"),
    }
    full_url: str = _format_url_section(url_template, **path_arguments)  # type: ignore

    param_map["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
    header_map["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="GET", url=full_url, params=param_map, headers=header_map, **kwargs)
def build_create_or_update_request(
    resource_group_name: str, resource_name: str, config_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build the PUT request that creates or updates a maintenance configuration."""
    header_map = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    param_map = case_insensitive_dict(kwargs.pop("params", {}) or {})

    # Callers may override the API version via kwargs or the query params.
    api_version: str = kwargs.pop("api_version", param_map.pop("api-version", "2021-10-01"))
    content_type: Optional[str] = kwargs.pop("content_type", header_map.pop("Content-Type", None))
    accept = header_map.pop("Accept", "application/json")

    # Resolve the URL template with validated, URL-encoded path arguments.
    url_template = kwargs.pop(
        "template_url",
        "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/maintenanceConfigurations/{configName}",
    )  # pylint: disable=line-too-long
    path_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str", min_length=1),
        "resourceName": _SERIALIZER.url(
            "resource_name",
            resource_name,
            "str",
            max_length=63,
            min_length=1,
            pattern=r"^[a-zA-Z0-9]$|^[a-zA-Z0-9][-_a-zA-Z0-9]{0,61}[a-zA-Z0-9]$",
        ),
        "configName": _SERIALIZER.url("config_name", config_name, "str"),
    }
    full_url: str = _format_url_section(url_template, **path_arguments)  # type: ignore

    param_map["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    # Content-Type is only sent when a request body content type was supplied.
    if content_type is not None:
        header_map["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
    header_map["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="PUT", url=full_url, params=param_map, headers=header_map, **kwargs)
def build_delete_request(
    resource_group_name: str, resource_name: str, config_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build the DELETE request for a maintenance configuration."""
    header_map = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    param_map = case_insensitive_dict(kwargs.pop("params", {}) or {})

    # Callers may override the API version via kwargs or the query params.
    api_version: str = kwargs.pop("api_version", param_map.pop("api-version", "2021-10-01"))
    accept = header_map.pop("Accept", "application/json")

    # Resolve the URL template with validated, URL-encoded path arguments.
    url_template = kwargs.pop(
        "template_url",
        "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/maintenanceConfigurations/{configName}",
    )  # pylint: disable=line-too-long
    path_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str", min_length=1),
        "resourceName": _SERIALIZER.url(
            "resource_name",
            resource_name,
            "str",
            max_length=63,
            min_length=1,
            pattern=r"^[a-zA-Z0-9]$|^[a-zA-Z0-9][-_a-zA-Z0-9]{0,61}[a-zA-Z0-9]$",
        ),
        "configName": _SERIALIZER.url("config_name", config_name, "str"),
    }
    full_url: str = _format_url_section(url_template, **path_arguments)  # type: ignore

    param_map["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
    header_map["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="DELETE", url=full_url, params=param_map, headers=header_map, **kwargs)
# NOTE(review): this class is AutoRest-generated (see the file header); prefer
# regenerating from the swagger over hand-editing its logic.
class MaintenanceConfigurationsOperations:
    """
    .. warning::
        **DO NOT** instantiate this class directly.

        Instead, you should access the following operations through
        :class:`~azure.mgmt.containerservice.v2021_10_01.ContainerServiceClient`'s
        :attr:`maintenance_configurations` attribute.
    """

    # Expose the generated model classes to callers of this operations group.
    models = _models

    def __init__(self, *args, **kwargs):
        # Positional args (client, config, serializer, deserializer) take
        # precedence over the equivalent keyword args.
        input_args = list(args)
        self._client = input_args.pop(0) if input_args else kwargs.pop("client")
        self._config = input_args.pop(0) if input_args else kwargs.pop("config")
        self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
        self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")

    @distributed_trace
    def list_by_managed_cluster(
        self, resource_group_name: str, resource_name: str, **kwargs: Any
    ) -> Iterable["_models.MaintenanceConfiguration"]:
        """Gets a list of maintenance configurations in the specified managed cluster.

        Gets a list of maintenance configurations in the specified managed cluster.

        :param resource_group_name: The name of the resource group. Required.
        :type resource_group_name: str
        :param resource_name: The name of the managed cluster resource. Required.
        :type resource_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either MaintenanceConfiguration or the result of
         cls(response)
        :rtype:
         ~azure.core.paging.ItemPaged[~azure.mgmt.containerservice.v2021_10_01.models.MaintenanceConfiguration]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01"))
        cls: ClsType[_models.MaintenanceConfigurationListResult] = kwargs.pop("cls", None)

        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        def prepare_request(next_link=None):
            # First page: build the request from the URL template;
            # subsequent pages: follow the service-provided next_link.
            if not next_link:

                request = build_list_by_managed_cluster_request(
                    resource_group_name=resource_group_name,
                    resource_name=resource_name,
                    subscription_id=self._config.subscription_id,
                    api_version=api_version,
                    template_url=self.list_by_managed_cluster.metadata["url"],
                    headers=_headers,
                    params=_params,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)

            else:
                # make call to next link with the client's api-version
                _parsed_next_link = urllib.parse.urlparse(next_link)
                _next_request_params = case_insensitive_dict(
                    {
                        key: [urllib.parse.quote(v) for v in value]
                        for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
                    }
                )
                _next_request_params["api-version"] = self._config.api_version
                request = HttpRequest(
                    "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
                request.method = "GET"
            return request

        def extract_data(pipeline_response):
            # One page of results -> (continuation token, iterator of items).
            deserialized = self._deserialize("MaintenanceConfigurationListResult", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)  # type: ignore
            return deserialized.next_link or None, iter(list_of_elem)

        def get_next(next_link=None):
            request = prepare_request(next_link)

            _stream = False
            pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
                request, stream=_stream, **kwargs
            )
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)

            return pipeline_response

        return ItemPaged(get_next, extract_data)

    list_by_managed_cluster.metadata = {
        "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/maintenanceConfigurations"
    }

    @distributed_trace
    def get(
        self, resource_group_name: str, resource_name: str, config_name: str, **kwargs: Any
    ) -> _models.MaintenanceConfiguration:
        """Gets the specified maintenance configuration of a managed cluster.

        Gets the specified maintenance configuration of a managed cluster.

        :param resource_group_name: The name of the resource group. Required.
        :type resource_group_name: str
        :param resource_name: The name of the managed cluster resource. Required.
        :type resource_name: str
        :param config_name: The name of the maintenance configuration. Required.
        :type config_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: MaintenanceConfiguration or the result of cls(response)
        :rtype: ~azure.mgmt.containerservice.v2021_10_01.models.MaintenanceConfiguration
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01"))
        cls: ClsType[_models.MaintenanceConfiguration] = kwargs.pop("cls", None)

        request = build_get_request(
            resource_group_name=resource_group_name,
            resource_name=resource_name,
            config_name=config_name,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            template_url=self.get.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        _stream = False
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize("MaintenanceConfiguration", pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    get.metadata = {
        "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/maintenanceConfigurations/{configName}"
    }

    @overload
    def create_or_update(
        self,
        resource_group_name: str,
        resource_name: str,
        config_name: str,
        parameters: _models.MaintenanceConfiguration,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> _models.MaintenanceConfiguration:
        """Creates or updates a maintenance configuration in the specified managed cluster.

        Creates or updates a maintenance configuration in the specified managed cluster.

        :param resource_group_name: The name of the resource group. Required.
        :type resource_group_name: str
        :param resource_name: The name of the managed cluster resource. Required.
        :type resource_name: str
        :param config_name: The name of the maintenance configuration. Required.
        :type config_name: str
        :param parameters: The maintenance configuration to create or update. Required.
        :type parameters: ~azure.mgmt.containerservice.v2021_10_01.models.MaintenanceConfiguration
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
         Default value is "application/json".
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: MaintenanceConfiguration or the result of cls(response)
        :rtype: ~azure.mgmt.containerservice.v2021_10_01.models.MaintenanceConfiguration
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @overload
    def create_or_update(
        self,
        resource_group_name: str,
        resource_name: str,
        config_name: str,
        parameters: IO,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> _models.MaintenanceConfiguration:
        """Creates or updates a maintenance configuration in the specified managed cluster.

        Creates or updates a maintenance configuration in the specified managed cluster.

        :param resource_group_name: The name of the resource group. Required.
        :type resource_group_name: str
        :param resource_name: The name of the managed cluster resource. Required.
        :type resource_name: str
        :param config_name: The name of the maintenance configuration. Required.
        :type config_name: str
        :param parameters: The maintenance configuration to create or update. Required.
        :type parameters: IO
        :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
         Default value is "application/json".
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: MaintenanceConfiguration or the result of cls(response)
        :rtype: ~azure.mgmt.containerservice.v2021_10_01.models.MaintenanceConfiguration
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @distributed_trace
    def create_or_update(
        self,
        resource_group_name: str,
        resource_name: str,
        config_name: str,
        parameters: Union[_models.MaintenanceConfiguration, IO],
        **kwargs: Any
    ) -> _models.MaintenanceConfiguration:
        """Creates or updates a maintenance configuration in the specified managed cluster.

        Creates or updates a maintenance configuration in the specified managed cluster.

        :param resource_group_name: The name of the resource group. Required.
        :type resource_group_name: str
        :param resource_name: The name of the managed cluster resource. Required.
        :type resource_name: str
        :param config_name: The name of the maintenance configuration. Required.
        :type config_name: str
        :param parameters: The maintenance configuration to create or update. Is either a
         MaintenanceConfiguration type or a IO type. Required.
        :type parameters: ~azure.mgmt.containerservice.v2021_10_01.models.MaintenanceConfiguration or
         IO
        :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
         Default value is None.
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: MaintenanceConfiguration or the result of cls(response)
        :rtype: ~azure.mgmt.containerservice.v2021_10_01.models.MaintenanceConfiguration
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01"))
        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
        cls: ClsType[_models.MaintenanceConfiguration] = kwargs.pop("cls", None)

        content_type = content_type or "application/json"
        _json = None
        _content = None
        # Raw streams/bytes are sent as-is; model objects are serialized to JSON.
        if isinstance(parameters, (IOBase, bytes)):
            _content = parameters
        else:
            _json = self._serialize.body(parameters, "MaintenanceConfiguration")

        request = build_create_or_update_request(
            resource_group_name=resource_group_name,
            resource_name=resource_name,
            config_name=config_name,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            content_type=content_type,
            json=_json,
            content=_content,
            template_url=self.create_or_update.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        _stream = False
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize("MaintenanceConfiguration", pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    create_or_update.metadata = {
        "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/maintenanceConfigurations/{configName}"
    }

    @distributed_trace
    def delete(  # pylint: disable=inconsistent-return-statements
        self, resource_group_name: str, resource_name: str, config_name: str, **kwargs: Any
    ) -> None:
        """Deletes a maintenance configuration.

        Deletes a maintenance configuration.

        :param resource_group_name: The name of the resource group. Required.
        :type resource_group_name: str
        :param resource_name: The name of the managed cluster resource. Required.
        :type resource_name: str
        :param config_name: The name of the maintenance configuration. Required.
        :type config_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: None or the result of cls(response)
        :rtype: None
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01"))
        cls: ClsType[None] = kwargs.pop("cls", None)

        request = build_delete_request(
            resource_group_name=resource_group_name,
            resource_name=resource_name,
            config_name=config_name,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            template_url=self.delete.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        _stream = False
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        # 204 is returned when the configuration did not exist; both are success.
        if response.status_code not in [200, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    delete.metadata = {
        "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/maintenanceConfigurations/{configName}"
    }
| [
"noreply@github.com"
] | noreply@github.com |
e7b69c6f075b17d67552da7d91dd5b80b77ed235 | 5f0eeef355fa84b165d4e0707e8874755cc03259 | /chp02_forces/Exercise_2_10_attractrepel/Attractor.py | b265f28a642f9d31b8c7540541527dd188cd2d56 | [] | no_license | kidult00/NatureOfCode-Examples-Python | 5835fbed114f3991b9986852f31d29a0a46d7e53 | 42461590deebbe305d5815ff0d207ff974335ad5 | refs/heads/master | 2021-05-11T04:47:53.999705 | 2018-03-07T15:54:12 | 2018-03-07T15:54:12 | 117,946,895 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,195 | py | # The Nature of Code - Python Version
# [kidult00](https://github.com/kidult00)
# A class for a draggable attractive body in our world
# Attraction = G * M1 * M2 / Distance^2
class Attractor(object):
    """A draggable attracting body for the sketch.

    Attraction follows Newton's law: F = G * M1 * M2 / Distance^2.
    Relies on Processing-mode builtins (PVector, dist, constrain, fill, ...).
    """
    def __init__(self):
        # Mass doubles as the draw size (diameter = mass * 6).
        self.mass = 10.0
        # Gravitational constant used in attract().
        self.g = 1.0
        self.position = PVector(width/2, height/2)
        # Offset between the grab point and the centre while dragging.
        self.dragOffset = PVector(0.0, 0.0)
        self.dragging = False
        self.rollover = False
    def attract(self, m):
        """Return the gravitational force this body exerts on mover *m*."""
        # Direction of the pull: from the mover towards this attractor.
        pull = PVector.sub(self.position, m.position)
        separation = pull.mag()
        # Clamp the separation so very near/far movers don't give extreme forces.
        separation = constrain(separation, 5.0, 25.0)
        pull.normalize()
        # Magnitude from Newton's law of gravitation.
        magnitude = (self.g * self.mass * m.mass) / (separation * separation)
        pull.mult(magnitude)
        return pull
    def display(self):
        """Draw the body, shaded by its current interaction state."""
        ellipseMode(CENTER)
        strokeWeight(0)
        stroke(0)
        if self.dragging:
            shade = 50
        elif self.rollover:
            shade = 100
        else:
            shade = 0
        fill(shade)
        ellipse(self.position.x, self.position.y, self.mass*6, self.mass*6)
    # The methods below handle mouse interaction.
    def clicked(self, mx, my):
        """Start a drag when the press at (mx, my) lands inside the body."""
        if dist(mx, my, self.position.x, self.position.y) < self.mass:
            self.dragging = True
            self.dragOffset.x = self.position.x - mx
            self.dragOffset.y = self.position.y - my
    def hover(self, mx, my):
        """Record whether the mouse at (mx, my) is over the body."""
        self.rollover = dist(mx, my, self.position.x, self.position.y) < self.mass
    def stopDragging(self):
        """End any drag in progress."""
        self.dragging = False
    def drag(self):
        """While dragging, pin the body to the mouse plus the grab offset."""
        if self.dragging:
            self.position.x = mouseX + self.dragOffset.x
            self.position.y = mouseY + self.dragOffset.y
"sysulj@gmail.com"
] | sysulj@gmail.com |
0f4fc7fbb6095bba090fe5261983d14f9663c9f0 | a0d26a8f908d1cd23495d2a838764ac58deeb0eb | /Week8/Date.py | 264681158473ffaa0d3d8fad169851704f2bc91a | [] | no_license | NOSC1985/UniversityWorkshopsCP1404 | 69dd606e0b6011300d7a3c93b5208a749f51c0ef | 4be286d065c9f796280b247365157077834f5413 | refs/heads/master | 2021-01-21T14:23:50.907064 | 2016-06-15T09:40:39 | 2016-06-15T09:40:39 | 58,850,024 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,923 | py | """
Class: Date
Author: Nicholas Stanton-Cook
Program Set: Week eight Workshop Tasks
This class stores information about a calendar date:
-day_of_month
-month_in_year
-year
It provides the following method:
add_days(self, amount)
    Increases the calendar date by the specified number of days, rolling the
    month and year forward as necessary.
It also defines a __str__ method for formatted printing (e.g. "1st January 2020").
"""
__Author__ = "Nicholas Stanton-Cook"
# English month names indexed by month number minus one (January == index 0).
# Used by Date.__str__ to render a month number as its name.
MONTHS_LIST = ["January", "February", "March", "April", "May", "June", "July", "August", "September", "October",
               "November", "December"]
def check_leap_year(year_to_check):
    """Return 1 if *year_to_check* is a Gregorian leap year, otherwise 0.

    A year is a leap year when it is evenly divisible by 4, except century
    years, which are leap years only when also evenly divisible by 400.

    :param year_to_check: the year to test
    :returns: 1 for a leap year, 0 otherwise
    """
    if year_to_check % 4 != 0:
        return 0
    if year_to_check % 100 != 0:
        return 1
    # Century year: leap only when divisible by 400 (2000 yes, 1900 no).
    return 1 if year_to_check % 400 == 0 else 0
class Date:
    """A simple calendar date (day, month, year) that can be advanced in days.

    Input validation is assumed to be done by the calling code before a Date
    is constructed (see the module docstring for an example of such checks).
    """
    def __init__(self, day, month, year):
        """Store the (pre-validated) day, month and year components.

        :param day: day of the month (1-based)
        :param month: month number (1..12)
        :param year: four-digit year
        """
        self.day = day
        self.month = month
        self.year = year
    def __str__(self):
        """Format the date with an ordinal day, e.g. "1st January 2020"."""
        month_name = MONTHS_LIST[(self.month - 1)]
        if self.day in (1, 21, 31):
            ordinal = "st"
        elif self.day in (2, 22):
            ordinal = "nd"
        elif self.day in (3, 23):
            ordinal = "rd"
        elif 4 <= self.day <= 30:
            ordinal = "th"
        return "{}{} {} {}".format(self.day, ordinal, month_name, self.year)
    def add_days(self, days_to_add):
        """Advance the date by *days_to_add* calendar days, one day at a time,
        rolling over month ends and the year end as required.

        :param days_to_add: number of days to add (non-negative int)
        """
        for _ in range(days_to_add):
            # Work out how many days the current month has.
            if self.month in (1, 3, 5, 7, 8, 10, 12):
                month_length = 31
            elif self.month in (4, 6, 9, 11):
                month_length = 30
            elif self.month == 2:
                # February: 29 days in a leap year, 28 otherwise.
                month_length = 29 if check_leap_year(self.year) == 1 else 28
            else:
                # Invalid month: silently do nothing for this step.
                continue
            if self.day < month_length:
                self.day += 1
            elif self.day == month_length:
                # Roll into the next month (and next year after December).
                if self.month < 12:
                    self.month += 1
                else:
                    self.year += 1
                    self.month = 1
                self.day = 1
| [
"nicholas.stantoncook@my.jcu.edu.au"
] | nicholas.stantoncook@my.jcu.edu.au |
aa57d25f4f98ea632dc3ad4e479ab2f02967e662 | 2491620e581d8a5c20c721427b0c91625468bacd | /todo/models.py | 6bbaed49091bf0151651b13771c85fa23a235acb | [] | no_license | gaofen355/djtodo | 48e20e356158b52cb90b8291cb649e41f22b8e47 | a4d1d2538bccfddcc3d436ff506c1d3ec444f29e | refs/heads/master | 2023-01-28T00:22:24.328288 | 2020-12-10T12:37:05 | 2020-12-10T12:37:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 514 | py | from django.db import models
# Choices for TodoModel.priority as (stored value, human-readable label) pairs.
# The stored values look like Bootstrap context classes (danger/info/success) --
# presumably used for styling in templates; confirm against the views/templates.
PRIORITY = (('danger', 'high'), ('info', 'normal'), ('success', 'low'))
class TodoModel(models.Model):
    """A single to-do item."""
    title = models.CharField(max_length=100)  # short summary of the task
    memo = models.TextField()  # free-form details
    auther = models.CharField(max_length=100)  # task creator (field name typo kept: renaming would change the DB schema)
    priority = models.CharField(
        max_length=50,
        choices=PRIORITY,  # restricted to the PRIORITY tuples above
    )
    progress = models.PositiveSmallIntegerField()  # completion amount (presumably 0-100 percent -- TODO confirm)
    duedate = models.DateField()  # date the task is due
    complete = models.BooleanField()  # True once the task is finished
    def __str__(self):
        """Human-readable representation (used e.g. by the Django admin)."""
        return self.title
"gao.fen@leadinge.co.jp"
] | gao.fen@leadinge.co.jp |
44c0b71d24dbf1b79651671039505c6dcec3cb4b | 26b2fb06a74a4bc5a3ad22ac456fcf25f304b8c2 | /array/findDiagonalOrder.py | 5b666dbbba532dca05e7cd5ac983a1ab9fa1c652 | [
"MIT"
] | permissive | ifwrite/algorithm | 8c2875400c85804884d73af0ccd466581f1583e6 | 1f2a3a7d25ebe5fd317fdde8235197138b086f89 | refs/heads/master | 2022-01-28T09:03:07.277655 | 2019-05-07T09:30:30 | 2019-05-07T09:30:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 899 | py | class Solution:
def findDiagonalOrder(self, matrix):
"""
498. Diagonal Traverse
https://leetcode.com/problems/diagonal-traverse/
"""
if not matrix: return []
i, j, k = 0, 0, 1
w, h = len(matrix), len(matrix[0])
ans = []
for x in range(w * h):
ans.append(matrix[i][j])
if k > 0:
di, dj = i - 1, j + 1
else:
di, dj = i + 1, j - 1
if 0 <= di < w and 0 <= dj < h:
i, j = di, dj
else:
if k > 0:
if j + 1 < h:
j += 1
else:
i += 1
else:
if i + 1 < w:
i += 1
else:
j += 1
k *= -1
return ans | [
"swifterfit@gmail.com"
] | swifterfit@gmail.com |
504153549247c489d9a2d6456727360a899b2db8 | 119febf1dd0263bacde9ea5f8105b12d800cdc5a | /migrations/versions/fb7cd547da7a_.py | 07f65f2619bd957782b1725302028faf5dfa3106 | [] | no_license | bogeresiraje/cinemax | b677682c27fe42c6377fcded8fbf8ba734bd528c | aaea20d06220a2a45cdc43df082d922deec97919 | refs/heads/master | 2020-04-06T10:34:03.043888 | 2018-11-14T11:29:46 | 2018-11-14T11:29:46 | 157,384,366 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 685 | py | """empty message
Revision ID: fb7cd547da7a
Revises: bd51c8ed0c45
Create Date: 2018-11-02 18:03:24.126600
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'fb7cd547da7a'  # this migration's unique id
down_revision = 'bd51c8ed0c45'  # the migration this one applies on top of
branch_labels = None  # no named branches
depends_on = None  # no dependency on other migration chains
def upgrade():
    """Apply the schema change: drop the ``food`` column from ``show_time``."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('show_time', 'food')
    # ### end Alembic commands ###
def downgrade():
    """Revert the change: restore the nullable ``food`` VARCHAR(100) column."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('show_time', sa.Column('food', sa.VARCHAR(length=100), autoincrement=False, nullable=True))
    # ### end Alembic commands ###
| [
"hannzjavas@gmail.com"
] | hannzjavas@gmail.com |
7d6fcce96473e7cf43c76ca8bf3a9c710adf3912 | c951c43a6edeaadd824c4da2be5636d5ff4c89c1 | /sharppy/io/nucaps_decoder.py | 307ef5abfc0a3ec4d98d4f933116465353105922 | [
"BSD-3-Clause"
] | permissive | wcarthur/SHARPpy | 3e9d723835b2310ca0cb7cfa88f4b8e955dbbec3 | 357a14e87b656723204dba03cb60be031201b141 | refs/heads/master | 2023-08-17T05:45:03.900874 | 2022-07-15T21:07:01 | 2022-07-15T21:07:01 | 241,231,994 | 0 | 0 | NOASSERTION | 2020-02-17T23:46:55 | 2020-02-17T23:46:54 | null | UTF-8 | Python | false | false | 4,654 | py |
import numpy as np
import sharppy.sharptab.profile as profile
import sharppy.sharptab.prof_collection as prof_collection
from .decoder import Decoder
from sutils.utils import is_py3
try:
from StringIO import StringIO
except ImportError:
from io import BytesIO
from datetime import datetime, timedelta
# Registration metadata: presumably read by SHARPpy's decoder loader to map a
# format name to the decoder class in this module -- confirm against the io
# package's registry code.
__fmtname__ = "nucaps"
__classname__ = "NUCAPSDecoder"
class NUCAPSDecoder(Decoder):
    """Decoder for NUCAPS satellite-sounding text files.

    Parses the downloaded text into one profile plus cloud-top metadata
    (cloud-top fraction CTF and cloud-top pressure CTP) and wraps it in a
    ProfCollection.
    """
    def __init__(self, file_name):
        """Delegate to the base Decoder, which handles *file_name* retrieval."""
        super(NUCAPSDecoder, self).__init__(file_name)
    def _parse(self):
        """Parse the raw text into a ProfCollection.

        The file is expected to contain %TITLE%, %RAW% and %END% marker lines
        and, optionally, one comma-separated line with cloud information
        (fields named ctf_low, ctf_high, ctp_low, ctp_high).
        """
        file_data = self._downloadFile()
        ## read in the file, one stripped line per array element
        data = np.array([l.strip() for l in file_data.split('\n')])
        # Find the cloud info line in the text file.
        cloud_list = []
        for item in data:
            if 'ctf_low' in item:
                cloud_list.append(item)
        # Attempt to unpack the list containing the cloud information.
        # NOTE(review): the bare except is a deliberate best-effort -- an empty
        # cloud_list raises IndexError here and we fall back to missing values.
        try:
            cloud_line = cloud_list[0]
            cloud_flag = True
        except:
            cloud_flag = False
        # Assign CTF and CTP to local variables.
        # Each comma-separated field looks like "name value"; take the value.
        if cloud_flag is True:
            ctf_low_string = cloud_line.split(',')[0]
            ctf_high_string = cloud_line.split(',')[1]
            ctp_low_string = cloud_line.split(',')[2]
            ctp_high_string = cloud_line.split(',')[3]
            ctf_low = ctf_low_string.split(' ')[1]
            ctf_high = ctf_high_string.split(' ')[1]
            ctp_low = int(ctp_low_string.split(' ')[1])
            ctp_high = int(ctp_high_string.split(' ')[1])
            # Skew-T won't launch if cloud top pressure < 100mb.
            # Set variables to dummy values so it doesn't try to draw the CTP line out of bounds.
            if ctp_low < 100:
                ctp_low = 3000
            if ctp_high < 100:
                ctp_high = 3000
        else:
            # Assign missing values just in case the cloud info is not in the text file.
            ctf_low = -99999
            ctf_high = -99999
            ctp_low = 3000
            ctp_high = 3000
        ## necessary index points delimiting the header and data sections
        title_idx = np.where( data == '%TITLE%')[0][0]
        start_idx = np.where( data == '%RAW%' )[0][0] + 1
        finish_idx = np.where( data == '%END%')[0][0]
        ## create the plot title from the line after %TITLE%:
        ## "<location> <yymmdd/HHMM...> [<lat>,<lon>]"
        data_header = data[title_idx + 1].split()
        location = data_header[0]
        time = datetime.strptime(data_header[1][:11], '%y%m%d/%H%M')
        if len(data_header) > 2:
            lat, lon = data_header[2].split(',')
            lat = float(lat)
            lon = float(lon)
        else:
            # Fallback coordinates when the header has no lat,lon field.
            lat = 35.
            lon = -97.
        if time > datetime.utcnow() + timedelta(hours=1):
            # If the strptime accidently makes the sounding in the future (like with SARS archive)
            # i.e. a 1957 sounding becomes 2057 sounding...ensure that it's a part of the 20th century
            time = datetime.strptime('19' + data_header[1][:11], '%Y%m%d/%H%M')
        ## put it all together for StringIO (BytesIO on Python 3)
        full_data = '\n'.join(data[start_idx : finish_idx][:])
        if not is_py3():
            sound_data = StringIO( full_data )
        else:
            sound_data = BytesIO( full_data.encode() )
        ## read the data into arrays: pressure, height, temp, dewpoint, wind
        p, h, T, Td, wdir, wspd = np.genfromtxt( sound_data, delimiter=',', comments="%", unpack=True )
        # idx = np.argsort(p, kind='mergesort')[::-1] # sort by pressure in case the pressure array is off.
        pres = p #[idx]
        hght = h #[idx]
        tmpc = T #[idx]
        dwpc = Td #[idx]
        wspd = wspd #[idx]
        wdir = wdir #[idx]
        # Br00tal hack: discard a bogus first height value (> 30 km) by
        # replacing it with the missing-data sentinel.
        if hght[0] > 30000:
            hght[0] = -9999.00
        # Force latitude to be 35 N. Figure out a way to fix this later.
        # Added cloud top parameters to profile object.
        prof = profile.create_profile(profile='raw', pres=pres, hght=hght, tmpc=tmpc, dwpc=dwpc,
            wdir=wdir, wspd=wspd, location=location, date=time, latitude=lat, missing=-9999.00,
            ctf_low=ctf_low, ctf_high=ctf_high, ctp_low=ctp_low, ctp_high=ctp_high)
        # Single-member collection keyed by an empty ensemble name.
        prof_coll = prof_collection.ProfCollection(
            {'':[ prof ]},
            [ time ],
        )
        prof_coll.setMeta('loc', location)
        prof_coll.setMeta('observed', True)
        prof_coll.setMeta('base_time', time)
        prof_coll.setMeta('ctf_low', ctf_low)
        prof_coll.setMeta('ctf_high', ctf_high)
        prof_coll.setMeta('ctp_low', ctp_low)
        prof_coll.setMeta('ctp_high', ctp_high)
        return prof_coll
#if __name__ == '__main__':
# import sys
# NUCAPSDecoder(sys.argv[1])
| [
"jeff.szkodzinski@gmail.com"
] | jeff.szkodzinski@gmail.com |
61d4cb8321da42565846731f5508d076faf517b3 | 79b3f4fc9795addb1b581613d1183d6c0c3332e9 | /evaluation/ruby/first_run_gear_images_without_cache.py | c442aff97873cfc685b1316387f69fe357e63b09 | [
"Apache-2.0"
] | permissive | seveirbian/gear-framework | 2da2cdff683df3b8775857218835fd69f5fd12ef | 8d3529a9bf42e652a9d7475c9d14e9a6afc69a76 | refs/heads/master | 2022-06-24T18:24:24.871336 | 2019-11-20T07:03:35 | 2019-11-20T07:03:35 | 171,786,603 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 5,337 | py | import sys
# package need to be installed, pip install docker
import docker
import time
import yaml
import os
import random
import subprocess
import signal
import urllib2
import shutil
import xlwt
# package need to be installed, pip install elasticsearch
from elasticsearch import Elasticsearch
# When True (enabled by passing any CLI argument), advance between images
# automatically instead of prompting the user.
auto = False
# Registry that hosts the converted "-gear" images.
private_registry = "202.114.10.146:9999/"
suffix = "-gear"
apppath = ""
# run parameters
hostPort = 9200
localVolume = ""
pwd = os.path.split(os.path.realpath(__file__))[0]
runEnvironment = []
runPorts = {}
# Mount the local hello.rb into the container so runCommand can execute it.
runVolumes = {os.path.join(pwd, "hello.rb"): {'bind': '/hello.rb', 'mode': 'rw'},}
runWorking_dir = ""
runCommand = "ruby /hello.rb"
# Log line that signals the containerised app has started.
waitline = "hello"
# result table (header row); Runner.run appends one row per image.
result = [["tag", "finishTime", "local data", "pull data", "file_num"], ]
class Runner:
    """Runs every configured image once and records timing/size metrics.

    Expects *images* in the shape produced by Generator.generateFromProfile():
    element 0 holds {"repo": [...]} and element 1 maps repo -> [tags]
    (inferred from the indexing below -- confirm against image_versions.yaml).
    Python 2 script (print statements, raw_input).
    """
    def __init__(self, images):
        self.images_to_pull = images
    def check(self):
        # detect whether the file exists, if true, delete it
        if os.path.exists("./images_run.txt"):
            os.remove("./images_run.txt")
    def run(self):
        """Create/start each repo:tag, wait for its ready line, record stats.

        Side effects: talks to the local docker daemon, measures the private
        gear store under /var/lib/gear/private, wipes /var/lib/gear/public
        (the cache), and appends one row per image to the global ``result``.
        """
        self.check()
        client = docker.from_env()
        # if don't give a tag, then all image under this registry will be pulled
        repos = self.images_to_pull[0]["repo"]
        for repo in repos:
            tags = self.images_to_pull[1][repo]
            for tag in tags:
                private_repo = private_registry + repo + suffix + ":" + tag
                if localVolume != "":
                    if os.path.exists(localVolume) == False:
                        os.makedirs(localVolume)
                print "start running: ", private_repo
                # create a random name
                runName = '%d' % (random.randint(1,100000000))
                # get present time
                startTime = time.time()
                # get present net data (baseline for the pull-traffic delta)
                cnetdata = get_net_data()
                # run images
                container = client.containers.create(image=private_repo, environment=runEnvironment,
                    ports=runPorts, volumes=runVolumes, working_dir=runWorking_dir,
                    command=runCommand, name=runName, detach=True)
                container.start()
                # Poll the container log until the ready line appears.
                while True:
                    if container.logs().find(waitline) >= 0:
                        break
                    else:
                        time.sleep(0.1)
                        pass
                # print run time
                finishTime = time.time() - startTime
                print "finished in " , finishTime, "s"
                container_path = os.path.join("/var/lib/gear/private", private_repo)
                # Disk usage (MB) of this image's private gear store.
                local_data = subprocess.check_output(['du','-ms', container_path]).split()[0].decode('utf-8')
                print "local data: ", local_data
                pull_data = get_net_data() - cnetdata
                print "pull data: ", pull_data
                # Best-effort teardown: kill may fail if the process already exited.
                try:
                    container.kill()
                except:
                    print "kill fail!"
                    pass
                container.remove(force=True)
                # cmd = '%s kill %s' % ("docker", runName)
                # rc = os.system(cmd)
                # assert(rc == 0)
                # Count the files materialised in the private store.
                file_num = 0
                private_path = os.path.join("/var/lib/gear/private", private_repo)
                for root, dirs, files in os.walk(private_path):
                    for each in files:
                        file_num += 1
                print "file numbers: ", file_num
                # delete files under /var/lib/gear/public/
                shutil.rmtree('/var/lib/gear/public/')
                os.mkdir('/var/lib/gear/public/')
                print "empty cache! \n"
                # record the image and its Running time
                result.append([tag, finishTime, local_data, pull_data, file_num])
                if auto != True:
                    raw_input("Next?")
                else:
                    time.sleep(5)
        if localVolume != "":
            shutil.rmtree(localVolume)
class Generator:
    """Loads the image/tag profile from a YAML file.

    :param profilePath: path to the YAML profile (e.g. image_versions.yaml)
    """
    def __init__(self, profilePath=""):
        self.profilePath = profilePath
    def generateFromProfile(self):
        """Parse the YAML profile, cache it on self.images, and return it.

        :returns: the parsed YAML content
        :raises ValueError: if no profile path was supplied. (Previously the
            error was only printed and execution continued into open(""),
            which crashed with a confusing file-not-found error.)
        """
        if self.profilePath == "":
            raise ValueError("Error: profile path is null")
        with open(self.profilePath, 'r') as f:
            # safe_load parses plain YAML data without instantiating arbitrary
            # Python objects; yaml.load without an explicit Loader is
            # deprecated and unsafe on untrusted input.
            self.images = yaml.safe_load(f)
        return self.images
def get_net_data(interface="enp0s3", path="/proc/net/dev"):
    """Return the cumulative received traffic of *interface* in MiB.

    Reads the kernel's per-interface counters (``/proc/net/dev`` format);
    column 1 of a device row is the total received bytes. If the interface
    appears on several lines, the last match wins (same as the original).

    :param interface: NIC name to look for (default "enp0s3", the NIC of the
        original benchmark host; parameterised for other machines)
    :param path: counters file to parse (parameterised for testability)
    :returns: received bytes converted to MiB, as a float
    :raises RuntimeError: if no line mentions *interface* (the original code
        died with an UnboundLocalError in that case)
    """
    data = None
    # "with" guarantees the handle is closed even if parsing raises
    # (the original leaked the handle on a parse error).
    with open(path, "r") as fd:
        for line in fd:
            if interface in line:
                data = float(line.split()[1]) / 1024.0 / 1024.0
    if data is None:
        raise RuntimeError("interface %r not found in %s" % (interface, path))
    return data
if __name__ == "__main__":
    # Any extra CLI argument switches to non-interactive (auto) mode.
    if len(sys.argv) == 2:
        auto = True
    # Load the repo/tag profile that sits next to this script.
    generator = Generator(os.path.split(os.path.realpath(__file__))[0]+"/image_versions.yaml")
    images = generator.generateFromProfile()
    runner = Runner(images)
    runner.run()
    # create a workbook sheet
    workbook = xlwt.Workbook()
    sheet = workbook.add_sheet("run_time")
    # Dump the collected result rows into the sheet, cell by cell.
    for row in range(len(result)):
        for column in range(len(result[row])):
            sheet.write(row, column, result[row][column])
    workbook.save(os.path.split(os.path.realpath(__file__))[0]+"/first_run_without_cache.xls")
"bianshengwei@gmail.com"
] | bianshengwei@gmail.com |
9b0612a4597a28b9bfac2f4dc745eb4104ab302c | 384d0be5ac54b306b945cf38c10d9b0a44c975ea | /devstack/tools/uec/meta.py | 5b845d81a69b19773c66ea4fb61a1a9065a88c47 | [] | no_license | ashokcse/openstack-bill | 05ae313637b3cfecba946d2a9b32e8c7609fc721 | 1a3d7575d4b341f64fa1764ed47e47a7504a9bcc | refs/heads/master | 2021-01-18T14:05:24.696165 | 2012-09-12T11:29:20 | 2012-09-12T11:29:20 | 5,424,267 | 5 | 2 | null | null | null | null | UTF-8 | Python | false | false | 827 | py | import sys
from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler
from SimpleHTTPServer import SimpleHTTPRequestHandler
def main(host, port, HandlerClass = SimpleHTTPRequestHandler,
         ServerClass = HTTPServer, protocol="HTTP/1.0"):
    """Simple HTTP server that listens on a given host:port (Python 2).

    Serves via *HandlerClass* (SimpleHTTPRequestHandler serves the current
    working directory) and blocks forever in serve_forever().
    """
    server_address = (host, port)
    HandlerClass.protocol_version = protocol
    httpd = ServerClass(server_address, HandlerClass)
    # Report the actually-bound address (useful when host was 0.0.0.0).
    sa = httpd.socket.getsockname()
    print "Serving HTTP on", sa[0], "port", sa[1], "..."
    httpd.serve_forever()
if sys.argv[1:]:
address = sys.argv[1]
else:
address = '0.0.0.0'
if ':' in address:
host, port = address.split(':')
else:
host = address
port = 8080
main(host, int(port))
| [
"ashokcse@live.com"
] | ashokcse@live.com |
d49ea65ea1d608754984e1885d288d255efbf3a9 | a8f615e6f2e00bcc72cd67475c5dd4a9ff0e6c14 | /imdemo/imdemo/pages/nodes/pin.py | 15058ecdfab3a662b795bd45d0d98c33f047f968 | [
"MIT"
] | permissive | KangWeon/arcade-imgui | fcf43f2399f56960b5249bd80e4e16d8639be8e2 | 24a8d423440cd9adaf3373a9c2492d04d8862062 | refs/heads/master | 2023-01-01T03:04:05.605347 | 2020-10-18T08:04:21 | 2020-10-18T08:04:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 349 | py | class Pin:
def __init__(self, node, name):
self.node = node
self.name = name
self.x = 0
self.y = 0
def set_position(self, pos):
self.x, self.y = pos
def get_position(self):
return (self.x, self.y)
    def draw(self):
        """Render the pin; a no-op in the base class (extension hook)."""
        pass
class Input(Pin):
    """Marker subclass distinguishing input pins; adds no behaviour."""
    pass
class Output(Pin):
    """Marker subclass distinguishing output pins; adds no behaviour."""
    pass
"kurtisfields@gmail.com"
] | kurtisfields@gmail.com |
4e377d1f7248561c289f1ac5873fcfb9141ac04e | 1b0307d05796d32980685633a4aba60023cdbe9e | /models.py | 13c690a6b69843810dd87dbd2f36dc93ffe4c9fe | [] | no_license | LBWright/fun-flask | b6512279337d6272f6d3968af10198e93211f7a0 | db88e1b1fcedc465c86238fedbd3757ab31b82d7 | refs/heads/master | 2020-04-08T12:44:33.133699 | 2018-11-27T15:43:19 | 2018-11-27T15:43:19 | 159,359,659 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 422 | py | from app import db
class BlogPost(db.Model):
    """SQLAlchemy model for a blog post, stored in the ``posts`` table."""
    __tablename__ = 'posts'
    # Surrogate integer primary key.
    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    # Post headline; required.
    title = db.Column(db.String, nullable=False)
    # Post body/summary text; required.
    description = db.Column(db.String, nullable=False)
    def __init__(self, title, description):
        """Create a post from its title and description."""
        self.title = title
        self.description = description
    def __repr__(self):
        """Debug representation showing only the post title."""
        return f'Title: {self.title}'
"logan.wright@valic.com"
] | logan.wright@valic.com |
e64d6d01efc28836c6313cd98f7361ab953147d2 | 6a81e51745fc9598d24fb51e8a3d817c707435d7 | /jornalismo_de_dados_V2.0/portal/migrations/0001_initial.py | 4784abad8d02bb8d2bed218bee03d3f27c90b2db | [] | no_license | bmedeirosneto/TacProgWeb | fbd7ecb9191ef2703580edc9300d15ee71d37058 | 1daeda3b97bb901bd4f69d649c1d735c3509d9de | refs/heads/master | 2021-12-03T21:19:58.212572 | 2021-08-25T12:35:36 | 2021-08-25T12:35:36 | 226,856,253 | 0 | 13 | null | null | null | null | UTF-8 | Python | false | false | 471 | py | # Generated by Django 2.2.7 on 2019-11-26 01:56
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated initial migration: creates the ``Tweet`` table."""
    # First migration of this app, so it has no predecessors.
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Tweet',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('text', models.TextField()),  # the tweet's text content
            ],
        ),
    ]
| [
"pv.mizuno@hotmail.com"
] | pv.mizuno@hotmail.com |
bf288f5f82e3d595ef0789d16294877cdc8f90df | 29a188c4767ef5fcea15fb43b94ee97397b2e35c | /JobMightNewOne.py | cd2db690ad4241a255c104d25e69379f0074134c | [] | no_license | mariapot/FirstRepo | 29bc5d666101b4fcff08972dd168eed81b0f2fea | 9906ccbbaa94e66b35312af3c6f78e8d6fc686fc | refs/heads/main | 2023-01-24T18:45:40.617943 | 2020-12-04T20:53:32 | 2020-12-04T20:53:32 | 303,777,372 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 924 | py | from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
# import time
# NOTE(review): credentials are hard-coded below; move them to environment
# variables or a config file before sharing or deploying this script.
driver = webdriver.Firefox()
driver.get("http://jobmight.com/session/emp/signin")
# Wait (up to 20 s) for the email field to become visible, then fill it in.
email_input: object = WebDriverWait(driver, 20).until((EC.visibility_of_element_located((By.NAME, "email"))))
email_input.send_keys("kate@gmail.com")
# Same for the password field.
pass_input: object = WebDriverWait(driver, 20).until((EC.visibility_of_element_located((By.NAME, "password"))))
pass_input.send_keys("Kate20@")
# login_btn = WebDriverWait(driver, 20).until(
#     (EC.element_to_be_clickable((By.XPATH, '//div[@id="root"]'))))
# login_btn.click()
# Wait for the submit button (located via a brittle absolute XPath) and click it.
login_btn = WebDriverWait(driver, 20).until((EC.element_to_be_clickable((By.XPATH,
    '//div[@id="content"]/div/div/div/div/div/div[2]/div[2]/form/div[3]/div/button'))))
login_btn.click()
| [
"noreply@github.com"
] | noreply@github.com |
a1c7925648cc2d66e5c4b229965f8d80d6f98026 | bc7315f96a6ac9c16a5bf0cdd9c2721b73639fc4 | /djangoLab/wsgi.py | f1946819f0bdb5d8e0db13d9a989690411a033f8 | [] | no_license | jeffersonchaves/django_lab | e87cb7841b8498665f0dc0a546a2553517faaf2f | 50fa4fd5ae62210af9c4e8b2be863af0eec2d5e2 | refs/heads/master | 2020-05-02T22:43:32.710134 | 2019-03-28T17:37:49 | 2019-03-28T17:37:49 | 178,260,141 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 395 | py | """
WSGI config for djangoLab project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.0/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Tell Django which settings module to use before creating the application.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "djangoLab.settings")
# The WSGI callable that application servers (gunicorn, uWSGI, ...) look up.
application = get_wsgi_application()
| [
"jeffersong.s@hotmail.com"
] | jeffersong.s@hotmail.com |
bef4ed0adc518bd890aba6eb08948e612e7755b4 | 9eaa2c64a777bd24a3cccd0230da5f81231ef612 | /study/1905/month01/code/Stage1/day04/exercise02.py | 4527340f5bf057badc200a68d1b1fcc8edce6772 | [
"MIT"
] | permissive | Dython-sky/AID1908 | 4528932f2ca66b844d8a3fcab5ed8bf84d20eb0c | 46cd54a7b36b5f009974f2bbb7005a4ad440ca1a | refs/heads/master | 2022-04-14T12:23:30.426270 | 2020-04-01T18:05:19 | 2020-04-01T18:05:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 293 | py | """
一张纸的厚度是0.01mm
对折多少次厚度能超过珠穆朗玛峰(8844.43米)
"""
thickness = 0.01 / 1000
count = 0
while thickness <= 8848.43:
thickness *= 2
count += 1
# print(thickness)
print("一张纸对折{}次能超过珠穆朗玛峰".format(count))
| [
"dong_1998_dream@163.com"
] | dong_1998_dream@163.com |
23ecb343416fabc155c73a1bb540335f8d9e39bc | 352e6a9e339a691d8d658732016b0c38b736946c | /lab1/lab1.py | 9a610d5946e6d7bfe61d9f6f1da688948096157b | [] | no_license | guapstudent1337/COS | 16355d429f8353d88e77c6a687ea20c2e06bc46c | cdc5f55da2eb36824692803820b68e66a639325e | refs/heads/master | 2022-04-19T01:41:16.615930 | 2020-04-21T11:58:49 | 2020-04-21T11:58:49 | 257,057,926 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,075 | py | def split_signals(signal_amplitudes):
"""
Ищет начала и концы сигналов
:signal_amplitudes: амплитуды сигнала
:returns: словарь с началами и концами сигналов
"""
# Ищем отдельные сигналы
count = 0
start_signal = 0
end_signal = 0
start_end_siganl_dots = {}
is_siganl_detected = False
while True:
# Ищем начало сигнала
for index, signal_level in enumerate(signal_amplitudes[end_signal:]):
if (signal_level > 2) | (signal_level < -2):
start_signal = index + end_signal - 1
is_siganl_detected = True
break
# Если сигнал не замечен до конца массива, то выходим
if is_siganl_detected == False:
break
# Ищем конец сигнала
for index, signal_level in enumerate(signal_amplitudes[start_signal:]):
if (signal_level <= 2) & (signal_level >= -2):
count += 1
if count > 70:
end_signal = index + start_signal - count + 2
count = 0
break
else:
count = 0
if is_siganl_detected == True:
is_siganl_detected = False
start_end_siganl_dots[start_signal] = end_signal
return start_end_siganl_dots
def data_file_to_array(file_name):
    """Read one amplitude value per line from *file_name* into a numpy array.

    Python's float() natively parses scientific notation ("2.5e+03", "4e-02"),
    so no manual exponent handling is needed. The previous hand-rolled 'e+'
    parser crashed with a ValueError on negative exponents such as "1e-05".
    Blank lines are skipped.

    :param file_name: path to the data file, as a string
    :returns: numpy array of float amplitudes
    """
    import numpy as np
    with open(file_name, "r") as data_file:
        values = [float(line) for line in data_file if line.strip()]
    return np.array(values)
def main():
    """Load the lab signal, plot the full record with the ADC noise RMS,
    then plot every detected signal separately with its parameters
    (amplitude A, offset tau, duration T)."""
    import matplotlib.pyplot as plt
    from statistics import stdev
    import numpy as np
    # Constants given by the assignment.
    delta_T = 5.888*10**(-5)
    M = 16
    U_max = 6
    file_path = "DSP_LR1_data_files/DSP_Lab_01_File_Var_10_Att_2.dat"
    # Derived quantities: one-LSB voltage step, samples, time axis.
    delta_U = (U_max * 2)/(2**M)
    global_y = data_file_to_array(file_path)
    global_x = np.array(range(0, len(global_y))) * delta_T
    # ADC noise RMS, estimated over the first quarter of the record
    # (assumed to be signal-free -- TODO confirm against the data file).
    rms_U = stdev(global_y[:int(len(global_y)/4)] * delta_U)
    # Plot the whole record.
    global_fig = plt.figure()
    global_ax = global_fig.add_subplot(111)
    global_ax.plot(global_x, global_y * delta_U)
    global_ax.set_xlabel("Время, сек")
    global_ax.set_ylabel("Амплитуда, В")
    global_ax.text(0.3, 0.2, "Среднеквадратическое значение шума АЦП: {}".format(round(rms_U, 7)), fontsize=14,
                   fontweight="bold", transform=global_ax.transAxes)
    # Split the record into individual signals.
    start_end_siganl_dots = split_signals(global_y)
    # Plot each signal on its own figure, annotated with its parameters.
    for start in start_end_siganl_dots:
        tau = start * delta_T
        duration = (start_end_siganl_dots[start] - start) * delta_T
        max_amplitude = max(global_y[start:start_end_siganl_dots[start]]) * delta_U
        fig = plt.figure()
        ax = fig.add_subplot(111)
        ax.plot(global_x[start:start_end_siganl_dots[start]], global_y[start:start_end_siganl_dots[start]] * delta_U)
        ax.set_xlabel("Время, сек")
        ax.set_ylabel("Амплитуда, В")
        ax.text(0.3, 0.2, "A = {}\n\u03C4 = {}\nT = {}\n".format(max_amplitude, tau, duration), fontsize=14,
                fontweight="bold", transform=ax.transAxes)
    plt.show()
# Run the lab analysis only when executed as a script.
if __name__ == "__main__":
    main()
| [
"guapstudent1337@users.noreply.github.com"
] | guapstudent1337@users.noreply.github.com |
b197533d33ff3b4d9f3b749b5dd0b0c867975e10 | d913554b362cbe96cbaa54aad1d6008242235462 | /textsnipet/textsharing/admin.py | 9b2b37dd4e8026d11d5f0bc0c20f3c1f6ae6fe9f | [] | no_license | pavanbhole123/sample-app | 8b92c61a7faaf0da1f58ca9df07ee7a584326c2c | 9d9341130bc7bf4d7df85099b314bf6ee80cee2a | refs/heads/master | 2023-04-07T15:49:44.179676 | 2021-01-05T14:51:08 | 2021-01-05T14:51:08 | 356,517,890 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 127 | py | from django.contrib import admin
# Register your models here.
from .models import TextInfo
admin.site.register(TextInfo) | [
"pavan.bhole@fixstream.com"
] | pavan.bhole@fixstream.com |
2f93202632a69772d24c30765753aca99416cc21 | f5470f3ad79e7a049d0394d20525609f7be20dd3 | /dev/test/test_array_products.py | f33fbfab8098e9fcc963fcb0e1dd0efcffe95872 | [] | no_license | AutomatedDevOwl/array-products-python-task | 23711514cce3fa4b1fa298a2aa2ecafcab39cff1 | 66e5f03c1f1415139d6b61ba06d2ea8c6981c6f3 | refs/heads/master | 2020-07-13T03:33:00.370767 | 2019-08-28T16:55:25 | 2019-08-28T16:55:25 | 204,978,726 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 244 | py | import sys
import pytest
from .test_case_array_products import TestCaseWebHost
class TestWebHost(TestCaseWebHost):
def test_site88(self, example_fixture):
# Print example pytest fixture.
sys.stdout.write('hello world')
| [
"noreply@github.com"
] | noreply@github.com |
7082e9376a128de6d7830ad8452ef3ec978f1464 | fa44a1b7110d93c2ea02e99aae727f999d064135 | /Vcenter/Cluster.py | eec5680d4bafda6a539f1c2679d9c1086635103b | [
"Apache-2.0"
] | permissive | 2spmohanty/NSX-V | 47469856b8522c30c3fe9a8eb6fb691e266b34c4 | f5bdbaf28808d665653a400a705980174f71b1db | refs/heads/master | 2020-12-09T18:59:05.399097 | 2020-01-12T12:54:23 | 2020-01-12T12:54:23 | 233,389,475 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,472 | py | __author__ = 'smrutim'
from pyVmomi import vim
#from logging import error, warning, info, debug
from Datacenter import GetAllClusters,GetClusters,GetCluster
from VDS import wait_for_task
import time
import re
import simpleTimer
#import logging
#from threadPool import ThreadPool
"""
For Any Code changes.
Please update the READ.md file and here also for quick reference.
"""
def GetHostsInCluster(datacenter, clusterName=None, connectionState=None):
"""
Return list of host objects from given cluster name.
@param datacenter: datacenter object
@type datacenter: Vim.Datacenter
@param clusterName: cluster name
@type clusterName: string
@param connectionState: host connection state ("connected", "disconnected", "notResponding"), None means all states.
@typr connectionState: string
"""
if clusterName != None:
return GetHostsInClusters(datacenter, [clusterName], connectionState)
else:
print("clusterName is NoneType")
return
############# Added for Register VM #####################################
def GetRunningHostsInCluster(datacenter, clusterName=None, connectionState=None):
"""
Return list of host objects from given cluster name.
@param datacenter: datacenter object
@type datacenter: Vim.Datacenter
@param clusterName: cluster name
@type clusterName: string
@param connectionState: host connection state ("connected", "disconnected", "notResponding"), None means all states.
@typr connectionState: string
"""
if clusterName != None:
return GetRunningHostsInClusters(datacenter, [clusterName], connectionState)
else:
print("clusterName is NoneType")
return
def GetRunningHostsInClusters(datacenter, clusterNames=[], connectionState=None):
"""
Return list of host objects from given cluster names.
@param datacenter: datacenter object
@type datacenter: Vim.Datacenter
@param clusterNames: cluster name list
@type clusterNames: ClusterObjectMor[]
@param connectionState: host connection state ("connected", "disconnected", "notResponding"), None means all states.
@typr connectionState: string
"""
if len(clusterNames) == 0:
clusterObjs = GetAllClusters(datacenter)
else:
clusterObjs = clusterNames
hostObjs = []
if connectionState == None:
hostObjs = [h for cl in clusterObjs for h in cl.host]
else:
hostObjs = [h for cl in clusterObjs for h in cl.host if h.runtime.connectionState == connectionState and not h.runtime.inMaintenanceMode]
return hostObjs
def GetHostsInClusters(datacenter, clusterNames=[], connectionState=None):
"""
Return list of host objects from given cluster names.
@param datacenter: datacenter object
@type datacenter: Vim.Datacenter
@param clusterNames: cluster name list
@type clusterNames: string[]
@param connectionState: host connection state ("connected", "disconnected", "notResponding"), None means all states.
@typr connectionState: string
"""
if len(clusterNames) == 0:
clusterObjs = GetAllClusters(datacenter)
else:
clusterObjs = GetClusters(datacenter, clusterNames)
hostObjs = []
if connectionState == None:
hostObjs = [h for cl in clusterObjs for h in cl.host]
else:
hostObjs = [h for cl in clusterObjs for h in cl.host if h.runtime.connectionState == connectionState]
return hostObjs
| [
"2spmohanty@gmail.com"
] | 2spmohanty@gmail.com |
6bd597aadeb0f3d7450abea0a689fad71da320c9 | 4f80e032b697644c57afd6475eada5c2253bcb7a | /7.demo app/backend/landmark_detection.py | 67e13092d175e0a7ae8a56ec97f116b3534ba7ad | [] | no_license | DriverDrowsinessDetection/driver_drowsiness_detection_public | 349040079d0e73af1d756a557d61e4cdc5afc8ff | a01cf18532716e6ba0ca36416ec5046c559af7e0 | refs/heads/master | 2023-03-09T14:02:15.430008 | 2021-02-28T18:04:23 | 2021-02-28T18:04:23 | 342,894,889 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,515 | py | import math
from imutils import face_utils
import time
import dlib
import cv2
from numpy import mean
import numpy as np
from scipy.spatial import distance as dist
import tensorflow as tf
# Blink-detection constants.
# You can later experiment with these constants by changing them to adaptive variables.
EAR_THRESHOLD = 0.21 # eye aspect ratio below which the eye counts as closed
EAR_CONSEC_FRAMES = 3 # number of consecutive frames the eye must be below the threshold
SKIP_FIRST_FRAMES = 0 # how many frames we should skip at the beginning
# initialize output structures
scores_string = ""
# PERCLOS bookkeeping, mutated by process_video() via `global`.
FRAME_COUNTER = 0
EYE_CLOSED_COUNTER = 0
# Latest per-frame feature values; shared with the web layer through
# get_global_variable(). Values start as empty strings until the first frame.
features_dict = {
    "EAR": "",
    "PERCLOS": "",
    "MAR": "",
    "MOE": "",
    "EC": "",
    "LEB": "",
    "SOP": "",
    "EARLYDETECTION":"",
    "DROWSINESS":""
}
# define ear function
def eye_aspect_ratio(eye):
    """Return the eye aspect ratio (EAR) for one eye.

    `eye` holds six (x, y) landmarks: [0] and [3] are the horizontal
    corners, [1]/[5] and [2]/[4] the two vertical pairs.
    """
    # Mean vertical opening divided by the horizontal width.
    vertical = dist.euclidean(eye[1], eye[5]) + dist.euclidean(eye[2], eye[4])
    horizontal = dist.euclidean(eye[0], eye[3])
    return vertical / (2.0 * horizontal)
def mouth_aspect_ration(mouth):
    """Return the mouth aspect ratio: vertical opening over horizontal width.

    `mouth` is [top, bottom, left, right] inner-lip landmark points.
    """
    opening = dist.euclidean(mouth[0], mouth[1])
    width = dist.euclidean(mouth[2], mouth[3])
    return opening / width
def compute_perclos(perclosList):
    """Return PERCLOS as a percentage: mean of a 0/1 closed-eye list times 100."""
    return mean(perclosList) * 100
def eye_circularity(eye):
    """Return the eye circularity: 4*pi*area / perimeter**2.

    The perimeter is that of the closed 6-point eye contour; the pupil
    area is approximated by a circle whose diameter is the distance
    between landmarks 1 and 4.
    """
    perimeter = sum(dist.euclidean(eye[i], eye[(i + 1) % 6]) for i in range(6))
    diameter = dist.euclidean(eye[1], eye[4])
    pupil_area = math.pi * (diameter * diameter) / 4
    return (4 * math.pi * pupil_area) / (perimeter * perimeter)
def level_of_eyebrows(eye):
    """Return the mean distance from the two eyebrow points eye[0], eye[1]
    to the reference eye landmark eye[2]."""
    first = dist.euclidean(eye[0], eye[2])
    second = dist.euclidean(eye[1], eye[2])
    return (first + second) / 2
def size_of_pupil(eye):
    """Return the vertical/horizontal extent ratio of one eye, used as a
    proxy for pupil size."""
    height = dist.euclidean(eye[1], eye[4])
    width = dist.euclidean(eye[0], eye[3])
    return height / width
def get_global_variable():
    """Return the module-level features_dict (a live reference, not a copy)."""
    # `global` is only needed to *assign* a module global; reads resolve
    # automatically, so the redundant declaration is removed.
    return features_dict
def logit(x):
    """Inverse of the logistic sigmoid, clamped to the range [0, 2].

    Inputs at or below 0.5 (= sigmoid(0)) map to 0.0; inputs at or above
    0.880797 (~= sigmoid(2)) map to 2.0; in between the exact logit
    -log(1/x - 1) is returned.
    """
    if x <= 0.5:
        return 0.0
    if x >= 0.880797:
        return 2.0
    # math.log on a plain float avoids building a TensorFlow op (and the
    # tensor->float round trip) for a scalar computation; `math` is already
    # imported at the top of this module.
    return -math.log(1.0 / x - 1.0)
# process a given video file
def process_video(vs, detector, predictor, scaler, subject_wise_scaler, model,
                  lStart=42, lEnd=48, rStart=36, rEnd=42, ear_th=0.21, consec_th=3, up_to=None):
    """Frame-by-frame drowsiness pipeline; yields annotated JPEG frames.

    Reads frames from `vs` (cv2.VideoCapture-like), detects the face and
    its 68 landmarks, computes per-frame features (EAR, MAR, MOE, EC, LEB,
    SOP, PERCLOS), publishes them into the module-level features_dict and
    streams each annotated frame as a multipart/x-mixed-replace chunk.

    Timeline (in frames): 0-149 warm-up, 150-239 subject-wise scaler
    calibration, 240-389 sequence buffering, >=390 LSTM early-detection
    prediction on a sliding 150-frame window.

    :vs: opened video source with a read() -> (grabbed, frame) method
    :detector/predictor: dlib face detector and 68-point landmark predictor
    :scaler/subject_wise_scaler: sklearn-style scalers (transform/fit)
    :model: sequence model with predict() over (1, 150, 8) feature windows
    :lStart..rEnd: landmark index ranges of the left and right eye
    :ear_th: EAR below which the eye counts as closed
    :consec_th: closed-frame run length that counts as a blink
    :up_to: optional frame number at which to stop early
    """
    # define necessary variables
    global FRAME_COUNTER, EYE_CLOSED_COUNTER, features_dict
    COUNTER = 0
    TOTAL = 0
    current_frame = 1
    blink_start = 1
    blink_end = 1
    closeness = 0
    output_closeness = []
    output_blinks = []
    blink_info = (0, 0)
    perclos_list = []
    perclos = 0
    buffer = [] # for subject-wise calibration (NOTE: shadows the builtin `buffer` on py2)
    sequence = [] # for lstm
    while True:
        # grab the frame from the threaded video file stream, resize
        # it, and convert it to grayscale
        # channels)
        grabbed, frame = vs.read()
        if not grabbed:
            break
        height = frame.shape[0]
        weight = frame.shape[1]  # NOTE(review): presumably "width" was meant; value is unused
        gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
        # detect faces in the grayscale frame
        rects = detector(gray, 0)
        # loop over the face detections
        for rect in rects:
            # determine the facial landmarks for the face region, then
            # convert the facial landmark (x, y)-coordinates to a NumPy
            # array
            shape = predictor(gray, rect)
            shape = np.array([[p.x, p.y] for p in shape.parts()])
            # loop over the (x, y)-coordinates for the facial landmarks
            # and draw them on the image
            for (x, y) in shape:
                cv2.circle(frame, (x, y), 1, (244, 252, 129), -1)
            # extract the left and right eye coordinates, then use the
            # coordinates to compute the eye aspect ratio for both eyes
            leftEye = shape[lStart:lEnd]
            rightEye = shape[rStart:rEnd]
            leftEAR = eye_aspect_ratio(leftEye)
            rightEAR = eye_aspect_ratio(rightEye)
            # average the eye aspect ratio together for both eyes
            ear = (leftEAR + rightEAR) / 2.0
            # mouth aspect ratio from the inner-lip landmarks (top/bottom/left/right)
            mouth = [None] * 4
            mouth[0] = shape[62]
            mouth[1] = shape[66]
            mouth[2] = shape[60]
            mouth[3] = shape[64]
            mar = mouth_aspect_ration(mouth)
            # mouth over eye ratio
            moe = mar/ear
            # eye circularity, averaged over both eyes
            leftEC = eye_circularity(leftEye)
            rightEC = eye_circularity(rightEye)
            eye_circ = (leftEC + rightEC) / 2.0
            # level of eyebrows: eyebrow points vs an eye reference landmark
            left_eye_leb_coordinates = [None] * 3
            left_eye_leb_coordinates = [None] * 3
            left_eye_leb_coordinates[0] = shape[20]
            left_eye_leb_coordinates[1] = shape[21]
            left_eye_leb_coordinates[2] = shape[40]
            right_eye_leb_coordinates = [None] * 3
            right_eye_leb_coordinates[0] = shape[22]
            right_eye_leb_coordinates[1] = shape[23]
            right_eye_leb_coordinates[2] = shape[42]
            leftEyeLEB = level_of_eyebrows(left_eye_leb_coordinates)
            rightEyeLEB = level_of_eyebrows(right_eye_leb_coordinates)
            leb = (rightEyeLEB + leftEyeLEB) / 2
            # size of pupil proxy, averaged over both eyes
            leftEyeSOP = size_of_pupil(leftEye)
            rightEyeSOP = size_of_pupil(rightEye)
            sop = (leftEyeSOP + rightEyeSOP) / 2
            # check to see if the eye aspect ratio is below the blink
            # threshold, and if so, increment the blink frame counter
            if ear < ear_th:
                COUNTER += 1
                EYE_CLOSED_COUNTER += 1
                perclos_list.append(1)
                closeness = 1
                output_closeness.append(closeness)
            # otherwise, the eye aspect ratio is not below the blink
            # threshold
            else:
                # if the eyes were closed for a sufficient number of
                # frames, then increment the total number of blinks
                perclos_list.append(0)
                if COUNTER >= consec_th:
                    TOTAL += 1
                    blink_start = current_frame - COUNTER
                    blink_end = current_frame - 1
                    blink_info = (blink_start, blink_end)
                    output_blinks.append(blink_info)
                # reset the eye frame counter
                COUNTER = 0
                closeness = 0
                output_closeness.append(closeness)
            # Publish the latest per-frame features for the web layer.
            features_dict["EAR"] = ear
            features_dict["PERCLOS"] = perclos
            features_dict["MAR"] = mar
            features_dict["MOE"] = moe
            features_dict["EC"] = eye_circ
            features_dict["LEB"] = leb
            features_dict["SOP"] = sop
            # draw the total number of blinks on the frame along with
            # the computed eye aspect ratio for the frame
            # cv2.putText(frame, "Blinks: {}".format(TOTAL), (10, 30),
            # cv2.FONT_HERSHEY_SIMPLEX, 0.7, (0, 0, 255), 2)
            # cv2.putText(frame, "EAR: {:.2f}".format(ear), (200, 30),
            # cv2.FONT_HERSHEY_SIMPLEX, 0.7, (0, 0, 255), 2)
            # cv2.putText(frame, "PERCLOS: {:.2f}".format(perclos), (10, 30),
            # cv2.FONT_HERSHEY_SIMPLEX, 0.7, (0, 0, 255), 2)
            # cv2.putText(frame, "MAR: {:.2f}".format(mar), (10, 70),
            # cv2.FONT_HERSHEY_SIMPLEX, 0.7, (0, 0, 255), 2)
            # cv2.putText(frame, "MOE: {:.2f}".format(moe), (10, 100),
            # cv2.FONT_HERSHEY_SIMPLEX, 0.7, (0, 0, 255), 2)
            # cv2.putText(frame, "EC: {:.2f}".format(eye_circ), (10, 130),
            # cv2.FONT_HERSHEY_SIMPLEX, 0.7, (0, 0, 255), 2)
            # cv2.putText(frame, "LEB: {:.2f}".format(leb), (10, 160),
            # cv2.FONT_HERSHEY_SIMPLEX, 0.7, (0, 0, 255), 2)
            cv2.putText(frame, "FRAME: {:.2f}".format(FRAME_COUNTER), (10, 30),
                        cv2.FONT_HERSHEY_SIMPLEX, 0.7, (0, 0, 255), 2)
            cv2.putText(frame, "DROWSINESS: {}".format(features_dict["DROWSINESS"]), (10, 60),
                        cv2.FONT_HERSHEY_SIMPLEX, 0.7, (0, 0, 255), 2)
            cv2.putText(frame, "EARLY DETECTION: {}".format(features_dict["EARLYDETECTION"]), (10, 90),
                        cv2.FONT_HERSHEY_SIMPLEX, 0.7, (0, 0, 255), 2)
            FRAME_COUNTER += 1
            # PERCLOS over a sliding 150-frame window.
            if FRAME_COUNTER >= 150:
                perclos = compute_perclos(perclos_list)
                EYE_CLOSED_COUNTER = 0
                perclos_list.pop(0)
            if FRAME_COUNTER < 240:
                features_dict["DROWSINESS"] = "CALIB"
            if FRAME_COUNTER < 390:
                features_dict["EARLYDETECTION"] = "CALIB"
            # Frames 150-239: collect raw feature vectors to fit the
            # subject-wise scaler.
            if FRAME_COUNTER >= 150 and FRAME_COUNTER<240:
                buffer.append([ear, mar, moe, eye_circ, leb,sop,perclos,closeness])
            if FRAME_COUNTER == 240:
                subject_wise_scaler.fit(np.array(buffer))
                subject_scaled = subject_wise_scaler.transform(np.array([ear, mar, moe, eye_circ, leb,sop,perclos,closeness]).reshape(1,8))
                clmn_scaled = scaler.transform(subject_scaled)
                sequence.append(clmn_scaled)
                # yhat_drow = sk_model.predict(clmn_scaled)
                # features_dict["DROWSINESS"] = yhat_drow[0]
            # Frames 241-389: keep buffering scaled vectors until the
            # 150-frame LSTM window is full.
            if FRAME_COUNTER > 240 and FRAME_COUNTER < 390:
                subject_scaled = subject_wise_scaler.transform(np.array([ear, mar, moe, eye_circ, leb,sop,perclos,closeness]).reshape(1,8))
                clmn_scaled = scaler.transform(subject_scaled)
                # clmn_scaled=subject_scaled
                sequence.append(clmn_scaled)
                # yhat_drow = sk_model.predict(clmn_scaled)
                # features_dict["DROWSINESS"] = yhat_drow[0]
            # Frames >= 390: slide the window and run the sequence model.
            if FRAME_COUNTER >= 390:
                sequence.pop(0)
                subject_scaled = subject_wise_scaler.transform(np.array([ear, mar, moe, eye_circ, leb,sop,perclos,closeness]).reshape(1,8))
                clmn_scaled = scaler.transform(subject_scaled)
                # clmn_scaled=subject_scaled
                sequence.append(clmn_scaled)
                yhat = model.predict(np.array(sequence).reshape(1,150,8))
                # NOTE(review): iterating yhat yields its batch rows, not
                # scalars; this relies on size-1 truthiness inside logit()
                # — TODO confirm model.predict output shape is (1, 1).
                yhat_inversed = np.array([logit(x) for x in yhat])
                features_dict["EARLYDETECTION"] = np.round(yhat_inversed[0], decimals=3)
                # yhat_drow = sk_model.predict(clmn_scaled)
                # Simple instantaneous-EAR fallback for the drowsiness flag.
                features_dict["DROWSINESS"] = 1 if ear < 0.23 else 0
                # features_dict["EAR"] = clmn_scaled[0][0]
                # features_dict["PERCLOS"] = clmn_scaled[0][6]
                # features_dict["MAR"] = clmn_scaled[0][1]
                # features_dict["MOE"] = clmn_scaled[0][2]
                # features_dict["EC"] = clmn_scaled[0][3]
                # features_dict["LEB"] = clmn_scaled[0][4]
                # features_dict["SOP"] = clmn_scaled[0][5]
        # append processed frame to list
        current_frame += 1
        if up_to == current_frame:
            break
        # Emit the annotated frame as one multipart/x-mixed-replace chunk.
        ret, jpeg = cv2.imencode('.jpg', frame)
        frame2 = jpeg.tobytes()
        yield (b'--frame\r\n'
               b'Content-Type: image/jpeg\r\n\r\n' + frame2 + b'\r\n\r\n')
def dlib_detect(vs):
    """Stream frames from `vs` with the 68 dlib face landmarks drawn on them.

    Loads the HOG face detector and landmark predictor, then yields each
    annotated frame as a multipart/x-mixed-replace JPEG chunk. Runs until
    the generator is abandoned by the consumer.
    """
    # initialize dlib's face detector (HOG-based) and then create
    # the facial landmark predictor
    print("[INFO] loading facial landmark predictor...")
    detector = dlib.get_frontal_face_detector()
    predictor = dlib.shape_predictor("input/shape_predictor_68_face_landmarks.dat")
    # initialize the video stream with faster method of imutils than opencv
    print("[INFO] camera sensor warming up...")
    #vs = FileVideoStream('input/yawning.mp4').start()
    time.sleep(1.0)
    # loop over the frames from the video stream
    while True:
        # grab the frame from the threaded video stream, resize it to
        # have a maximum width of 400 pixels, and convert it to
        # grayscale
        # NOTE(review): `success` is never checked — a failed read passes
        # an invalid frame to cvtColor; consider breaking when not success.
        success, frame = vs.read()
        gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
        # detect faces in the grayscale frame
        rects = detector(gray, 0)
        # loop over the face detections
        for rect in rects:
            # determine the facial landmarks for the face region, then
            # convert the facial landmark (x, y)-coordinates to a NumPy
            # array
            shape = predictor(gray, rect)
            shape = face_utils.shape_to_np(shape)
            # loop over the (x, y)-coordinates for the facial landmarks
            # and draw them on the image
            for (x, y) in shape:
                cv2.circle(frame, (x, y), 1, (0, 0, 255), -1)
        ret, jpeg = cv2.imencode('.jpg', frame)
        frame2 = jpeg.tobytes()
        yield (b'--frame\r\n'
               b'Content-Type: image/jpeg\r\n\r\n' + frame2 + b'\r\n\r\n')
"driverdrowsinessdetection2020@gmail.com"
] | driverdrowsinessdetection2020@gmail.com |
68067a23ea3aef03df8310a252dd5a13741112e9 | 4ecf8c53154656308b4c9397bbaa02fd77a201d7 | /cuadernosite/accounts/serializers.py | d3991381bd67589c2e05fc09efcb0189cc1f8fe9 | [] | no_license | sebastiendossot/cuaderno-project | 6acfe148e6dd957b1979b04b74b14c4a3272c17f | 727705dff0fcd8013af3a66d9e50222ea042d6e0 | refs/heads/master | 2023-01-09T18:42:35.171533 | 2020-01-31T22:23:20 | 2020-01-31T22:23:20 | 237,331,890 | 0 | 0 | null | 2022-12-06T23:10:08 | 2020-01-31T00:19:51 | JavaScript | UTF-8 | Python | false | false | 1,029 | py | from rest_framework import serializers
from django.contrib.auth.models import User
from django.contrib.auth import authenticate
from cuadernos.models import Cuaderno
# User Serializer
class UserSerializer(serializers.ModelSerializer):
    """Read-only API representation of a Django auth User (id, username, email)."""
    class Meta:
        model = User
        fields = ('id', 'username', 'email')
# Register Serializer
class RegisterSerializer(serializers.ModelSerializer):
    """Validates registration input and creates a new User.

    The password is write-only so it is never echoed back in responses.
    """
    class Meta:
        model = User
        fields = ('id', 'username', 'email', 'password')
        extra_kwargs = {'password': {'write_only': True}}
    def create(self, validated_data):
        # create_user (not create) so the password is hashed before storage.
        user = User.objects.create_user(validated_data['username'], validated_data['email'], validated_data['password'] )
        return user
# Login Serializer
class LoginSerializer(serializers.Serializer):
    """Authenticates a username/password pair.

    NOTE: validate() returns the authenticated User object itself (not the
    validated data dict); callers rely on serializer.validated_data being
    the User.
    """
    username = serializers.CharField()
    password = serializers.CharField()
    def validate(self, data):
        user = authenticate(**data)
        if user and user.is_active:
            return user
        raise serializers.ValidationError("Incorrect Credentials")
"sebastien.dossot@gmail.com"
] | sebastien.dossot@gmail.com |
42792d1e385efd9b78b83f7eeaa72bf3322ff1a7 | c035ed90998ac943885f8f615991df490f041869 | /ex098.py | 265d8a358a209a59fc0d7292c18499a1630ed7fb | [] | no_license | ThiagoFTelles/python_course | 311b7370d4c7c901c205f8c8fc1cb8eaa409c4b7 | 0f74bbc8c2e16f28798131b1b8645019dd7e82eb | refs/heads/main | 2023-06-14T11:01:34.568936 | 2021-06-28T00:44:07 | 2021-06-28T00:44:07 | 380,862,482 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 983 | py | """
Faça um programa que tenha uma função chamada contador(), que receba três parâmetros: início, fim e passo. Seu programa tem que realizar três contagens através da função criada:
a) de 1 até 10, de 1 em 1
b) de 10 até 0, de 2 em 2
c) uma contagem personalizada
"""
from time import sleep
def contador(i, f, p):
    """Print a count from i to f in steps of p, pausing half a second per value.

    The step is normalized: a negative p is made positive and p == 0 falls
    back to 1, so any integer step is accepted.  The counting direction is
    chosen automatically from i and f (ascending when i < f, otherwise
    descending), exactly as the exercise requires.
    """
    if p < 0:
        p *= -1
    if p == 0:
        p = 1
    print('-=' * 30)
    print(f'Contagem de {i} até {f} de {p} em {p}:')
    # A single loop handles both directions, removing the duplicated
    # ascending/descending bodies of the original implementation.
    passo = p if i < f else -p
    cont = i
    while (cont <= f) if passo > 0 else (cont >= f):
        print(cont, end=' ')
        sleep(0.5)
        cont += passo
    print('FIM!')
# Fixed demo counts required by the exercise statement.
contador(1, 10, 1)
contador(10, 0, 2)
# Interactive custom count driven by user input.
print('Agora é sua vez de personalizar a contagem!')
inicio = int(input('Início: '))
fim = int(input('Fim: '))
passo = int(input('Passo: '))
contador(inicio, fim, passo)
| [
"atendimento.telles@gmail.com"
] | atendimento.telles@gmail.com |
311b252fcafda3be30a0ef65d230b9e80034b49b | 4b3d25e20d710442eb63ed0a655c1ae1cfe68303 | /admin/xstat.py | f3ab436a10a6b2750beeccd42aad970000f8aaa4 | [
"ISC"
] | permissive | openafs-contrib/afs-tools | ad720ae6c56a9500734eed2d84d11b7f58f01f67 | 6509810b8c66454e78514c78bb30d12281067edb | refs/heads/master | 2022-10-21T03:06:27.350718 | 2022-10-11T19:19:55 | 2022-10-14T17:56:01 | 6,767,969 | 5 | 6 | ISC | 2021-07-08T13:39:37 | 2012-11-19T21:54:11 | Perl | UTF-8 | Python | false | false | 10,758 | py | #!/usr/bin/env python
# Copyright (c) 2014-2017 Sine Nomine Associates
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THE SOFTWARE IS PROVIDED 'AS IS' AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
#------------------------------------------------------------------------------
#
# Gather stats from OpenAFS file servers.
#
# This tool runs the OpenAFS rxdebug and xstat utilities to gather statisical
# information from running file servers and cache managers over the network.
#
# NOTE: This tool requires a patched version of xstat_fs_test and rxdebug which
# provide a more regular output format.
#
# Example config file:
#
# cat ~/.xstat.conf
# [logging]
# level = info
# file = /tmp/xstat.log
#
# [collect]
# destdir = /tmp/xstats
# sleep = 60
# once = no
#
# [cell0]
# cellname = example.com
# fileservers =
# 172.16.50.143
# 172.16.50.144
#
#
import os
import sys
import errno
import re
import time
import logging
import pprint
import subprocess
import signal
import ConfigParser
# Map config-file level names to the logging module's numeric levels.
LOG_LEVELS = {
    'debug': logging.DEBUG,
    'info': logging.INFO,
    'warning': logging.WARNING,
    'error': logging.ERROR,
    'critical': logging.CRITICAL,
}
LOG_CONSOLE_FORMAT = '%(levelname)s %(message)s'
LOG_FILE_FORMAT = '%(asctime)s %(levelname)s %(message)s'

# Log to stderr until the log file name is read from the config.
logging.basicConfig(level=LOG_LEVELS['info'], format=LOG_CONSOLE_FORMAT)
# Thin wrappers so call sites read as plain functions; fatal() also
# mirrors the message to stderr and exits with status 1.
def debug(msg):
    logging.debug(msg)
def info(msg):
    logging.info(msg)
def warning(msg):
    logging.warning(msg)
def error(msg):
    logging.error(msg)
def fatal(msg):
    sys.stderr.write("ERROR: {}\n".format(msg))
    logging.critical(msg)
    sys.exit(1)
def setup_logging(filename, level):
    """Switch the root logger from stderr to the configured file and level.

    A filename of '-' keeps logging on the console (stderr).
    """
    # Update the logger to log to a file instead of stderr now that we have
    # the log filename and level.
    logger = logging.getLogger()
    if filename != '-':
        debug("writing log messages to file {}".format(filename))
        # Replace the basicConfig stderr handler with a file handler.
        old_handler = logger.handlers[0]
        new_handler = logging.FileHandler(filename)
        new_handler.setLevel(LOG_LEVELS[level])
        new_handler.setFormatter(logging.Formatter(LOG_FILE_FORMAT))
        logger.addHandler(new_handler)
        logger.removeHandler(old_handler)
    logger.setLevel(LOG_LEVELS[level])
def read_config():
    """Read ~/.xstat.conf and fill in defaults for any missing values.

    If the file does not exist yet, the cell name and file servers are
    auto-detected from the local OpenAFS client and a config file with
    the defaults is written out. Returns the populated SafeConfigParser.
    """
    filename = os.path.expanduser('~/.xstat.conf')
    c = ConfigParser.SafeConfigParser()
    debug("reading configuration file {}".format(filename))
    c.read(filename)
    if not c.has_section('logging'):
        c.add_section('logging')
    if not c.has_option('logging', 'level'):
        c.set('logging', 'level', 'info')
    if not c.has_option('logging', 'filename'):
        c.set('logging', 'filename', '-') # default to stdout
    if not c.has_section('collect'):
        c.add_section('collect')
    if not c.has_option('collect', 'destdir'):
        c.set('collect', 'destdir', '/tmp/xstats')
    if not c.has_option('collect', 'sleep'):
        c.set('collect', 'sleep', '60')
    if not c.has_option('collect', 'once'):
        c.set('collect', 'once', 'no')
    if not c.has_section('cell0'):
        c.add_section('cell0')
    if not c.has_option('cell0', 'cellname'):
        # Probe the local OpenAFS client for the workstation cell.
        c.set('cell0', 'cellname', detect_cellname())
    if not c.has_option('cell0', 'fileservers'):
        cellname = c.get('cell0', 'cellname')
        servers = detect_fileservers(cellname) # returns a dict
        addrs = [a[0] for a in servers.values()] # use primary address
        c.set('cell0', 'fileservers', "\n"+"\n".join(addrs))
    if not os.path.exists(filename): # Dont clobber existing config.
        with open(filename, 'w') as f:
            info("Writing config file {}".format(filename))
            c.write(f)
    return c
def mkdirp(path):
    """Create `path` and any missing parents; do nothing if it already exists."""
    try:
        os.makedirs(path)
    except OSError as err:
        # An already-existing directory is fine; anything else propagates.
        if err.errno == errno.EEXIST:
            return
        raise
def which(command):
    """Search the PATH for an executable named `command`.

    Returns the full path on success; logs an error and returns None when
    the command cannot be found.
    """
    directories = os.environ['PATH'].split(os.pathsep)
    for directory in directories:
        candidate = os.path.join(directory, command)
        if os.path.isfile(candidate) and os.access(candidate, os.X_OK):
            return candidate
    error("Could not find command '{}' in PATH {}".format(command, os.environ['PATH']))
    return None
def detect_cellname():
    """Detect the current cellname with the fs command.

    This assumes the current host is running an OpenAFS client; returns
    None when 'fs wscell' produces no recognizable output.
    """
    info("Searching for cellname")
    cellname = None
    cmd = [which('fs'), 'wscell']
    debug(subprocess.list2cmdline(cmd))
    p = subprocess.Popen(cmd, stdout=subprocess.PIPE)
    with p.stdout:
        for line in iter(p.stdout.readline, ''):
            match = re.match(r"This workstation belongs to cell '([^']+)'", line)
            if match:
                cellname = match.group(1)
    info("Cellname is {}".format(cellname))
    return cellname
def detect_fileservers(cellname):
    """Detect the file servers with the vos listaddrs command.

    Returns a dict mapping server UUID -> list of its IP addresses, as
    parsed from 'vos listaddrs -printuuid -noresolve' output.
    """
    info("Searching for file servers in cell {}".format(cellname))
    uuids = {}
    uuid = None
    cmd = [which('vos'), 'listaddrs', '-printuuid', '-noresolve', '-noauth', '-cell', cellname]
    debug(subprocess.list2cmdline(cmd))
    p = subprocess.Popen(cmd, stdout=subprocess.PIPE)
    with p.stdout:
        for line in iter(p.stdout.readline, ''):
            # A 'UUID:' line starts a new server record ...
            match = re.match(r'UUID: (\S+)+', line)
            if match:
                uuid = match.group(1)
                uuids[uuid] = []
            # ... and subsequent dotted-quad lines are its addresses.
            match = re.match(r'([\d\.]+)', line)
            if match:
                addr = match.group(1)
                uuids[uuid].append(addr)
    info("Found servers: {}".format(pprint.pformat(uuids)))
    return uuids
def get_usage(command):
    """Return `command -h` output as a string; exits via fatal() if the
    command is not found in PATH."""
    pathname = which(command)
    if pathname is None:
        fatal("Unable to find command '{}' in PATH.".format(command))
    cmd = [pathname, '-h']
    p = subprocess.Popen(cmd, stdout=subprocess.PIPE)
    out,code = p.communicate()
    return out
def check_commands():
    """Verify the patched rxdebug/xstat_fs_test are installed.

    This tool requires the '-raw', '-format' and '-delimiter' options (see
    the file header); exits via fatal() when any is missing.
    """
    usage = get_usage('rxdebug')
    if not '-raw' in usage:
        fatal("rxdebug is missing the '-raw' option.")
    usage = get_usage('xstat_fs_test')
    for option in ['-format', '-delimiter']:
        if not option in usage:
            fatal("xstat_fs_test is missing the '{}' option.".format(option))
def xstat_fs(host, collection, out):
    """Retrieve one xstat collection from a file server and write the
    delimiter-separated output directly to the `out` stream.

    stderr from xstat_fs_test is logged as warnings; a non-zero exit is
    logged as an error but not raised.
    """
    cmd = [which('xstat_fs_test'), host, '-once', '-co', collection, '-format', 'dsv', '-delimiter', ' ']
    cmdline = subprocess.list2cmdline(cmd)
    debug(cmdline)
    # stdout goes straight to the output file; we only watch stderr.
    p = subprocess.Popen(cmd, stdout=out, stderr=subprocess.PIPE)
    with p.stderr:
        for line in iter(p.stderr.readline, ''):
            line = line.rstrip()
            warning("xstat_fs_test: {}".format(line))
    code = p.wait()
    if code:
        error("xstat_fs_test failed ({}): {}".format(code, cmdline))
def rxstats(host, port, out):
    """Retrieve rx statistics from a server via 'rxdebug -raw' and write
    them to `out` as 'timestamp host port name value' lines.

    Lines that do not look like 'name value' pairs are logged as warnings.
    """
    cmd = [which('rxdebug'), host, port, '-rxstats', '-noconns', '-raw']
    cmdline = subprocess.list2cmdline(cmd)
    # One timestamp per invocation, applied to every stat line.
    timestamp = int(time.time())
    debug(cmdline)
    p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    with p.stdout:
        for line in iter(p.stdout.readline, ''):
            line = line.rstrip()
            match = re.match(r'(\S+)\s(\S+)', line)
            if match:
                name = match.group(1)
                value = match.group(2)
                out.write("{} {} {} {} {}\n".format(timestamp, host, port, name, value))
            else:
                warning("rxdebug: {}".format(line))
    code = p.wait()
    if code:
        error("rxdebug failed ({}): {}".format(code, cmdline))
# Main-loop flag; cleared by the SIGINT handler so main() exits cleanly.
running = True
def sigint_handler(signal, frame):
    """SIGINT handler: request a graceful shutdown of the collection loop."""
    global running
    sys.stdout.write("\nquitting...\n")
    info("Signal SIGINT caught.")
    running = False
def main():
    """Collection loop: poll every configured cell's file servers and append
    their rx/xstat statistics to one dated file per cell per day.

    Runs until SIGINT (or a single pass when the 'once' option is set).
    """
    global running
    config = read_config()
    setup_logging(config.get('logging','filename'), config.get('logging','level'))
    destdir = os.path.expanduser(config.get('collect', 'destdir'))
    mkdirp(destdir)
    check_commands() # Exits if the required commands are missing.
    info('Starting main loop.')
    signal.signal(signal.SIGINT, sigint_handler)
    while running:
        # Every '[cellN]' section describes one cell to poll.
        for section in config.sections():
            if section.startswith('cell'):
                cellname = config.get(section, 'cellname')
                servers = config.get(section, 'fileservers').strip().split()
                # One output file per cell per day.
                timestamp = time.strftime('%Y-%m-%d')
                filename = os.path.join(destdir, "{}-{}.dat".format(cellname, timestamp))
                for server in servers:
                    with open(filename, 'a') as out:
                        try:
                            rxstats(server, '7000', out)
                            xstat_fs(server, '2', out)
                            xstat_fs(server, '3', out)
                        except Exception as e:
                            # Keep polling other servers on failure.
                            error("Exception: {}".format(e))
                    info("Wrote stats for server {} to file {}".format(server, filename))
        if running:
            if config.getboolean('collect', 'once'):
                info("Once option set, quitting.")
                running = False
            else:
                sleep = int(config.get('collect', 'sleep'))
                debug('sleep {}'.format(sleep))
                time.sleep(sleep)
    info('Exiting.')
# Start the collector only when executed as a script.
if __name__ == "__main__":
    main()
| [
"mmeffie@sinenomine.net"
] | mmeffie@sinenomine.net |
8da13cd142ec6b62a14d15b73cfe977ec43475ff | a97fb0584709e292a475defc8506eeb85bb24339 | /source code/code/ch203.py | 3aa2f981b9a6399e15c03b0b1aeb0e4e562fef35 | [] | no_license | AAQ6291/PYCATCH | bd297858051042613739819ed70c535901569079 | 27ec4094be785810074be8b16ef84c85048065b5 | refs/heads/master | 2020-03-26T13:54:57.051016 | 2018-08-17T09:05:19 | 2018-08-17T09:05:19 | 144,963,014 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 553 | py | #!/usr/bin/env python
#coding=utf-8
from __future__ import print_function
## Declare x, y, z as an empty tuple, list and dict respectively.
x, y, z = (), [], {}
## All three are empty containers, but they are of different types and
## therefore do not compare equal to each other, nor to None.
## NOTE(review): idiomatic Python would use `is None` instead of `== None`.
if x == y == z:
    print(x, y, z, "相等")
else:
    print(x, y, z, "不相等")
if x == None:
    print(x, " 相等 None")
else:
    print(x, " 不相等 None")
if y == None:
    print(y, " 相等 None")
else:
    print(y, " 不相等 None")
if z == None:
    print(z, " 相等 None")
else:
    print(z, " 不相等 None")
| [
"angelak.tw@gmail.com"
] | angelak.tw@gmail.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.