repo_name stringlengths 6 100 | path stringlengths 4 294 | copies stringclasses 981 values | size stringlengths 4 6 | content stringlengths 606 896k | license stringclasses 15 values |
|---|---|---|---|---|---|
alrifqi/django | django/contrib/sitemaps/views.py | 352 | 2953 | import datetime
from calendar import timegm
from functools import wraps
from django.contrib.sites.shortcuts import get_current_site
from django.core import urlresolvers
from django.core.paginator import EmptyPage, PageNotAnInteger
from django.http import Http404
from django.template.response import TemplateResponse
from django.utils.http import http_date
def x_robots_tag(func):
    """Decorate a view so its response carries an ``X-Robots-Tag`` header.

    The header tells search-engine crawlers not to index, describe or
    archive the sitemap pages themselves.
    """
    @wraps(func)
    def wrapper(request, *args, **kwargs):
        result = func(request, *args, **kwargs)
        result['X-Robots-Tag'] = 'noindex, noodp, noarchive'
        return result
    return wrapper
@x_robots_tag
def index(request, sitemaps,
          template_name='sitemap_index.xml', content_type='application/xml',
          sitemap_url_name='django.contrib.sitemaps.views.sitemap'):
    """Render the sitemap index page: one absolute URL per sitemap section.

    Sections whose paginator holds more than one page get an additional
    ``?p=<n>`` entry for every page after the first.
    """
    req_protocol = request.scheme
    req_site = get_current_site(request)

    all_urls = []
    for section, site in sitemaps.items():
        # Entries may be Sitemap classes rather than instances; instantiate
        # them on demand.
        if callable(site):
            site = site()
        protocol = site.protocol if site.protocol is not None else req_protocol
        section_url = urlresolvers.reverse(sitemap_url_name,
                                           kwargs={'section': section})
        absolute_url = '%s://%s%s' % (protocol, req_site.domain, section_url)
        all_urls.append(absolute_url)
        # Pages beyond the first are addressed with a ?p=<page> query string.
        for page_num in range(2, site.paginator.num_pages + 1):
            all_urls.append('%s?p=%s' % (absolute_url, page_num))
    return TemplateResponse(request, template_name, {'sitemaps': all_urls},
                            content_type=content_type)
@x_robots_tag
def sitemap(request, sitemaps, section=None,
            template_name='sitemap.xml', content_type='application/xml'):
    """Render one page of a sitemap.

    :param sitemaps: mapping of section name to Sitemap class or instance.
    :param section: render only this section; ``None`` renders all of them.
    :raises Http404: unknown section, empty page, or non-integer ``p``.
    """
    req_protocol = request.scheme
    req_site = get_current_site(request)

    if section is not None:
        if section not in sitemaps:
            raise Http404("No sitemap available for section: %r" % section)
        maps = [sitemaps[section]]
    else:
        maps = sitemaps.values()
    page = request.GET.get("p", 1)

    urls = []
    # Fix: with an empty ``sitemaps`` mapping the loop never runs and the
    # hasattr() check below would raise NameError on an unbound ``site``.
    site = None
    for site in maps:
        try:
            if callable(site):
                site = site()
            urls.extend(site.get_urls(page=page, site=req_site,
                                      protocol=req_protocol))
        except EmptyPage:
            raise Http404("Page %s empty" % page)
        except PageNotAnInteger:
            raise Http404("No page '%s'" % page)
    response = TemplateResponse(request, template_name, {'urlset': urls},
                                content_type=content_type)
    # NOTE(review): only the *last* site iterated is consulted for the
    # Last-Modified header (original behaviour, preserved); with several
    # sections this may understate the most recent change.
    if hasattr(site, 'latest_lastmod'):
        # If latest_lastmod is defined for the site, set the header so
        # ConditionalGetMiddleware can answer with 304 NOT MODIFIED.
        lastmod = site.latest_lastmod
        response['Last-Modified'] = http_date(
            timegm(
                lastmod.utctimetuple() if isinstance(lastmod, datetime.datetime)
                else lastmod.timetuple()
            )
        )
    return response
| bsd-3-clause |
alviano/wasp | tests/asp/cautious/test6.gus.gringo.cautious.asp.test.py | 1 | 20192 | input = """
1 1 1 1 63
1 1 1 1 64
1 1 2 0 65 66
1 1 2 0 67 68
1 1 2 0 69 70
1 1 2 0 71 72
1 1 2 0 66 65
1 1 2 0 68 67
1 1 2 0 70 69
1 1 2 0 72 71
1 81 1 1 66
1 82 1 1 65
1 83 1 1 68
1 84 1 1 67
1 85 1 1 70
1 86 1 1 69
1 87 1 1 72
1 88 1 1 71
1 89 1 1 66
1 90 1 1 68
1 91 1 1 67
1 92 1 1 69
1 93 2 1 68 81
1 94 1 0 81
1 95 2 1 70 82
1 96 2 1 72 82
1 97 2 1 66 83
1 98 1 0 83
1 99 1 0 84
1 100 2 1 69 84
1 101 2 1 72 85
1 102 2 1 65 85
1 103 1 0 86
1 104 2 1 67 86
1 105 2 1 70 87
1 106 2 1 65 87
1 107 2 1 65 89
1 108 2 1 67 90
1 109 2 1 68 91
1 110 2 1 70 92
1 81 2 1 68 93
1 111 2 1 67 93
1 81 1 0 94
1 82 2 1 70 95
1 111 2 1 69 95
1 82 2 1 72 96
1 112 2 1 71 96
1 83 2 1 66 97
1 113 2 1 65 97
1 83 1 0 98
1 84 1 0 99
1 113 2 1 70 100
1 84 2 1 69 100
1 85 2 1 72 101
1 114 2 1 71 101
1 115 2 1 66 102
1 85 2 1 65 102
1 86 1 0 103
1 115 2 1 68 104
1 86 2 1 67 104
1 87 2 1 70 105
1 116 2 1 69 105
1 117 2 1 66 106
1 87 2 1 65 106
1 118 2 1 70 107
1 119 2 1 72 107
1 89 2 1 65 107
1 120 1 0 108
1 90 2 1 67 108
1 118 2 1 69 108
1 121 2 1 66 109
1 91 2 1 68 109
1 122 1 0 109
1 92 2 1 70 110
1 123 2 1 72 110
1 121 2 1 65 110
1 124 1 0 111
1 93 2 1 67 111
1 95 2 1 69 111
1 96 2 1 71 112
1 100 2 1 70 113
1 125 2 1 72 113
1 97 2 1 65 113
1 101 2 1 71 114
1 102 2 1 66 115
1 104 2 1 68 115
1 126 1 0 115
1 127 1 0 116
1 128 2 1 67 116
1 105 2 1 69 116
1 106 2 1 66 117
1 128 2 1 68 117
1 129 1 0 117
1 107 2 1 70 118
1 108 2 1 69 118
1 107 2 1 72 119
1 130 2 1 71 119
1 108 1 0 120
1 109 2 1 66 121
1 110 2 1 65 121
1 109 1 0 122
1 110 2 1 72 123
1 131 2 1 71 123
1 111 1 0 124
1 113 2 1 72 125
1 132 2 1 71 125
1 115 1 0 126
1 116 1 0 127
1 117 2 1 68 128
1 116 2 1 67 128
1 117 1 0 129
1 119 2 1 71 130
1 123 2 1 71 131
1 125 2 1 71 132
1 139 1 1 120
1 140 1 1 122
1 141 1 1 94
1 142 1 1 124
1 143 1 1 98
1 144 1 1 99
1 145 1 1 126
1 146 1 1 103
1 147 1 1 129
1 148 1 1 127
1 149 2 1 108 139
1 150 2 1 109 140
1 151 2 1 81 141
1 152 2 1 111 142
1 153 2 1 83 143
1 154 2 1 84 144
1 155 2 1 115 145
1 156 2 1 86 146
1 157 2 1 117 147
1 158 2 1 116 148
1 139 2 1 120 149
1 159 2 1 90 149
1 160 2 1 118 149
1 161 2 1 121 150
1 162 2 1 91 150
1 140 2 1 122 150
1 163 2 1 93 151
1 141 2 1 94 151
1 142 2 1 124 152
1 163 2 1 93 152
1 164 2 1 95 152
1 165 2 1 97 153
1 143 2 1 98 153
1 144 2 1 99 154
1 166 2 1 100 154
1 167 2 1 102 155
1 168 2 1 104 155
1 145 2 1 126 155
1 146 2 1 103 156
1 168 2 1 104 156
1 169 2 1 106 157
1 170 2 1 128 157
1 147 2 1 129 157
1 148 2 1 127 158
1 170 2 1 128 158
1 219 2 1 165 179
1 220 2 1 167 179
1 221 2 1 169 179
1 222 2 1 161 179
1 223 2 1 163 159
1 224 1 0 159
1 225 2 1 168 159
1 226 2 1 170 159
1 227 2 1 162 159
1 228 2 1 163 162
1 229 1 0 162
1 230 2 1 168 162
1 231 2 1 170 162
1 232 2 1 168 163
1 233 2 1 170 163
1 234 1 0 163
1 235 2 1 170 168
1 236 2 1 164 160
1 237 2 1 166 160
1 238 1 0 160
1 239 2 1 171 160
1 240 2 1 180 160
1 241 1 0 164
1 242 2 1 171 164
1 243 2 1 166 164
1 244 1 0 166
1 245 2 1 171 166
1 246 2 1 164 180
1 247 2 1 166 180
1 248 1 0 180
1 249 2 1 171 180
1 250 2 1 182 178
1 251 2 1 183 178
1 252 2 1 184 178
1 253 1 0 178
1 254 2 1 181 178
1 255 2 1 182 181
1 256 2 1 183 181
1 257 2 1 184 181
1 258 1 0 181
1 259 2 1 184 182
1 260 1 0 182
1 261 2 1 183 182
1 262 2 1 184 183
1 263 1 0 183
1 264 1 0 184
1 265 1 0 140
1 266 1 0 141
1 293 2 1 146 148
1 294 2 1 179 161
1 295 2 1 179 165
1 296 2 1 161 165
1 297 1 0 165
1 298 2 1 179 167
1 299 2 1 161 167
1 300 1 0 167
1 301 2 1 165 167
1 302 2 1 179 169
1 303 2 1 161 169
1 304 1 0 169
1 305 2 1 165 169
1 306 2 1 167 169
1 307 2 1 159 162
1 308 2 1 159 163
1 309 2 1 162 163
1 310 2 1 159 168
1 311 2 1 162 168
1 312 2 1 163 168
1 313 1 0 168
1 314 2 1 159 170
1 315 2 1 162 170
1 316 2 1 163 170
1 317 1 0 170
1 318 2 1 168 170
1 319 2 1 160 164
1 320 2 1 180 164
1 321 2 1 160 166
1 322 2 1 180 166
1 323 2 1 164 166
1 324 2 1 160 171
1 325 2 1 180 171
1 326 2 1 164 171
1 327 2 1 166 171
1 328 1 0 171
1 329 2 1 160 180
1 330 2 1 178 181
1 331 2 1 178 182
1 332 2 1 181 182
1 333 2 1 178 183
1 334 2 1 181 183
1 335 2 1 182 183
1 336 2 1 178 184
1 337 2 1 181 184
1 338 2 1 182 184
1 339 2 1 183 184
1 340 1 0 200
1 341 1 0 218
1 342 1 0 223
1 343 1 0 236
1 344 1 0 250
5 345 627 10 5 340 341 342 343 344 266 280 308 319 331 65 155 129 78 200 57 65 129 78 200
1 346 1 1 345
1 347 1 0 346
1 348 1 0 190
1 349 1 0 211
1 350 1 0 228
1 351 1 0 246
1 352 1 0 255
5 353 619 10 5 348 349 350 351 352 267 281 309 320 332 57 155 129 78 200 57 65 129 78 200
1 354 1 1 353
1 355 1 0 354
1 356 1 0 201
1 357 1 0 219
1 358 1 0 224
1 359 1 0 237
1 360 1 0 251
5 361 627 10 5 356 357 358 359 360 268 282 295 321 333 65 155 129 78 200 57 65 155 78 200
1 362 1 1 361
1 347 1 0 362
1 363 1 0 191
1 364 1 0 212
1 365 1 0 229
1 366 1 0 247
1 367 1 0 256
5 368 619 10 5 363 364 365 366 367 269 283 296 322 334 57 155 129 78 200 57 65 155 78 200
1 369 1 1 368
1 355 1 0 369
1 370 1 0 202
1 371 1 0 220
1 372 1 0 225
1 373 1 0 238
1 374 1 0 252
5 375 627 10 5 370 371 372 373 374 271 285 298 310 336 65 155 129 78 200 57 65 155 129 200
1 376 1 1 375
1 347 1 0 376
1 377 1 0 192
1 378 1 0 213
1 379 1 0 230
1 380 1 0 248
1 381 1 0 257
5 382 619 10 5 377 378 379 380 381 272 286 299 311 337 57 155 129 78 200 57 65 155 129 200
1 383 1 1 382
1 355 1 0 383
1 384 1 0 194
1 385 1 0 205
1 386 1 0 232
1 387 1 0 241
1 388 1 0 259
5 389 529 10 5 384 385 386 387 388 273 287 300 312 338 57 65 129 78 200 57 65 155 129 200
1 390 1 1 389
1 391 1 0 390
1 392 1 0 197
1 393 1 0 208
1 394 1 0 215
1 395 1 0 244
1 396 1 0 262
5 397 555 10 5 392 393 394 395 396 274 288 301 313 339 57 65 155 78 200 57 65 155 129 200
1 398 1 1 397
1 399 1 0 398
1 400 1 0 203
1 401 1 0 221
1 402 1 0 226
1 403 1 0 239
1 404 1 0 253
5 405 627 10 5 400 401 402 403 404 275 289 302 314 324 65 155 129 78 200 57 65 155 129 78
1 406 1 1 405
1 347 1 0 406
1 407 1 0 193
1 408 1 0 214
1 409 1 0 231
1 410 1 0 249
1 411 1 0 258
5 412 619 10 5 407 408 409 410 411 276 290 303 315 325 57 155 129 78 200 57 65 155 129 78
1 413 1 1 412
1 355 1 0 413
1 414 1 0 195
1 415 1 0 206
1 416 1 0 233
1 417 1 0 242
1 418 1 0 260
5 419 529 10 5 414 415 416 417 418 277 291 304 316 326 57 65 129 78 200 57 65 155 129 78
1 420 1 1 419
1 391 1 0 420
1 421 1 0 198
1 422 1 0 209
1 423 1 0 216
1 424 1 0 245
1 425 1 0 263
5 426 555 10 5 421 422 423 424 425 278 292 305 317 327 57 65 155 78 200 57 65 155 129 78
1 427 1 1 426
1 399 1 0 427
1 428 1 0 204
1 429 1 0 222
1 430 1 0 227
1 431 1 0 240
1 432 1 0 254
5 433 627 10 5 428 429 430 431 432 265 294 307 329 330 65 155 129 78 200 57 155 129 78 200
1 434 1 1 433
1 347 1 0 434
1 435 1 0 196
1 436 1 0 207
1 437 1 0 234
1 438 1 0 243
1 439 1 0 261
5 440 529 10 5 435 436 437 438 439 270 284 297 323 335 57 65 129 78 200 57 65 155 78 200
1 441 1 1 440
1 391 1 0 441
1 442 1 0 199
1 443 1 0 210
1 444 1 0 217
1 445 1 0 235
1 446 1 0 264
5 447 606 10 5 442 443 444 445 446 279 293 306 318 328 57 65 155 129 200 57 65 155 129 78
1 448 1 1 447
1 449 1 0 448
1 391 1 1 347
1 391 1 1 355
1 399 1 1 347
1 399 1 1 355
1 449 1 1 347
1 449 1 1 355
1 449 1 1 391
1 449 1 1 399
1 450 1 1 347
1 450 1 1 355
1 450 1 1 391
1 450 1 1 399
1 355 1 1 347
1 399 1 1 391
1 450 1 1 449
1 451 2 1 355 140
1 451 2 1 391 141
1 451 2 1 399 143
1 451 2 1 449 145
1 451 2 1 450 147
1 452 2 1 347 139
1 452 2 1 391 142
1 452 2 1 399 144
1 452 2 1 449 146
1 452 2 1 450 148
1 453 2 1 355 161
1 453 2 1 399 165
1 453 2 1 449 167
1 453 2 1 450 169
1 453 2 1 347 179
1 454 2 1 347 159
1 454 2 1 355 162
1 454 2 1 391 163
1 454 2 1 449 168
1 454 2 1 450 170
1 455 2 1 347 160
1 455 2 1 391 164
1 455 2 1 399 166
1 455 2 1 450 171
1 455 2 1 355 180
1 456 2 1 347 178
1 456 2 1 355 181
1 456 2 1 391 182
1 456 2 1 399 183
1 456 2 1 449 184
3 10 63 64 66 68 70 72 65 67 69 71 0 0
2 457 10 0 4 63 64 66 68 70 72 65 67 69 71
2 458 10 0 5 63 64 66 68 70 72 65 67 69 71
1 459 2 1 458 457
1 1 1 1 459
0
10 pipe(1,2)
11 pipe(1,4)
12 pipe(2,3)
13 pipe(2,4)
14 pipe(3,4)
15 pipe(3,5)
22 swap(pipe(1,2),pipe(1,2))
23 swap(pipe(1,4),pipe(1,4))
24 swap(pipe(2,3),pipe(2,3))
25 swap(pipe(2,4),pipe(2,4))
26 swap(pipe(3,4),pipe(3,4))
27 swap(pipe(3,5),pipe(3,5))
28 swap(pipe(1,2),pipe(2,1))
29 swap(pipe(1,4),pipe(4,1))
30 swap(pipe(2,3),pipe(3,2))
31 swap(pipe(2,4),pipe(4,2))
32 swap(pipe(3,4),pipe(4,3))
33 swap(pipe(3,5),pipe(5,3))
34 symm_pipe(1,2)
35 symm_pipe(1,4)
36 symm_pipe(2,3)
37 symm_pipe(2,4)
38 symm_pipe(3,4)
39 symm_pipe(3,5)
40 symm_pipe(2,1)
41 symm_pipe(4,1)
42 symm_pipe(3,2)
43 symm_pipe(4,2)
44 symm_pipe(4,3)
45 symm_pipe(5,3)
46 less_ico(pipe(1,2),pipe(2,3))
47 less_ico(pipe(1,4),pipe(2,3))
48 less_ico(pipe(1,2),pipe(2,4))
49 less_ico(pipe(1,4),pipe(2,4))
50 less_ico(pipe(1,2),pipe(3,4))
51 less_ico(pipe(1,4),pipe(3,4))
52 less_ico(pipe(2,3),pipe(3,4))
53 less_ico(pipe(2,4),pipe(3,4))
54 less_ico(pipe(1,2),pipe(3,5))
55 less_ico(pipe(1,4),pipe(3,5))
56 less_ico(pipe(2,3),pipe(3,5))
57 less_ico(pipe(2,4),pipe(3,5))
58 less_ico(pipe(1,2),pipe(1,4))
59 less_ico(pipe(2,3),pipe(2,4))
60 less_ico(pipe(3,4),pipe(3,5))
9 tank(1)
3 valves_per_pipe(1)
61 drop(2,1)
62 drop(4,1)
2 valves_number(4)
63 valve(1,2)
64 valve(1,4)
66 valve(2,3)
68 valve(2,4)
70 valve(3,4)
72 valve(3,5)
65 valve(3,2)
67 valve(4,2)
69 valve(4,3)
71 valve(5,3)
73 broken(pipe(1,2),pipe(1,2))
74 broken(pipe(1,4),pipe(1,4))
75 broken(pipe(2,3),pipe(2,3))
76 broken(pipe(2,4),pipe(2,4))
77 broken(pipe(3,4),pipe(3,4))
78 broken(pipe(3,5),pipe(3,5))
89 broken(pipe(1,2),pipe(2,3))
90 broken(pipe(1,2),pipe(2,4))
91 broken(pipe(1,4),pipe(2,4))
92 broken(pipe(1,4),pipe(3,4))
93 broken(pipe(2,3),pipe(2,4))
94 broken(pipe(2,3),pipe(1,2))
95 broken(pipe(2,3),pipe(3,4))
96 broken(pipe(2,3),pipe(3,5))
97 broken(pipe(2,4),pipe(2,3))
98 broken(pipe(2,4),pipe(1,2))
99 broken(pipe(2,4),pipe(1,4))
100 broken(pipe(2,4),pipe(3,4))
101 broken(pipe(3,4),pipe(3,5))
102 broken(pipe(3,4),pipe(2,3))
103 broken(pipe(3,4),pipe(1,4))
104 broken(pipe(3,4),pipe(2,4))
105 broken(pipe(3,5),pipe(3,4))
106 broken(pipe(3,5),pipe(2,3))
118 broken(pipe(1,2),pipe(3,4))
119 broken(pipe(1,2),pipe(3,5))
120 broken(pipe(1,2),pipe(1,4))
121 broken(pipe(1,4),pipe(2,3))
122 broken(pipe(1,4),pipe(1,2))
123 broken(pipe(1,4),pipe(3,5))
124 broken(pipe(2,3),pipe(1,4))
125 broken(pipe(2,4),pipe(3,5))
126 broken(pipe(3,4),pipe(1,2))
127 broken(pipe(3,5),pipe(1,4))
128 broken(pipe(3,5),pipe(2,4))
129 broken(pipe(3,5),pipe(1,2))
79 extend(pipe(1,2),2)
80 extend(pipe(1,4),4)
81 extend(pipe(2,3),2)
82 extend(pipe(2,3),3)
83 extend(pipe(2,4),2)
84 extend(pipe(2,4),4)
85 extend(pipe(3,4),3)
86 extend(pipe(3,4),4)
87 extend(pipe(3,5),3)
88 extend(pipe(3,5),5)
107 extend(pipe(1,2),3)
108 extend(pipe(1,2),4)
109 extend(pipe(1,4),2)
110 extend(pipe(1,4),3)
111 extend(pipe(2,3),4)
112 extend(pipe(2,3),5)
113 extend(pipe(2,4),3)
114 extend(pipe(3,4),5)
115 extend(pipe(3,4),2)
116 extend(pipe(3,5),4)
117 extend(pipe(3,5),2)
130 extend(pipe(1,2),5)
131 extend(pipe(1,4),5)
132 extend(pipe(2,4),5)
133 reached(pipe(1,2),1)
134 reached(pipe(1,4),1)
135 reached(pipe(2,3),1)
136 reached(pipe(2,4),1)
137 reached(pipe(3,4),1)
138 reached(pipe(3,5),1)
149 reached(pipe(1,2),4)
150 reached(pipe(1,4),2)
151 reached(pipe(2,3),2)
152 reached(pipe(2,3),4)
153 reached(pipe(2,4),2)
154 reached(pipe(2,4),4)
155 reached(pipe(3,4),2)
156 reached(pipe(3,4),4)
157 reached(pipe(3,5),2)
158 reached(pipe(3,5),4)
172 reached(pipe(1,2),3)
173 reached(pipe(1,4),3)
174 reached(pipe(2,3),3)
175 reached(pipe(2,4),3)
176 reached(pipe(3,4),3)
177 reached(pipe(3,5),3)
185 reached(pipe(1,2),5)
186 reached(pipe(1,4),5)
187 reached(pipe(2,3),5)
188 reached(pipe(2,4),5)
189 reached(pipe(3,4),5)
139 deliver(pipe(1,2),pipe(1,4))
140 deliver(pipe(1,4),pipe(1,2))
141 deliver(pipe(2,3),pipe(1,2))
142 deliver(pipe(2,3),pipe(1,4))
143 deliver(pipe(2,4),pipe(1,2))
144 deliver(pipe(2,4),pipe(1,4))
145 deliver(pipe(3,4),pipe(1,2))
146 deliver(pipe(3,4),pipe(1,4))
147 deliver(pipe(3,5),pipe(1,2))
148 deliver(pipe(3,5),pipe(1,4))
159 deliver(pipe(1,2),pipe(2,4))
160 deliver(pipe(1,2),pipe(3,4))
161 deliver(pipe(1,4),pipe(2,3))
162 deliver(pipe(1,4),pipe(2,4))
163 deliver(pipe(2,3),pipe(2,4))
164 deliver(pipe(2,3),pipe(3,4))
165 deliver(pipe(2,4),pipe(2,3))
166 deliver(pipe(2,4),pipe(3,4))
167 deliver(pipe(3,4),pipe(2,3))
168 deliver(pipe(3,4),pipe(2,4))
169 deliver(pipe(3,5),pipe(2,3))
170 deliver(pipe(3,5),pipe(2,4))
171 deliver(pipe(3,5),pipe(3,4))
178 deliver(pipe(1,2),pipe(3,5))
179 deliver(pipe(1,2),pipe(2,3))
180 deliver(pipe(1,4),pipe(3,4))
181 deliver(pipe(1,4),pipe(3,5))
182 deliver(pipe(2,3),pipe(3,5))
183 deliver(pipe(2,4),pipe(3,5))
184 deliver(pipe(3,4),pipe(3,5))
16 dem(1,2,57)
17 dem(1,4,65)
18 dem(2,3,155)
19 dem(2,4,129)
20 dem(3,4,78)
21 dem(3,5,200)
190 compare(pipe(1,4),pipe(2,3),pipe(1,2),-57)
191 compare(pipe(1,4),pipe(2,4),pipe(1,2),-57)
192 compare(pipe(1,4),pipe(3,4),pipe(1,2),-57)
193 compare(pipe(1,4),pipe(3,5),pipe(1,2),-57)
194 compare(pipe(2,3),pipe(3,4),pipe(1,2),-57)
195 compare(pipe(2,3),pipe(3,5),pipe(1,2),-57)
196 compare(pipe(2,3),pipe(2,4),pipe(1,2),-57)
197 compare(pipe(2,4),pipe(3,4),pipe(1,2),-57)
198 compare(pipe(2,4),pipe(3,5),pipe(1,2),-57)
199 compare(pipe(3,4),pipe(3,5),pipe(1,2),-57)
200 compare(pipe(1,2),pipe(2,3),pipe(1,4),-65)
201 compare(pipe(1,2),pipe(2,4),pipe(1,4),-65)
202 compare(pipe(1,2),pipe(3,4),pipe(1,4),-65)
203 compare(pipe(1,2),pipe(3,5),pipe(1,4),-65)
204 compare(pipe(1,2),pipe(1,4),pipe(1,4),-65)
205 compare(pipe(2,3),pipe(3,4),pipe(1,4),-65)
206 compare(pipe(2,3),pipe(3,5),pipe(1,4),-65)
207 compare(pipe(2,3),pipe(2,4),pipe(1,4),-65)
208 compare(pipe(2,4),pipe(3,4),pipe(1,4),-65)
209 compare(pipe(2,4),pipe(3,5),pipe(1,4),-65)
210 compare(pipe(3,4),pipe(3,5),pipe(1,4),-65)
211 compare(pipe(1,4),pipe(2,3),pipe(2,3),-155)
212 compare(pipe(1,4),pipe(2,4),pipe(2,3),-155)
213 compare(pipe(1,4),pipe(3,4),pipe(2,3),-155)
214 compare(pipe(1,4),pipe(3,5),pipe(2,3),-155)
215 compare(pipe(2,4),pipe(3,4),pipe(2,3),-155)
216 compare(pipe(2,4),pipe(3,5),pipe(2,3),-155)
217 compare(pipe(3,4),pipe(3,5),pipe(2,3),-155)
218 compare(pipe(1,2),pipe(2,3),pipe(2,3),-155)
219 compare(pipe(1,2),pipe(2,4),pipe(2,3),-155)
220 compare(pipe(1,2),pipe(3,4),pipe(2,3),-155)
221 compare(pipe(1,2),pipe(3,5),pipe(2,3),-155)
222 compare(pipe(1,2),pipe(1,4),pipe(2,3),-155)
223 compare(pipe(1,2),pipe(2,3),pipe(2,4),-129)
224 compare(pipe(1,2),pipe(2,4),pipe(2,4),-129)
225 compare(pipe(1,2),pipe(3,4),pipe(2,4),-129)
226 compare(pipe(1,2),pipe(3,5),pipe(2,4),-129)
227 compare(pipe(1,2),pipe(1,4),pipe(2,4),-129)
228 compare(pipe(1,4),pipe(2,3),pipe(2,4),-129)
229 compare(pipe(1,4),pipe(2,4),pipe(2,4),-129)
230 compare(pipe(1,4),pipe(3,4),pipe(2,4),-129)
231 compare(pipe(1,4),pipe(3,5),pipe(2,4),-129)
232 compare(pipe(2,3),pipe(3,4),pipe(2,4),-129)
233 compare(pipe(2,3),pipe(3,5),pipe(2,4),-129)
234 compare(pipe(2,3),pipe(2,4),pipe(2,4),-129)
235 compare(pipe(3,4),pipe(3,5),pipe(2,4),-129)
236 compare(pipe(1,2),pipe(2,3),pipe(3,4),-78)
237 compare(pipe(1,2),pipe(2,4),pipe(3,4),-78)
238 compare(pipe(1,2),pipe(3,4),pipe(3,4),-78)
239 compare(pipe(1,2),pipe(3,5),pipe(3,4),-78)
240 compare(pipe(1,2),pipe(1,4),pipe(3,4),-78)
241 compare(pipe(2,3),pipe(3,4),pipe(3,4),-78)
242 compare(pipe(2,3),pipe(3,5),pipe(3,4),-78)
243 compare(pipe(2,3),pipe(2,4),pipe(3,4),-78)
244 compare(pipe(2,4),pipe(3,4),pipe(3,4),-78)
245 compare(pipe(2,4),pipe(3,5),pipe(3,4),-78)
246 compare(pipe(1,4),pipe(2,3),pipe(3,4),-78)
247 compare(pipe(1,4),pipe(2,4),pipe(3,4),-78)
248 compare(pipe(1,4),pipe(3,4),pipe(3,4),-78)
249 compare(pipe(1,4),pipe(3,5),pipe(3,4),-78)
250 compare(pipe(1,2),pipe(2,3),pipe(3,5),-200)
251 compare(pipe(1,2),pipe(2,4),pipe(3,5),-200)
252 compare(pipe(1,2),pipe(3,4),pipe(3,5),-200)
253 compare(pipe(1,2),pipe(3,5),pipe(3,5),-200)
254 compare(pipe(1,2),pipe(1,4),pipe(3,5),-200)
255 compare(pipe(1,4),pipe(2,3),pipe(3,5),-200)
256 compare(pipe(1,4),pipe(2,4),pipe(3,5),-200)
257 compare(pipe(1,4),pipe(3,4),pipe(3,5),-200)
258 compare(pipe(1,4),pipe(3,5),pipe(3,5),-200)
259 compare(pipe(2,3),pipe(3,4),pipe(3,5),-200)
260 compare(pipe(2,3),pipe(3,5),pipe(3,5),-200)
261 compare(pipe(2,3),pipe(2,4),pipe(3,5),-200)
262 compare(pipe(2,4),pipe(3,4),pipe(3,5),-200)
263 compare(pipe(2,4),pipe(3,5),pipe(3,5),-200)
264 compare(pipe(3,4),pipe(3,5),pipe(3,5),-200)
265 compare(pipe(1,2),pipe(1,4),pipe(1,2),57)
266 compare(pipe(1,2),pipe(2,3),pipe(1,2),57)
267 compare(pipe(1,4),pipe(2,3),pipe(1,2),57)
268 compare(pipe(1,2),pipe(2,4),pipe(1,2),57)
269 compare(pipe(1,4),pipe(2,4),pipe(1,2),57)
270 compare(pipe(2,3),pipe(2,4),pipe(1,2),57)
271 compare(pipe(1,2),pipe(3,4),pipe(1,2),57)
272 compare(pipe(1,4),pipe(3,4),pipe(1,2),57)
273 compare(pipe(2,3),pipe(3,4),pipe(1,2),57)
274 compare(pipe(2,4),pipe(3,4),pipe(1,2),57)
275 compare(pipe(1,2),pipe(3,5),pipe(1,2),57)
276 compare(pipe(1,4),pipe(3,5),pipe(1,2),57)
277 compare(pipe(2,3),pipe(3,5),pipe(1,2),57)
278 compare(pipe(2,4),pipe(3,5),pipe(1,2),57)
279 compare(pipe(3,4),pipe(3,5),pipe(1,2),57)
280 compare(pipe(1,2),pipe(2,3),pipe(1,4),65)
281 compare(pipe(1,4),pipe(2,3),pipe(1,4),65)
282 compare(pipe(1,2),pipe(2,4),pipe(1,4),65)
283 compare(pipe(1,4),pipe(2,4),pipe(1,4),65)
284 compare(pipe(2,3),pipe(2,4),pipe(1,4),65)
285 compare(pipe(1,2),pipe(3,4),pipe(1,4),65)
286 compare(pipe(1,4),pipe(3,4),pipe(1,4),65)
287 compare(pipe(2,3),pipe(3,4),pipe(1,4),65)
288 compare(pipe(2,4),pipe(3,4),pipe(1,4),65)
289 compare(pipe(1,2),pipe(3,5),pipe(1,4),65)
290 compare(pipe(1,4),pipe(3,5),pipe(1,4),65)
291 compare(pipe(2,3),pipe(3,5),pipe(1,4),65)
292 compare(pipe(2,4),pipe(3,5),pipe(1,4),65)
293 compare(pipe(3,4),pipe(3,5),pipe(1,4),65)
294 compare(pipe(1,2),pipe(1,4),pipe(2,3),155)
295 compare(pipe(1,2),pipe(2,4),pipe(2,3),155)
296 compare(pipe(1,4),pipe(2,4),pipe(2,3),155)
297 compare(pipe(2,3),pipe(2,4),pipe(2,3),155)
298 compare(pipe(1,2),pipe(3,4),pipe(2,3),155)
299 compare(pipe(1,4),pipe(3,4),pipe(2,3),155)
300 compare(pipe(2,3),pipe(3,4),pipe(2,3),155)
301 compare(pipe(2,4),pipe(3,4),pipe(2,3),155)
302 compare(pipe(1,2),pipe(3,5),pipe(2,3),155)
303 compare(pipe(1,4),pipe(3,5),pipe(2,3),155)
304 compare(pipe(2,3),pipe(3,5),pipe(2,3),155)
305 compare(pipe(2,4),pipe(3,5),pipe(2,3),155)
306 compare(pipe(3,4),pipe(3,5),pipe(2,3),155)
307 compare(pipe(1,2),pipe(1,4),pipe(2,4),129)
308 compare(pipe(1,2),pipe(2,3),pipe(2,4),129)
309 compare(pipe(1,4),pipe(2,3),pipe(2,4),129)
310 compare(pipe(1,2),pipe(3,4),pipe(2,4),129)
311 compare(pipe(1,4),pipe(3,4),pipe(2,4),129)
312 compare(pipe(2,3),pipe(3,4),pipe(2,4),129)
313 compare(pipe(2,4),pipe(3,4),pipe(2,4),129)
314 compare(pipe(1,2),pipe(3,5),pipe(2,4),129)
315 compare(pipe(1,4),pipe(3,5),pipe(2,4),129)
316 compare(pipe(2,3),pipe(3,5),pipe(2,4),129)
317 compare(pipe(2,4),pipe(3,5),pipe(2,4),129)
318 compare(pipe(3,4),pipe(3,5),pipe(2,4),129)
319 compare(pipe(1,2),pipe(2,3),pipe(3,4),78)
320 compare(pipe(1,4),pipe(2,3),pipe(3,4),78)
321 compare(pipe(1,2),pipe(2,4),pipe(3,4),78)
322 compare(pipe(1,4),pipe(2,4),pipe(3,4),78)
323 compare(pipe(2,3),pipe(2,4),pipe(3,4),78)
324 compare(pipe(1,2),pipe(3,5),pipe(3,4),78)
325 compare(pipe(1,4),pipe(3,5),pipe(3,4),78)
326 compare(pipe(2,3),pipe(3,5),pipe(3,4),78)
327 compare(pipe(2,4),pipe(3,5),pipe(3,4),78)
328 compare(pipe(3,4),pipe(3,5),pipe(3,4),78)
329 compare(pipe(1,2),pipe(1,4),pipe(3,4),78)
330 compare(pipe(1,2),pipe(1,4),pipe(3,5),200)
331 compare(pipe(1,2),pipe(2,3),pipe(3,5),200)
332 compare(pipe(1,4),pipe(2,3),pipe(3,5),200)
333 compare(pipe(1,2),pipe(2,4),pipe(3,5),200)
334 compare(pipe(1,4),pipe(2,4),pipe(3,5),200)
335 compare(pipe(2,3),pipe(2,4),pipe(3,5),200)
336 compare(pipe(1,2),pipe(3,4),pipe(3,5),200)
337 compare(pipe(1,4),pipe(3,4),pipe(3,5),200)
338 compare(pipe(2,3),pipe(3,4),pipe(3,5),200)
339 compare(pipe(2,4),pipe(3,4),pipe(3,5),200)
347 lower(pipe(1,2))
355 lower(pipe(1,4))
391 lower(pipe(2,3))
399 lower(pipe(2,4))
449 lower(pipe(3,4))
450 lower(pipe(3,5))
451 worst_deliv_dem(1,2,57)
452 worst_deliv_dem(1,4,65)
453 worst_deliv_dem(2,3,155)
454 worst_deliv_dem(2,4,129)
455 worst_deliv_dem(3,4,78)
456 worst_deliv_dem(3,5,200)
4 junction(1)
5 junction(2)
6 junction(3)
7 junction(4)
8 junction(5)
0
B+
0
B-
1
0
1
"""
output = """
{valve(1,2), valve(1,4), lower(pipe(3,4)), lower(pipe(2,4)), lower(pipe(2,3))}
"""
| apache-2.0 |
abdoosh00/edx-rtl-final | lms/djangoapps/verify_student/tests/test_views.py | 5 | 4251 | """
verify_student/start?course_id=MITx/6.002x/2013_Spring # create
/upload_face?course_id=MITx/6.002x/2013_Spring
/upload_photo_id
/confirm # mark_ready()
---> To Payment
"""
import urllib
from mock import patch, Mock, ANY
from django.test import TestCase
from django.test.utils import override_settings
from django.conf import settings
from django.core.urlresolvers import reverse
from django.core.exceptions import ObjectDoesNotExist
from xmodule.modulestore.tests.factories import CourseFactory
from courseware.tests.tests import TEST_DATA_MONGO_MODULESTORE
from student.tests.factories import UserFactory
from course_modes.models import CourseMode
from verify_student.views import render_to_response
from verify_student.models import SoftwareSecurePhotoVerification
def mock_render_to_response(*args, **kwargs):
    """Delegate to the real ``render_to_response``.

    Used as a ``Mock`` side effect so tests can inspect the
    (template, context) call arguments while still producing a real response.
    """
    return render_to_response(*args, **kwargs)


render_mock = Mock(side_effect=mock_render_to_response)
class StartView(TestCase):
    """Smoke tests for the start-verification page."""

    def start_url(self, course_id=""):
        """Return the start-verification URL for ``course_id``."""
        # urllib.quote is Python 2 API; this codebase predates Python 3.
        return "/verify_student/%s" % urllib.quote(course_id)

    def test_start_new_verification(self):
        """
        Test the case where the user has no pending `PhotoVerficiationAttempts`,
        but is just starting their first.
        """
        user = UserFactory.create(username="rusty", password="test")
        self.client.login(username="rusty", password="test")

    def must_be_logged_in(self):
        self.assertHttpForbidden(self.client.get(self.start_url()))
@override_settings(MODULESTORE=TEST_DATA_MONGO_MODULESTORE)
class TestVerifyView(TestCase):
    """Tests for the main verification view."""

    def setUp(self):
        self.user = UserFactory.create(username="rusty", password="test")
        self.client.login(username="rusty", password="test")
        self.course_id = 'Robot/999/Test_Course'
        CourseFactory.create(org='Robot', number='999', display_name='Test Course')
        # A paid "verified" mode must exist for the course under test.
        verified_mode = CourseMode(
            course_id=self.course_id,
            mode_slug="verified",
            mode_display_name="Verified Certificate",
            min_price=50,
        )
        verified_mode.save()

    def test_invalid_course(self):
        """Requesting verification for an unknown course redirects (302)."""
        fake_course_id = "Robot/999/Fake_Course"
        url = reverse('verify_student_verify',
                      kwargs={"course_id": fake_course_id})
        response = self.client.get(url)
        self.assertEquals(response.status_code, 302)
@override_settings(MODULESTORE=TEST_DATA_MONGO_MODULESTORE)
class TestReverifyView(TestCase):
    """
    Tests for the reverification views
    """

    def setUp(self):
        self.user = UserFactory.create(username="rusty", password="test")
        self.client.login(username="rusty", password="test")

    @patch('verify_student.views.render_to_response', render_mock)
    def test_reverify_get(self):
        """GET renders the reverification form with no error flag set."""
        response = self.client.get(reverse('verify_student_reverify'))
        self.assertEquals(response.status_code, 200)
        # Inspect the (template, context) the view passed to the renderer.
        ((_template, context), _kwargs) = render_mock.call_args
        self.assertFalse(context['error'])

    @patch('verify_student.views.render_to_response', render_mock)
    def test_reverify_post_failure(self):
        """POSTing empty images re-renders the form with the error flag."""
        response = self.client.post(reverse('verify_student_reverify'),
                                    {'face_image': '',
                                     'photo_id_image': ''})
        self.assertEquals(response.status_code, 200)
        ((template, context), _kwargs) = render_mock.call_args
        self.assertIn('photo_reverification', template)
        self.assertTrue(context['error'])

    @patch.dict(settings.FEATURES, {'AUTOMATIC_VERIFY_STUDENT_IDENTITY_FOR_TESTING': True})
    def test_reverify_post_success(self):
        """A valid POST redirects and creates a verification attempt."""
        response = self.client.post(reverse('verify_student_reverify'),
                                    {'face_image': ',',
                                     'photo_id_image': ','})
        self.assertEquals(response.status_code, 302)
        try:
            attempt = SoftwareSecurePhotoVerification.objects.get(user=self.user)
            self.assertIsNotNone(attempt)
        except ObjectDoesNotExist:
            self.fail('No verification object generated')
| agpl-3.0 |
itsvetkov/pyqtgraph | pyqtgraph/widgets/CheckTable.py | 52 | 3361 | # -*- coding: utf-8 -*-
from ..Qt import QtGui, QtCore
from . import VerticalLabel
__all__ = ['CheckTable']
class CheckTable(QtGui.QWidget):
    """Grid of check boxes: one labelled row per named item, one column per
    option. Emits ``sigStateChanged`` whenever any box is toggled."""

    sigStateChanged = QtCore.Signal(object, object, object) # (row, col, state)

    def __init__(self, columns):
        """columns: sequence of column names (one check box per column)."""
        QtGui.QWidget.__init__(self)
        self.layout = QtGui.QGridLayout()
        self.layout.setSpacing(0)
        self.setLayout(self.layout)
        self.headers = []
        self.columns = columns
        # Column 0 is reserved for the row-name label; headers start at 1.
        col = 1
        for c in columns:
            label = VerticalLabel.VerticalLabel(c, orientation='vertical')
            self.headers.append(label)
            self.layout.addWidget(label, 0, col)
            col += 1
        self.rowNames = []
        self.rowWidgets = []
        self.oldRows = {}  ## remember settings from removed rows; reapply if they reappear.

    def updateRows(self, rows):
        """Synchronise the table so its rows match ``rows`` exactly."""
        # Iterate over a copy: removeRow() mutates self.rowNames.
        for r in self.rowNames[:]:
            if r not in rows:
                self.removeRow(r)
        for r in rows:
            if r not in self.rowNames:
                self.addRow(r)

    def addRow(self, name):
        """Append a row named ``name``, restoring any previously-saved state."""
        label = QtGui.QLabel(name)
        row = len(self.rowNames)+1  # +1: grid row 0 holds the column headers
        self.layout.addWidget(label, row, 0)
        checks = []
        col = 1
        for c in self.columns:
            check = QtGui.QCheckBox('')
            # Stash coordinates on the widget so checkChanged() can report them.
            check.col = c
            check.row = name
            self.layout.addWidget(check, row, col)
            checks.append(check)
            if name in self.oldRows:
                # Reapply the state saved when this row was last removed.
                check.setChecked(self.oldRows[name][col])
            col += 1
            #QtCore.QObject.connect(check, QtCore.SIGNAL('stateChanged(int)'), self.checkChanged)
            check.stateChanged.connect(self.checkChanged)
        self.rowNames.append(name)
        self.rowWidgets.append([label] + checks)

    def removeRow(self, name):
        """Remove the row named ``name``, remembering its check states."""
        row = self.rowNames.index(name)
        self.oldRows[name] = self.saveState()['rows'][row]  ## save for later
        self.rowNames.pop(row)
        for w in self.rowWidgets[row]:
            w.setParent(None)
            #QtCore.QObject.disconnect(w, QtCore.SIGNAL('stateChanged(int)'), self.checkChanged)
            if isinstance(w, QtGui.QCheckBox):
                w.stateChanged.disconnect(self.checkChanged)
        self.rowWidgets.pop(row)
        # Re-add every following row one grid row higher to close the gap.
        for i in range(row, len(self.rowNames)):
            widgets = self.rowWidgets[i]
            for j in range(len(widgets)):
                widgets[j].setParent(None)
                self.layout.addWidget(widgets[j], i+1, j)

    def checkChanged(self, state):
        """Slot: forward a check-box toggle as (row, col, state)."""
        # sender() identifies which box fired; row/col were stashed in addRow().
        check = QtCore.QObject.sender(self)
        #self.emit(QtCore.SIGNAL('stateChanged'), check.row, check.col, state)
        self.sigStateChanged.emit(check.row, check.col, state)

    def saveState(self):
        """Return the table contents as {'cols': [...], 'rows': [[name, bool, ...], ...]}."""
        rows = []
        for i in range(len(self.rowNames)):
            # rowWidgets[i][0] is the QLabel; the check boxes follow it.
            row = [self.rowNames[i]] + [c.isChecked() for c in self.rowWidgets[i][1:]]
            rows.append(row)
        return {'cols': self.columns, 'rows': rows}

    def restoreState(self, state):
        """Rebuild rows and check states from a saveState() dict."""
        rows = [r[0] for r in state['rows']]
        self.updateRows(rows)
        for r in state['rows']:
            rowNum = self.rowNames.index(r[0])
            for i in range(1, len(r)):
                self.rowWidgets[rowNum][i].setChecked(r[i])
| mit |
dya2/python-for-android | python3-alpha/python3-src/Lib/test/crashers/gc_inspection.py | 195 | 1092 | """
gc.get_referrers() can be used to see objects before they are fully built.
Note that this is only an example. There are many ways to crash Python
by using gc.get_referrers(), as well as many extension modules (even
when they are using perfectly documented patterns to build objects).
Identifying and removing all places that expose to the GC a
partially-built object is a long-term project. A patch was proposed on
SF specifically for this example but I consider fixing just this single
example a bit pointless (#1517042).
A fix would include a whole-scale code review, possibly with an API
change to decouple object creation and GC registration, and according
fixes to the documentation for extension module writers. It's unlikely
to happen, though. So this is currently classified as
"gc.get_referrers() is dangerous, use only for debugging".
"""
import gc
def g():
    # Generator body runs while ``tuple(g())`` below is still building its
    # tuple, so the partially-built tuple is visible to the GC.
    marker = object()
    yield marker
    # now the marker is in the tuple being constructed
    [tup] = [x for x in gc.get_referrers(marker) if type(x) is tuple]
    print(tup)
    # Deliberate crash: tup[1] is an uninitialised slot of the tuple.
    print(tup[1])


tuple(g())
| apache-2.0 |
BillyLiggins/fitting | first.py | 1 | 7031 | import copy
import echidna
import echidna.output.plot as plot
import echidna.core.spectra as spectra
from echidna.output import store
import matplotlib.pyplot as plt
import argparse
import glob
import numpy as np
import os
def convertor(path):
    """Convert every ROOT ntuple matching *path* to HDF5.

    Runs echidna's ``dump_spectra_ntuple.py`` script once per matching file,
    writing the output spectra into ``hdf5/``.

    path -- glob pattern selecting the ntuple files to convert.
    """
    import subprocess
    # subprocess.call with an argument list (no shell) handles filenames with
    # spaces/metacharacters safely, unlike the previous string-built os.system.
    script = os.path.expanduser("~/echidna/echidna/scripts/dump_spectra_ntuple.py")
    config = os.path.expanduser("~/workspace/PhD/fitting/config.yml")
    # glob.glob already returns a list; the np.array wrapper was unnecessary.
    for ntuple in glob.glob(path):
        subprocess.call(["python", script, "-c", config, "-f", ntuple, "-s", "hdf5/"])
def combinerNtuple(path, filename):
    """Fill a spectrum from every ntuple matching *path*, sum them all and
    dump the combined spectrum to *filename*."""
    matches = np.array(glob.glob(path))
    print(matches)
    seen_first = False
    for ntuple in matches:
        print(ntuple)
        if not seen_first:
            total = store.fill_from_ntuple(ntuple)
            seen_first = True
        else:
            total.add(store.fill_from_ntuple(ntuple))
    store.dump(filename, total)
def combiner(path, filename):
    """Load every HDF5 spectrum matching *path*, sum them all and dump the
    combined spectrum to *filename*."""
    matches = np.array(glob.glob(path))
    print(matches)
    seen_first = False
    for hdf5_path in matches:
        print(hdf5_path)
        if not seen_first:
            total = store.load(hdf5_path)
            seen_first = True
        else:
            total.add(store.load(hdf5_path))
    store.dump(filename, total)
"""The way you should do it is to define a lot of spectra and then plot them.
You don't really know how to normlise the histrogram or indeed weather that is of any uses in the first
place.
"""
def slicer(spectrumPath, filler, nslice):
    """Cut the reconstructed-energy spectrum at *spectrumPath* into *nslice*
    radial shells (energy window 0-0.6) and append each, normalised, to
    *filler*."""
    for shell in range(nslice):
        spec = store.load(spectrumPath)
        print(spec.sum())
        cuts = {
            "energy_reco_low": 0.,
            "energy_reco_high": 0.6,
            "radial_reco_low": shell * 6000.0 / nslice,
            # NOTE(review): integer division under Python 2 when nslice does
            # not divide 6000 -- kept as in the original.
            "radial_reco_high": (shell + 1) * 6000 / nslice,
        }
        spec.cut(**cuts)
        spec.scale(1)
        shell_spec = copy.copy(spec)
        shell_spec._name = str(shell * 1000) + "mm to " + str((shell + 1) * 1000) + "mm"
        print(type(shell_spec))
        filler.append(shell_spec)
def slicerMC(spectrumPath,filler,nslice):
    """Slice the MC-truth spectrum at *spectrumPath* into *nslice* radial shells.

    Like :func:`slicer` but cuts on the ``*_mc`` axes, with a wider 0-1 MeV
    energy window, and names every slice "MC".
    """
    for i in range(nslice):
        spectrum=store.load(spectrumPath)
        print spectrum.sum()
        shrink_dict = {"energy_mc_low": 0.,
                       "energy_mc_high": 1,
                       "radial_mc_low": i*6000.0/nslice,
                       "radial_mc_high": (i+1)*6000/nslice}
        spectrum.cut(**shrink_dict)
        # Normalise the shell to unit integral before plotting.
        spectrum.scale(1)
        spec2=copy.copy(spectrum)
        spec2._name="MC"
        print type(spec2)
        print "This gives the number os events in each window:"
        print "mc : "+str(i*6000.0/nslice)+"mm to "+str((i+1)*6000.0/nslice)+"mm : "+str(spec2.sum())
        filler.append(spec2)
def slicerReco(spectrumPath,filler,nslice):
    """Slice the reconstructed spectrum at *spectrumPath* into *nslice* radial shells.

    Counterpart of :func:`slicerMC` on the ``*_reco`` axes (0-1 MeV window);
    every slice is named "Reco" for the comparison overlay in :func:`compair`.
    """
    for i in range(nslice):
        spectrum=store.load(spectrumPath)
        print spectrum.sum()
        shrink_dict = {"energy_reco_low": 0.,
                       "energy_reco_high": 1.,
                       "radial_reco_low": i*6000.0/nslice,
                       "radial_reco_high": (i+1)*6000/nslice}
        spectrum.cut(**shrink_dict)
        # Normalise the shell to unit integral before plotting.
        spectrum.scale(1)
        spec2=copy.copy(spectrum)
        spec2._name="Reco"
        print type(spec2)
        print "This gives the number os events in each window:"
        print "reco : "+str(i*6000.0/nslice)+"mm to "+str((i+1)*6000.0/nslice)+"mm : "+str(spec2.sum())
        filler.append(spec2)
def signalPlotter(spectra,dim,name):
    """Save one histogram PNG per spectrum in *spectra*, projected onto *dim*.

    Output files are named ``slice_<name>_<lo>_<hi>.png``; the titles assume
    the spectra are consecutive 1000 mm radial slices.
    """
    i=0
    for spec in spectra:
        fig = plt.figure()
        ax= fig.add_subplot(1,1,1)
        par = spec.get_config().get_par(dim)
        width = par.get_width()
        # Bin edges, then bin centres (edges shifted by half a bin width).
        bins = np.linspace(par._low,par._high, par._bins+1)
        x = bins[:-1] + 0.5*width
        plt.xlabel(str(dim)+ " [" + par.get_unit() + "]")
        plt.ylabel("Events per " + str(width) + " " + par.get_unit() + " bin")
        ax.set(title="Normalised energy spectrum in "+str(i*1000)+"mm to "+str((i+1)*1000)+"mm ",ylabel="Events per " + str(width) + " " + par.get_unit() + " bin", xlabel=str(dim)+" [" + par.get_unit() + "]")
        # Weighted hist of bin centres reproduces the projected spectrum.
        ax.hist(x,bins,weights=spec.project(dim),histtype="stepfilled", color="RoyalBlue",label=spec._name)
        fig.savefig("slice_"+str(name)+"_"+str(i*1000)+"_"+str((i+1)*1000)+".png")
        i=1+i
def combiPlotter(spectra,dim,name):
    """Overlay all spectra in *spectra* on one energy_reco histogram PNG.

    NOTE(review): the *dim* argument is only used for the axis-label lookup;
    the projection itself is hard-coded to "energy_reco".  Axis limits are
    fixed to y in [0, 0.03] and x in [0.2, 0.7] MeV.
    """
    i=0
    fig = plt.figure()
    ax= fig.add_subplot(1,1,1)
    for spec in spectra:
        par = spec.get_config().get_par(dim)
        width = par.get_width()
        bins = np.linspace(par._low,par._high, par._bins+1)
        x = bins[:-1] + 0.5*width
        plt.xlabel(str(dim)+ " [" + par.get_unit() + "]")
        plt.ylabel("Events per " + str(width) + " " + par.get_unit() + " bin")
        ax.set(title="Normalised energy spectrum in 1000mm slices",ylabel="Events per " + str(width) + " " + par.get_unit() + " bin", xlabel="energy_reco"+ " [" + par.get_unit() + "]")
        ax.hist(x,bins,weights=spec.project("energy_reco"),label=spec._name,histtype='step')
    ax.set_ylim([0,0.03])
    ax.set_xlim([0.2,0.7])
    ax.legend(loc="best")
    fig.savefig("combined_"+str(name)+".png")
def func(path, nslice, name):
    """Slice the spectrum at *path* into *nslice* radial shells and plot them.

    Produces both the per-slice plots and the combined overlay, tagged *name*.
    """
    sliced_spectra = []
    slicer(path, sliced_spectra, nslice)
    for plotter in (signalPlotter, combiPlotter):
        plotter(sliced_spectra, "energy_reco", name)
def po210():
    """Convert, combine, slice and plot the solar Po-210 ntuples (6 radial shells)."""
    convertor("po210_ntuple/*")
    combiner("hdf5/SolarPo**ntuple*","hdf5/SolarPo210_combined.hdf5")
    # The old ``plotpath`` local was assigned but never read -- removed.
    func("hdf5/SolarPo210_combined.hdf5",6,"po210")
def bi210():
    """Convert, combine, slice and plot the solar Bi-210 ntuples (6 radial shells)."""
    convertor("bi210_ntuple/*")
    combiner("hdf5/SolarBi**ntuple*","hdf5/SolarBi210_combined.hdf5")
    # The old ``plotpath`` local was assigned but never read -- removed.
    func("hdf5/SolarBi210_combined.hdf5",6,"bi210")
def compair(spectrumPathReco,spectrumPathMC,name):
    """Overlay reconstructed vs MC-truth energy spectra per 1000 mm radial shell.

    Slices both inputs into 6 shells, then saves one comparison PNG per shell
    named ``compare_<name>_<lo>_<hi>.png``.
    """
    spectraReco=[]
    spectraMC=[]
    slicerReco(spectrumPathReco,spectraReco,6)
    slicerMC(spectrumPathMC,spectraMC,6)
    for i in range(0,len(spectraReco)):
        fig = plt.figure()
        ax= fig.add_subplot(1,1,1)
        # Reco histogram: binning taken from the reco energy parameter.
        par = spectraReco[i].get_config().get_par("energy_reco")
        width = par.get_width()
        bins = np.linspace(par._low,par._high, par._bins+1)
        x = bins[:-1] + 0.5*width
        ax.set(title="Normalised energy spectrum in "+str(i*1000)+"mm to "+str((i+1)*1000)+"mm ",ylabel="Events per " + str(width) + " " + par.get_unit() + " bin", xlabel="Energy [" + par.get_unit() + "]")
        ax.hist(x,bins,weights=spectraReco[i].project("energy_reco"),histtype="stepfilled",label=spectraReco[i]._name)
        # MC histogram: re-binned on the MC energy axis, drawn semi-transparent.
        par = spectraMC[i].get_config().get_par("energy_mc")
        width = par.get_width()
        bins = np.linspace(par._low,par._high, par._bins+1)
        x = bins[:-1] + 0.5*width
        ax.hist(x,bins,weights=spectraMC[i].project("energy_mc"),histtype="stepfilled",label=spectraMC[i]._name,alpha=0.75)
        ax.legend(loc=2)
        fig.savefig("compare_"+str(name)+"_"+str(i*1000)+"_"+str((i+1)*1000)+".png")
if __name__=="__main__":
print "You need to compare the recon against the mc"
print "You should bin in bigger bins becuase you could then bin in 4d"
"""You need to plot the standard spectra"""
| mit |
MichaelNedzelsky/intellij-community | python/lib/Lib/stat.py | 145 | 1667 | """Constants/functions for interpreting results of os.stat() and os.lstat().
Suggested usage: from stat import *
"""
# XXX Strictly spoken, this module may have to be adapted for each POSIX
# implementation; in practice, however, the numeric constants used by
# stat() are almost universal (even for stat() emulations on non-UNIX
# systems like MS-DOS).
# Indices for stat struct members in tuple returned by os.stat()
ST_MODE = 0   # protection bits / file mode
ST_INO = 1    # inode number
ST_DEV = 2    # device
ST_NLINK = 3  # number of hard links
ST_UID = 4    # owner user id
ST_GID = 5    # owner group id
ST_SIZE = 6   # size in bytes
ST_ATIME = 7  # last access time
ST_MTIME = 8  # last modification time
ST_CTIME = 9  # metadata-change (or creation, on some systems) time
# Extract bits from the mode
def S_IMODE(mode):
    """Return the permission bits of *mode* (low 12 bits: rwx plus suid/sgid/sticky)."""
    return mode & 07777
def S_IFMT(mode):
    """Return the file-type portion of *mode*, for comparison with the S_IF* constants."""
    return mode & 0170000
# Constants used as S_IFMT() for various file types
# (not all are implemented on all systems)
S_IFDIR = 0040000   # directory
S_IFCHR = 0020000   # character device
S_IFBLK = 0060000   # block device
S_IFREG = 0100000   # regular file
S_IFIFO = 0010000   # FIFO (named pipe)
S_IFLNK = 0120000   # symbolic link
S_IFSOCK = 0140000  # socket
# Functions to test for each file type
def S_ISDIR(mode):
    """Return True if *mode* is from a directory."""
    return S_IFMT(mode) == S_IFDIR
def S_ISCHR(mode):
    """Return True if *mode* is from a character special device file."""
    return S_IFMT(mode) == S_IFCHR
def S_ISBLK(mode):
    """Return True if *mode* is from a block special device file."""
    return S_IFMT(mode) == S_IFBLK
def S_ISREG(mode):
    """Return True if *mode* is from a regular file."""
    return S_IFMT(mode) == S_IFREG
def S_ISFIFO(mode):
    """Return True if *mode* is from a FIFO (named pipe)."""
    return S_IFMT(mode) == S_IFIFO
def S_ISLNK(mode):
    """Return True if *mode* is from a symbolic link."""
    return S_IFMT(mode) == S_IFLNK
def S_ISSOCK(mode):
    """Return True if *mode* is from a socket."""
    return S_IFMT(mode) == S_IFSOCK
# Names for permission bits
S_ISUID = 04000     # set-UID bit
S_ISGID = 02000     # set-GID bit
S_ENFMT = S_ISGID   # System V file-locking enforcement (shares the set-GID bit)
S_ISVTX = 01000     # sticky bit
S_IREAD = 00400     # Unix V7 synonym for S_IRUSR
S_IWRITE = 00200    # Unix V7 synonym for S_IWUSR
S_IEXEC = 00100     # Unix V7 synonym for S_IXUSR
S_IRWXU = 00700     # mask for owner permissions
S_IRUSR = 00400     # owner read
S_IWUSR = 00200     # owner write
S_IXUSR = 00100     # owner execute
S_IRWXG = 00070     # mask for group permissions
S_IRGRP = 00040     # group read
S_IWGRP = 00020     # group write
S_IXGRP = 00010     # group execute
S_IRWXO = 00007     # mask for others' permissions
S_IROTH = 00004     # others read
S_IWOTH = 00002     # others write
S_IXOTH = 00001     # others execute
| apache-2.0 |
Widiot/simpleblog | venv/lib/python3.5/site-packages/pip/_vendor/retrying.py | 934 | 9972 | ## Copyright 2013-2014 Ray Holder
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
import random
from pip._vendor import six
import sys
import time
import traceback
# sys.maxint / 2, since Python 3.2 doesn't have a sys.maxint...
MAX_WAIT = 1073741823
def retry(*dargs, **dkw):
    """
    Decorator function that instantiates the Retrying object
    @param *dargs: positional arguments passed to Retrying object
    @param **dkw: keyword arguments passed to the Retrying object
    """
    # support both @retry and @retry() as valid syntax
    if len(dargs) == 1 and callable(dargs[0]):
        # Bare ``@retry``: the single positional arg is the function itself,
        # so wrap it with a default-configured Retrying.
        def wrap_simple(f):
            @six.wraps(f)
            def wrapped_f(*args, **kw):
                return Retrying().call(f, *args, **kw)
            return wrapped_f
        return wrap_simple(dargs[0])
    else:
        # ``@retry(...)``: return a decorator; note a fresh Retrying is built
        # on every call of the wrapped function.
        def wrap(f):
            @six.wraps(f)
            def wrapped_f(*args, **kw):
                return Retrying(*dargs, **dkw).call(f, *args, **kw)
            return wrapped_f
        return wrap
class Retrying(object):
    """Drives repeated calls of a target function until a stop condition fires.

    ``stop`` decides when to give up and ``wait`` how long (ms) to sleep
    between attempts; both are composed in ``__init__`` from the keyword
    arguments, or overridden wholesale via ``stop_func``/``wait_func`` or by
    naming a method via the ``stop``/``wait`` strings.
    """
    def __init__(self,
                 stop=None, wait=None,
                 stop_max_attempt_number=None,
                 stop_max_delay=None,
                 wait_fixed=None,
                 wait_random_min=None, wait_random_max=None,
                 wait_incrementing_start=None, wait_incrementing_increment=None,
                 wait_exponential_multiplier=None, wait_exponential_max=None,
                 retry_on_exception=None,
                 retry_on_result=None,
                 wrap_exception=False,
                 stop_func=None,
                 wait_func=None,
                 stop_max_delay=None,
                 wait_jitter_max=None):
        # Each knob falls back to a library default when left as None.
        self._stop_max_attempt_number = 5 if stop_max_attempt_number is None else stop_max_attempt_number
        self._stop_max_delay = 100 if stop_max_delay is None else stop_max_delay
        self._wait_fixed = 1000 if wait_fixed is None else wait_fixed
        self._wait_random_min = 0 if wait_random_min is None else wait_random_min
        self._wait_random_max = 1000 if wait_random_max is None else wait_random_max
        self._wait_incrementing_start = 0 if wait_incrementing_start is None else wait_incrementing_start
        self._wait_incrementing_increment = 100 if wait_incrementing_increment is None else wait_incrementing_increment
        self._wait_exponential_multiplier = 1 if wait_exponential_multiplier is None else wait_exponential_multiplier
        self._wait_exponential_max = MAX_WAIT if wait_exponential_max is None else wait_exponential_max
        self._wait_jitter_max = 0 if wait_jitter_max is None else wait_jitter_max
        # TODO add chaining of stop behaviors
        # stop behavior: any explicitly-configured stop criterion may fire.
        stop_funcs = []
        if stop_max_attempt_number is not None:
            stop_funcs.append(self.stop_after_attempt)
        if stop_max_delay is not None:
            stop_funcs.append(self.stop_after_delay)
        if stop_func is not None:
            self.stop = stop_func
        elif stop is None:
            self.stop = lambda attempts, delay: any(f(attempts, delay) for f in stop_funcs)
        else:
            # ``stop`` names one of this object's stop_* methods.
            self.stop = getattr(self, stop)
        # TODO add chaining of wait behaviors
        # wait behavior: the longest of the configured waits wins.
        wait_funcs = [lambda *args, **kwargs: 0]
        if wait_fixed is not None:
            wait_funcs.append(self.fixed_sleep)
        if wait_random_min is not None or wait_random_max is not None:
            wait_funcs.append(self.random_sleep)
        if wait_incrementing_start is not None or wait_incrementing_increment is not None:
            wait_funcs.append(self.incrementing_sleep)
        if wait_exponential_multiplier is not None or wait_exponential_max is not None:
            wait_funcs.append(self.exponential_sleep)
        if wait_func is not None:
            self.wait = wait_func
        elif wait is None:
            self.wait = lambda attempts, delay: max(f(attempts, delay) for f in wait_funcs)
        else:
            self.wait = getattr(self, wait)
        # retry on exception filter
        if retry_on_exception is None:
            self._retry_on_exception = self.always_reject
        else:
            self._retry_on_exception = retry_on_exception
        # TODO simplify retrying by Exception types
        # retry on result filter
        if retry_on_result is None:
            self._retry_on_result = self.never_reject
        else:
            self._retry_on_result = retry_on_result
        self._wrap_exception = wrap_exception
    def stop_after_attempt(self, previous_attempt_number, delay_since_first_attempt_ms):
        """Stop after the previous attempt >= stop_max_attempt_number."""
        return previous_attempt_number >= self._stop_max_attempt_number
    def stop_after_delay(self, previous_attempt_number, delay_since_first_attempt_ms):
        """Stop after the time from the first attempt >= stop_max_delay."""
        return delay_since_first_attempt_ms >= self._stop_max_delay
    def no_sleep(self, previous_attempt_number, delay_since_first_attempt_ms):
        """Don't sleep at all before retrying."""
        return 0
    def fixed_sleep(self, previous_attempt_number, delay_since_first_attempt_ms):
        """Sleep a fixed amount of time between each retry."""
        return self._wait_fixed
    def random_sleep(self, previous_attempt_number, delay_since_first_attempt_ms):
        """Sleep a random amount of time between wait_random_min and wait_random_max"""
        return random.randint(self._wait_random_min, self._wait_random_max)
    def incrementing_sleep(self, previous_attempt_number, delay_since_first_attempt_ms):
        """
        Sleep an incremental amount of time after each attempt, starting at
        wait_incrementing_start and incrementing by wait_incrementing_increment
        """
        result = self._wait_incrementing_start + (self._wait_incrementing_increment * (previous_attempt_number - 1))
        if result < 0:
            result = 0
        return result
    def exponential_sleep(self, previous_attempt_number, delay_since_first_attempt_ms):
        """Sleep 2**attempts * multiplier ms, clamped to [0, wait_exponential_max]."""
        exp = 2 ** previous_attempt_number
        result = self._wait_exponential_multiplier * exp
        if result > self._wait_exponential_max:
            result = self._wait_exponential_max
        if result < 0:
            result = 0
        return result
    def never_reject(self, result):
        """Result filter default: no result triggers a retry."""
        return False
    def always_reject(self, result):
        """Exception filter default: every exception triggers a retry."""
        return True
    def should_reject(self, attempt):
        """Return True if *attempt* (result or exception) warrants a retry."""
        reject = False
        if attempt.has_exception:
            # attempt.value is a sys.exc_info() triple; [1] is the exception.
            reject |= self._retry_on_exception(attempt.value[1])
        else:
            reject |= self._retry_on_result(attempt.value)
        return reject
    def call(self, fn, *args, **kwargs):
        """Invoke *fn* repeatedly per the stop/wait policy; return its value.

        Raises the last exception (or RetryError when wrap_exception is set)
        once the stop criterion fires.
        """
        start_time = int(round(time.time() * 1000))
        attempt_number = 1
        while True:
            try:
                attempt = Attempt(fn(*args, **kwargs), attempt_number, False)
            except:
                # Capture the full exc_info so the traceback can be re-raised.
                tb = sys.exc_info()
                attempt = Attempt(tb, attempt_number, True)
            if not self.should_reject(attempt):
                return attempt.get(self._wrap_exception)
            delay_since_first_attempt_ms = int(round(time.time() * 1000)) - start_time
            if self.stop(attempt_number, delay_since_first_attempt_ms):
                if not self._wrap_exception and attempt.has_exception:
                    # get() on an attempt with an exception should cause it to be raised, but raise just in case
                    raise attempt.get()
                else:
                    raise RetryError(attempt)
            else:
                sleep = self.wait(attempt_number, delay_since_first_attempt_ms)
                if self._wait_jitter_max:
                    jitter = random.random() * self._wait_jitter_max
                    sleep = sleep + max(0, jitter)
                time.sleep(sleep / 1000.0)
                attempt_number += 1
class Attempt(object):
    """
    An Attempt encapsulates a call to a target function that may end as a
    normal return value from the function or an Exception depending on what
    occurred during the execution.
    """
    def __init__(self, value, attempt_number, has_exception):
        # ``value`` is the function's return value, or, when has_exception is
        # true, the sys.exc_info() triple captured at the call site.
        self.value = value
        self.attempt_number = attempt_number
        self.has_exception = has_exception
    def get(self, wrap_exception=False):
        """
        Return the return value of this Attempt instance or raise an Exception.
        If wrap_exception is true, this Attempt is wrapped inside of a
        RetryError before being raised.
        """
        if self.has_exception:
            if wrap_exception:
                raise RetryError(self)
            else:
                # Re-raise preserving the original traceback (py2/py3 via six).
                six.reraise(self.value[0], self.value[1], self.value[2])
        else:
            return self.value
    def __repr__(self):
        if self.has_exception:
            return "Attempts: {0}, Error:\n{1}".format(self.attempt_number, "".join(traceback.format_tb(self.value[2])))
        else:
            return "Attempts: {0}, Value: {1}".format(self.attempt_number, self.value)
class RetryError(Exception):
    """Wraps the final failed ``Attempt`` once all retries are exhausted."""
    def __init__(self, last_attempt):
        # Keep the losing attempt so callers can inspect its value/traceback.
        self.last_attempt = last_attempt
    def __str__(self):
        return "RetryError[%s]" % (self.last_attempt,)
| mit |
Kamp9/scipy | benchmarks/benchmarks/sparse_linalg_solve.py | 52 | 1688 | """
Check the speed of the conjugate gradient solver.
"""
from __future__ import division, absolute_import, print_function
import numpy as np
from numpy.testing import assert_equal
try:
from scipy import linalg, sparse
from scipy.sparse.linalg import cg
except ImportError:
pass
from .common import Benchmark
def _create_sparse_poisson1d(n):
# Make Gilbert Strang's favorite matrix
# http://www-math.mit.edu/~gs/PIX/cupcakematrix.jpg
P1d = sparse.diags([[-1]*(n-1), [2]*n, [-1]*(n-1)], [-1, 0, 1])
assert_equal(P1d.shape, (n, n))
return P1d
def _create_sparse_poisson2d(n):
    """Build the (n*n) x (n*n) 2-D Poisson matrix as the Kronecker sum of two 1-D ones."""
    P1d = _create_sparse_poisson1d(n)
    # kronsum(A, B) = kron(A, I) + kron(I, B): the standard 2-D Laplacian build.
    P2d = sparse.kronsum(P1d, P1d)
    assert_equal(P2d.shape, (n*n, n*n))
    return P2d
class Bench(Benchmark):
    """Time iterative (CG) and direct solvers, sparse vs dense, on 2-D Poisson."""
    params = [
        [4, 6, 10, 16, 25, 40, 64, 100, 160, 250, 400, 640, 1000, 1600],
        ['sparse', 'dense']
    ]
    param_names = ['(n,n)', 'solver']
    def setup(self, n, solver):
        # Cap problem sizes so the slower variants stay tractable; asv skips
        # parameter combinations that raise NotImplementedError.
        dense_is_active = (n**2 < 600)
        sparse_is_active = (n**2 < 20000)
        if solver == 'dense' and not dense_is_active:
            raise NotImplementedError()
        if solver == 'sparse' and not sparse_is_active:
            raise NotImplementedError()
        self.b = np.ones(n*n)
        self.P_sparse = _create_sparse_poisson2d(n)
        self.P_dense = self.P_sparse.A
    def time_cg(self, n, solver):
        if solver == 'dense':
            linalg.solve(self.P_dense, self.b)
        else:
            cg(self.P_sparse, self.b)
    def time_spsolve(self, n, solver):
        # BUG FIX: this method was a copy-paste duplicate of time_cg and never
        # benchmarked a direct sparse solve.  Use spsolve for the sparse case.
        if solver == 'dense':
            linalg.solve(self.P_dense, self.b)
        else:
            # Local import keeps the module-level try/except import block intact.
            from scipy.sparse.linalg import spsolve
            spsolve(self.P_sparse.tocsr(), self.b)
| bsd-3-clause |
AirChen/MachineLearningDoc | collective_intelligence/chapter_7/treepredict.py | 1 | 6590 | #!/usr/bin/python
from math import log
from PIL import Image, ImageDraw
# Sample dataset: each row is [referrer, country, read FAQ?, pages viewed,
# service chosen].  The last column is the classification label.
my_data=[['slashdot','USA','yes',18,'None'],
        ['google','France','yes',23,'Premium'],
        ['digg','USA','yes',24,'Basic'],
        ['kiwitobes','France','yes',23,'Basic'],
        ['google','UK','no',21,'Premium'],
        ['(direct)','New Zealand','no',12,'None'],
        ['(direct)','UK','no',21,'Basic'],
        ['google','USA','no',24,'Premium'],
        ['slashdot','France','yes',19,'None'],
        ['digg','USA','no',18,'None'],
        ['google','UK','no',18,'None'],
        ['kiwitobes','UK','no',19,'None'],
        ['digg','New Zealand','yes',12,'Basic'],
        ['slashdot','UK','no',21,'None'],
        ['google','UK','yes',18,'Basic'],
        ['kiwitobes','France','yes',19,'Basic']]
class decisionnode:
    """One node of a CART decision tree.

    Interior nodes test ``value`` against column ``col`` and route rows to the
    true branch ``tb`` or false branch ``fb``; leaves carry a ``results``
    dict of label counts instead (col is the -1 sentinel there).
    """
    def __init__(self, col = -1, value = None, results = None, tb = None, fb = None):
        self.col, self.value = col, value
        self.results = results
        self.tb, self.fb = tb, fb
def divideset(rows, column, value):
    """Partition *rows* on *column*: numeric values split by ``>= value``,
    everything else by equality.  Returns (matching, non_matching).
    """
    if isinstance(value, (int, float)):
        matches = lambda row: row[column] >= value
    else:
        matches = lambda row: row[column] == value
    true_rows, false_rows = [], []
    for row in rows:
        (true_rows if matches(row) else false_rows).append(row)
    return (true_rows, false_rows)
def uniquecount(rows):
    """Count occurrences of each result label (the last column) in *rows*."""
    counts = {}
    for row in rows:
        label = row[-1]
        counts[label] = counts.get(label, 0) + 1
    return counts
def giniimpurity(rows):
    """Gini impurity: chance that two randomly drawn rows disagree on label."""
    total = len(rows)
    counts = uniquecount(rows)
    impurity = 0.0
    # Sum p1*p2 over all ordered pairs of distinct labels.
    for k1, n1 in counts.items():
        p1 = float(n1) / total
        for k2, n2 in counts.items():
            if k1 == k2:
                continue
            impurity += p1 * (float(n2) / total)
    return impurity
def entropy(rows):
    """Shannon entropy (in bits) of the result-label distribution in *rows*."""
    counts = uniquecount(rows)
    total = len(rows)
    ent = 0.0
    for count in counts.values():
        p = float(count) / total
        ent -= p * (log(p) / log(2))
    return ent
def buildtree(rows, scoref=entropy):
    """Recursively build a decision tree from *rows* by greedy information gain.

    *scoref* measures set impurity (entropy by default).  Returns a
    decisionnode; leaves hold label counts when no split improves the score.
    """
    if len(rows) == 0:
        return decisionnode()
    current_score = scoref(rows)
    best_gain = 0.0
    best_criteria = None
    best_sets = None
    column_count = len(rows[0])-1
    for col in range(0, column_count):
        # Collect the distinct values appearing in this column.
        column_values = {}
        for row in rows:
            column_values[row[col]] = 1
        for value in column_values.keys():
            set1, set2 = divideset(rows, col, value)
            # Weighted average impurity of the two halves vs. the parent.
            p = float(len(set1))/len(rows)
            gain = current_score - p * scoref(set1) - (1-p) * scoref(set2)
            if gain > best_gain and len(set1) > 0 and len(set2) > 0:
                best_gain = gain
                best_criteria = (col, value)
                best_sets = (set1, set2)
    if best_gain > 0.0:
        trueBranch = buildtree(best_sets[0])
        falseBranch = buildtree(best_sets[1])
        return decisionnode(col = best_criteria[0], value = best_criteria[1], tb = trueBranch, fb = falseBranch)
    else:
        # No useful split: make a leaf holding the label counts.
        return decisionnode(results = uniquecount(rows))
def printtree(tree, indent = ''):
    """Pretty-print *tree* to stdout, indenting one space per level."""
    if tree.results != None:
        # Leaf: show the label counts.
        print str(tree.results)
    else:
        # Interior node: show the split criterion, then both branches.
        print str(tree.col) + ':' + str(tree.value) + '?'
        print indent + 'T->'
        printtree(tree.tb, indent+'  ')
        print indent + 'F->'
        printtree(tree.fb, indent+'  ')
def getwidth(tree):
    """Number of leaf nodes under *tree* (used to size the drawing canvas)."""
    if tree.tb is None and tree.fb is None:
        return 1
    return getwidth(tree.tb) + getwidth(tree.fb)
def getdepth(tree):
    """Longest path (in edges) from *tree* down to a leaf; 0 for a leaf."""
    if tree.tb is None and tree.fb is None:
        return 0
    return 1 + max(getdepth(tree.tb), getdepth(tree.fb))
def drawtree(tree, jpeg = 'tree.jpg'):
    """Render *tree* to a JPEG image at path *jpeg* (100 px per leaf/level)."""
    w = getwidth(tree)*100
    h = getdepth(tree)*100+120
    img = Image.new('RGB', (w,h), (255,255,255))
    draw = ImageDraw.Draw(img)
    # NOTE(review): w/2 is integer division on py2; under py3 this becomes a
    # float coordinate -- confirm before porting.
    drawnode(draw, tree, w/2, 20)
    img.save(jpeg, 'JPEG')
def drawnode(draw, tree, x, y):
    """Draw *tree* centred at (x, y) on *draw*, recursing into both branches."""
    if tree.results == None:
        # Horizontal space claimed by each subtree (100 px per leaf).
        w1 = getwidth(tree.fb)*100
        w2 = getwidth(tree.tb)*100
        left = x - (w1+w2)/2
        right = x + (w1+w2)/2
        # Split criterion label, then connectors down to the two children.
        draw.text((x-20,y-10), str(tree.col) + ':' + str(tree.value),(0,0,0))
        draw.line((x, y, left+w1/2, y+100), fill = (255, 0, 0))
        draw.line((x, y, right-w2/2, y+100), fill = (255, 0, 0))
        drawnode(draw, tree.fb, left+w1/2, y+100)
        drawnode(draw, tree.tb, right-w2/2, y+100)
    else:
        # Leaf: print the "label:count" lines.
        txt = ' \n'.join(['%s:%d'%v for v in tree.results.items()])
        draw.text((x-20, y), txt, (0, 0, 0))
def classify(observation, tree):
    """Route *observation* down *tree*; return the reached leaf's label counts.

    Numeric features branch on ``>= value``, all others on equality.
    """
    if tree.results is not None:
        return tree.results
    v = observation[tree.col]
    if isinstance(v, (int, float)):
        follow_true = v >= tree.value
    else:
        follow_true = v == tree.value
    branch = tree.tb if follow_true else tree.fb
    return classify(observation, branch)
def prune(tree, mingain):
    """Merge leaf pairs whose split yields less than *mingain* entropy reduction.

    Recurses to the bottom of the tree first, then compares the entropy of the
    merged leaf data against the mean entropy of the two children; if the gain
    is below *mingain* the children are collapsed into this node.
    """
    if tree.tb.results == None:
        prune(tree.tb, mingain)
    if tree.fb.results == None:
        prune(tree.fb, mingain)
    if tree.tb.results != None and tree.fb.results != None:
        # Rebuild flat row lists from the two leaves' label counts.
        tb,fb = [],[]
        for v,c in tree.tb.results.items():
            tb += [[v]]*c
        for v,c in tree.fb.results.items():
            fb += [[v]]*c
        # BUG FIX: the mean of the child entropies needs parentheses; the old
        # expression ``entropy(tb)+entropy(fb)/2`` halved only the second term
        # (known errata for this routine).
        delta = entropy(tb+fb) - (entropy(tb) + entropy(fb)) / 2
        if delta < mingain:
            # Collapse: children disappear, counts merge into this node.
            tree.tb, tree.fb = None, None
            tree.results = uniquecount(tb + fb)
def mdclassify(observation, tree):
    """Classify *observation*, tolerating missing (None) feature values.

    When the tested feature is None, BOTH branches are followed and their
    result dicts are combined, weighted by the fraction of training rows that
    went down each side.  Otherwise behaves like ``classify``.
    """
    if tree.results != None:
        return tree.results
    v = observation[tree.col]
    if v == None:
        # Missing value: evaluate both subtrees and blend their counts.
        tr, fr = mdclassify(observation, tree.tb), mdclassify(observation, tree.fb)
        tcount = sum(tr.values())
        fcount = sum(fr.values())
        tw = float(tcount) / (tcount + fcount)
        fw = 1 - tw
        result = {}
        for k, val in tr.items():
            result[k] = val * tw
        for k, val in fr.items():
            if k not in result:
                result[k] = 0
            result[k] += val * fw
        return result
    else:
        # SYNTAX FIX: the original had a duplicated, mis-nested ``else:`` block
        # here that made the module unparseable; this is the intended logic.
        if isinstance(v, int) or isinstance(v, float):
            branch = tree.tb if v >= tree.value else tree.fb
        else:
            branch = tree.tb if v == tree.value else tree.fb
        return mdclassify(observation, branch)
def variance(rows):
    """Population variance of the numeric result column (last entry) of *rows*."""
    if not rows:
        return 0
    data = [float(row[-1]) for row in rows]
    mean = sum(data) / len(data)
    return sum((d - mean) ** 2 for d in data) / len(data)
if __name__ == "__main__":
# print divideset(my_data, 3, 20)
# print giniimpurity(my_data)
# print entropy(my_data)
#
# print 'test'
# set1, set2 = divideset(my_data, 2, 'yes')
# print giniimpurity(set1)
# print entropy(set2)
tree = buildtree(my_data)
# printtree(tree)
# drawtree(tree, jpeg = 'treeview.jpg')
# print classify(['(direct)', 'USA', 'yes', 5], tree)
# prune(tree, 0.1)
# printtree(tree)
# print '-------------------'
# prune(tree, 1.0)
# printtree(tree)
print mdclassify(['google', None, 'yes', None], tree)
print '-------------------'
print mdclassify(['google', 'France', None, None], tree)
| mit |
audunv/andp | python/andp/view/web/remote.py | 1 | 3288 | # -*- coding: utf-8; -*-
# Copyright (C) 2009 Østfold University College
#
# This file is part of ANDP.
#
# ANDP is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
# 02111-1307, USA.
"""
This module handles the remote control interface
"""
import subprocess, cStringIO
from PIL import Image, ImageDraw, ImageFont
from mod_python import apache
import andp.model.remote
from andp.view.web import Page
class RemoteQR(Page):
    """
    Returns a QR code image, and records the client's address for
    remote-control callbacks
    """
    path = "/remote/qr"
    showDebug = False
    fontPath = "/usr/share/fonts/truetype/ttf-bitstream-vera/Vera.ttf" # Quite randomly chosen
    def Main(self):
        # Register the caller and fetch its short session id for the callback URL.
        host = self.req.get_remote_host(apache.REMOTE_NOLOOKUP)
        andp.model.remote.RegisterClient(self.req, host)
        sessionID = andp.model.remote.SessionIDForHost(self.req, host)
        # Pipe the callback URI through qrencode to get a PNG QR code on stdout.
        cmd = ("qrencode", "-s", "16", "-m", "0", "-o", "-")
        proc = subprocess.Popen(cmd, stdin = subprocess.PIPE, stdout = subprocess.PIPE)
        uri = "http://%s.%s/%s" % (self.req.config["network"]["host"], self.req.config["network"]["domain"], sessionID)
        buf = cStringIO.StringIO(proc.communicate(uri)[0])
        qrImage = Image.open(buf)
        # Render the human-readable URL as a text strip below the QR code.
        font = ImageFont.truetype(self.fontPath, 64) # Big enough to scale down well
        bottomText = "%s.%s/%s" % (self.req.config["network"]["host"], self.req.config["network"]["domain"], sessionID)
        bottomImage = Image.new("RGB", font.getsize(bottomText), (0, 0, 0))
        bottomDraw = ImageDraw.Draw(bottomImage)
        bottomDraw.text((0, 0), bottomText, font = font)
        del bottomDraw
        # NOTE(review): the computed height is a float; PIL's resize expects
        # ints -- relies on py2/PIL coercion, confirm before porting.
        bottomImage = bottomImage.resize((qrImage.size[0], (float(qrImage.size[0]) / bottomImage.size[0]) * bottomImage.size[1]), Image.ANTIALIAS)
        # Stack the QR code above the text strip into one composite image.
        compositeImage = Image.new("RGB", (qrImage.size[0], qrImage.size[1] + bottomImage.size[1]), (0, 0, 0))
        compositeImage.paste(qrImage, (0, 0))
        compositeImage.paste(bottomImage, (0, qrImage.size[1]))
        buf = cStringIO.StringIO()
        compositeImage.save(buf, "PNG")
        self.SendHeader(contentType = "image/png")
        self.Write(buf.getvalue())
        return apache.OK
class RemoteRemote(Page):
    """
    Displays remote-control UI
    """
    path = None # Hard-coded in handler.py :-/
    def Main(self):
        # URLs look like /<4-char session id>; anything unknown is a 404.
        sessionID = self.req.uri[1:5]
        host = andp.model.remote.HostBySessionID(self.req, sessionID)
        if not host:
            return apache.HTTP_NOT_FOUND
        self.SendHeader()
        # Placeholder body -- the real remote-control UI is not implemented yet.
        self.Write("foo")
        return apache.OK
| gpl-2.0 |
bitcity/django | django/core/cache/backends/base.py | 145 | 9680 | "Base Cache class."
from __future__ import unicode_literals
import time
import warnings
from django.core.exceptions import DjangoRuntimeWarning, ImproperlyConfigured
from django.utils.module_loading import import_string
class InvalidCacheBackendError(ImproperlyConfigured):
    """Raised when a cache backend cannot be located or configured."""
    pass
class CacheKeyWarning(DjangoRuntimeWarning):
    """Warning for cache keys that may not be portable (e.g. to memcached)."""
    pass
# Stub class to ensure not passing in a `timeout` argument results in
# the default timeout
DEFAULT_TIMEOUT = object()
# Memcached does not accept keys longer than this.
MEMCACHE_MAX_KEY_LENGTH = 250
def default_key_func(key, key_prefix, version):
    """
    Default function to generate keys.

    Constructs the key used by all other methods. By default it prepends
    the `key_prefix'. KEY_FUNCTION can be used to specify an alternate
    function with custom key making behavior.
    """
    return '{0}:{1}:{2}'.format(key_prefix, version, key)
def get_key_func(key_func):
    """
    Function to decide which key function to use.

    Defaults to ``default_key_func``.
    """
    if key_func is None:
        return default_key_func
    # Accept either a callable or a dotted path to one.
    return key_func if callable(key_func) else import_string(key_func)
class BaseCache(object):
def __init__(self, params):
timeout = params.get('timeout', params.get('TIMEOUT', 300))
if timeout is not None:
try:
timeout = int(timeout)
except (ValueError, TypeError):
timeout = 300
self.default_timeout = timeout
options = params.get('OPTIONS', {})
max_entries = params.get('max_entries', options.get('MAX_ENTRIES', 300))
try:
self._max_entries = int(max_entries)
except (ValueError, TypeError):
self._max_entries = 300
cull_frequency = params.get('cull_frequency', options.get('CULL_FREQUENCY', 3))
try:
self._cull_frequency = int(cull_frequency)
except (ValueError, TypeError):
self._cull_frequency = 3
self.key_prefix = params.get('KEY_PREFIX', '')
self.version = params.get('VERSION', 1)
self.key_func = get_key_func(params.get('KEY_FUNCTION'))
def get_backend_timeout(self, timeout=DEFAULT_TIMEOUT):
"""
Returns the timeout value usable by this backend based upon the provided
timeout.
"""
if timeout == DEFAULT_TIMEOUT:
timeout = self.default_timeout
elif timeout == 0:
# ticket 21147 - avoid time.time() related precision issues
timeout = -1
return None if timeout is None else time.time() + timeout
def make_key(self, key, version=None):
"""Constructs the key used by all other methods. By default it
uses the key_func to generate a key (which, by default,
prepends the `key_prefix' and 'version'). An different key
function can be provided at the time of cache construction;
alternatively, you can subclass the cache backend to provide
custom key making behavior.
"""
if version is None:
version = self.version
new_key = self.key_func(key, self.key_prefix, version)
return new_key
def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
"""
Set a value in the cache if the key does not already exist. If
timeout is given, that timeout will be used for the key; otherwise
the default cache timeout will be used.
Returns True if the value was stored, False otherwise.
"""
raise NotImplementedError('subclasses of BaseCache must provide an add() method')
def get(self, key, default=None, version=None):
"""
Fetch a given key from the cache. If the key does not exist, return
default, which itself defaults to None.
"""
raise NotImplementedError('subclasses of BaseCache must provide a get() method')
def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
"""
Set a value in the cache. If timeout is given, that timeout will be
used for the key; otherwise the default cache timeout will be used.
"""
raise NotImplementedError('subclasses of BaseCache must provide a set() method')
def delete(self, key, version=None):
"""
Delete a key from the cache, failing silently.
"""
raise NotImplementedError('subclasses of BaseCache must provide a delete() method')
def get_many(self, keys, version=None):
"""
Fetch a bunch of keys from the cache. For certain backends (memcached,
pgsql) this can be *much* faster when fetching multiple values.
Returns a dict mapping each key in keys to its value. If the given
key is missing, it will be missing from the response dict.
"""
d = {}
for k in keys:
val = self.get(k, version=version)
if val is not None:
d[k] = val
return d
def get_or_set(self, key, default=None, timeout=DEFAULT_TIMEOUT, version=None):
"""
Fetch a given key from the cache. If the key does not exist,
the key is added and set to the default value. The default value can
also be any callable. If timeout is given, that timeout will be used
for the key; otherwise the default cache timeout will be used.
Returns the value of the key stored or retrieved on success,
False on error.
"""
if default is None:
raise ValueError('You need to specify a value.')
val = self.get(key, version=version)
if val is None:
if callable(default):
default = default()
val = self.add(key, default, timeout=timeout, version=version)
if val:
return self.get(key, version=version)
return val
def has_key(self, key, version=None):
"""
Returns True if the key is in the cache and has not expired.
"""
return self.get(key, version=version) is not None
def incr(self, key, delta=1, version=None):
"""
Add delta to value in the cache. If the key does not exist, raise a
ValueError exception.
"""
value = self.get(key, version=version)
if value is None:
raise ValueError("Key '%s' not found" % key)
new_value = value + delta
self.set(key, new_value, version=version)
return new_value
def decr(self, key, delta=1, version=None):
"""
Subtract delta from value in the cache. If the key does not exist, raise
a ValueError exception.
"""
return self.incr(key, -delta, version=version)
def __contains__(self, key):
"""
Returns True if the key is in the cache and has not expired.
"""
# This is a separate method, rather than just a copy of has_key(),
# so that it always has the same functionality as has_key(), even
# if a subclass overrides it.
return self.has_key(key)
def set_many(self, data, timeout=DEFAULT_TIMEOUT, version=None):
"""
Set a bunch of values in the cache at once from a dict of key/value
pairs. For certain backends (memcached), this is much more efficient
than calling set() multiple times.
If timeout is given, that timeout will be used for the key; otherwise
the default cache timeout will be used.
"""
for key, value in data.items():
self.set(key, value, timeout=timeout, version=version)
def delete_many(self, keys, version=None):
"""
Set a bunch of values in the cache at once. For certain backends
(memcached), this is much more efficient than calling delete() multiple
times.
"""
for key in keys:
self.delete(key, version=version)
def clear(self):
"""Remove *all* values from the cache at once."""
raise NotImplementedError('subclasses of BaseCache must provide a clear() method')
def validate_key(self, key):
"""
Warn about keys that would not be portable to the memcached
backend. This encourages (but does not force) writing backend-portable
cache code.
"""
if len(key) > MEMCACHE_MAX_KEY_LENGTH:
warnings.warn('Cache key will cause errors if used with memcached: '
'%s (longer than %s)' % (key, MEMCACHE_MAX_KEY_LENGTH),
CacheKeyWarning)
for char in key:
if ord(char) < 33 or ord(char) == 127:
warnings.warn('Cache key contains characters that will cause '
'errors if used with memcached: %r' % key,
CacheKeyWarning)
    def incr_version(self, key, delta=1, version=None):
        """Adds delta to the cache version for the supplied key. Returns the
        new version.

        The value is re-stored under the new version and the entry at the
        old version is deleted. Raises ValueError if the key is missing.
        """
        if version is None:
            version = self.version
        value = self.get(key, version=version)
        if value is None:
            raise ValueError("Key '%s' not found" % key)
        # Copy the value to the new version slot, then drop the old one.
        self.set(key, value, version=version + delta)
        self.delete(key, version=version)
        return version + delta
def decr_version(self, key, delta=1, version=None):
"""Substracts delta from the cache version for the supplied key. Returns
the new version.
"""
return self.incr_version(key, -delta, version)
    def close(self, **kwargs):
        """Close the cache connection.

        No-op by default; backends holding network connections override it.
        """
        pass
| bsd-3-clause |
ksachs/invenio | modules/websession/lib/webgroup_dblayer.py | 33 | 14303 | # -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2006, 2007, 2008, 2009, 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
""" Database related functions for groups"""
__revision__ = "$Id$"
from time import localtime
from zlib import decompress
from invenio.config import CFG_SITE_LANG
from invenio.dbquery import run_sql, OperationalError
from invenio.dateutils import convert_datestruct_to_datetext
from invenio.messages import gettext_set_language
from invenio.websession_config import CFG_WEBSESSION_GROUP_JOIN_POLICY
def get_groups_by_user_status(uid, user_status, login_method='INTERNAL'):
    """Select all the groups the user belongs to with the given status
    (e.g. the groups the user is admin of).
    @param uid: user id
    @param user_status: membership status letter to filter on
    @param login_method: group login method ('INTERNAL' by default)
    @return: ((id_usergroup,
               group_name,
               group_description, ))
    """
    query = """SELECT g.id,
                      g.name,
                      g.description
               FROM usergroup g, user_usergroup ug
               WHERE ug.id_user=%s AND
                     ug.id_usergroup=g.id AND
                     ug.user_status=%s AND
                     g.login_method = %s
               ORDER BY g.name"""
    uid = int(uid)
    res = run_sql(query, (uid, user_status, login_method))
    return res
def get_groups_by_login_method(uid, login_method):
"""Select all the groups the user is member of selecting the login_method.
@param uid: user id
@param login_method: the login_method (>0 external)
@return: ((id_usergroup,
group_name,
group_description, ))
"""
query = """SELECT g.id,
g.name,
g.description
FROM usergroup g, user_usergroup ug
WHERE ug.id_user=%s AND
ug.id_usergroup=g.id AND
g.login_method=%s
ORDER BY g.name"""
uid = int(uid)
res = run_sql(query, (uid, login_method))
return res
def get_groups_with_description(uid):
"""Select all the groups the user is member of.
@param uid: user id
@return: ((id_usergroup,
group_name,
group_description, ))
"""
query = """SELECT g.id,
g.name,
g.description
FROM usergroup g, user_usergroup ug
WHERE ug.id_user=%s AND
ug.id_usergroup=g.id
ORDER BY g.name"""
uid = int(uid)
res = run_sql(query, (uid, ))
return res
def get_external_groups(uid):
"""Select all the groups the user is member of selecting the login_method.
@param uid: user id
@param login_method: the login_method (>0 external)
@return: ((id_usergroup,
group_name,
group_description, ))
"""
query = """SELECT g.id,
g.name,
g.description
FROM usergroup g, user_usergroup ug
WHERE ug.id_user=%s AND
ug.id_usergroup=g.id AND
g.login_method != 'INTERNAL'
ORDER BY g.name"""
uid = int(uid)
res = run_sql(query, (uid, ))
return res
def get_groups(uid):
    """Return [(group_id, group_name)] for every group the user is member of."""
    query = """SELECT g.id, g.name
               FROM usergroup g, user_usergroup ug
               WHERE ug.id_user=%s AND
                     ug.id_usergroup=g.id
            """
    return list(run_sql(query, (uid, )))
def get_group_id(group_name, login_method='INTERNAL'):
    """@return: the id of the group called group_name with given login_method.

    Note: the result is a run_sql tuple-of-rows (possibly empty), not a
    scalar id.
    """
    return run_sql("""
        SELECT id FROM usergroup
        WHERE login_method = %s AND name = %s""", (login_method, group_name,))
def get_login_method_groups(uid, login_method='INTERNAL'):
"""Select all the external groups of a particular login_method for which
the user is subscrided.
@return: ((group_name, group_id))
"""
return run_sql("""
SELECT g.name as name, g.id as id
FROM user_usergroup as u JOIN usergroup as g
ON u.id_usergroup = g.id
WHERE u.id_user = %s and g.login_method = %s""",
(uid, login_method,))
def get_all_login_method_groups(login_method):
"""Select all the external groups of a particular login_method.
@return: ({group_name: group_id, ...})
"""
return dict(run_sql("""
SELECT name, id
FROM usergroup
WHERE login_method = %s""",
(login_method,)))
def get_all_users_with_groups_with_login_method(login_method):
"""Select all the users that belong at least to one external group
of kind login_method.
"""
return dict(run_sql("""
SELECT DISTINCT u.email, u.id
FROM user AS u JOIN user_usergroup AS uu ON u.id = uu.id_user
JOIN usergroup AS ug ON ug.id = uu.id_usergroup
WHERE ug.login_method = %s""", (login_method,)))
def get_visible_group_list(uid, pattern=""):
    """List the groups the user can join: groups with a visible join policy
    that the user is not already a member of (regardless of status).

    @param uid: user id
    @param pattern: optional RLIKE pattern filtering group names
    @return: dict {group_id: group_name} whose names match pattern
    """
    groups = {}
    uid = int(uid)
    # Groups the user already belongs to (any membership status).
    # NOTE: the original used map() purely for its side effect, which
    # silently does nothing under Python 3 where map() is lazy; explicit
    # loops/comprehensions are used instead.
    res = run_sql("""SELECT distinct(id_usergroup)
               FROM user_usergroup
               WHERE id_user=%s """, (uid,))
    grpID = [int(row[0]) for row in res]
    query2 = """SELECT id,name
                FROM usergroup
                WHERE (join_policy='%s' OR join_policy='%s')""" % (
        CFG_WEBSESSION_GROUP_JOIN_POLICY['VISIBLEOPEN'],
        CFG_WEBSESSION_GROUP_JOIN_POLICY['VISIBLEMAIL'])
    # grpID values are ints we produced ourselves, so interpolation is safe.
    if len(grpID) == 1:
        query2 += """ AND id!=%i""" % grpID[0]
    elif len(grpID) > 1:
        query2 += """ AND id NOT IN %s""" % str(tuple(grpID))
    if pattern:
        try:
            res2 = run_sql(query2 + """ AND name RLIKE %s ORDER BY name""", (pattern,))
        except OperationalError:
            # An invalid user-supplied RLIKE pattern yields no groups.
            res2 = ()
    else:
        res2 = run_sql(query2 + """ ORDER BY name""")
    for group_id, group_name in res2:
        groups.setdefault(group_id, group_name)
    return groups
def insert_new_group(uid,
new_group_name,
new_group_description,
join_policy,
login_method='INTERNAL'):
"""Create a new group and affiliate a user."""
query1 = """INSERT INTO usergroup (id, name, description, join_policy,
login_method)
VALUES (NULL,%s,%s,%s,%s)
"""
params1 = (new_group_name,
new_group_description,
join_policy,
login_method)
res1 = run_sql(query1, params1)
date = convert_datestruct_to_datetext(localtime())
uid = int(uid)
query2 = """INSERT INTO user_usergroup (id_user, id_usergroup, user_status,
user_status_date)
VALUES (%s,%s,'A',%s)
"""
params2 = (uid, res1, date)
res2 = run_sql(query2, params2)
return res1
def insert_only_new_group(new_group_name,
new_group_description,
join_policy,
login_method='INTERNAL'):
"""Create a group with no user in (yet).
@return: its id
"""
query = """INSERT INTO usergroup (name, description, join_policy, login_method)
VALUES (%s, %s, %s, %s)
"""
res = run_sql(query, (new_group_name, new_group_description, join_policy, login_method))
return res
def insert_new_member(uid,
grpID,
status):
"""Insert new member."""
query = """INSERT INTO user_usergroup (id_user, id_usergroup, user_status,
user_status_date)
VALUES (%s,%s,%s,%s)
"""
date = convert_datestruct_to_datetext(localtime())
res = run_sql(query, (uid, grpID, status, date))
return res
def get_group_infos(grpID):
"""Get group infos."""
query = """SELECT id,name,description,join_policy,login_method FROM usergroup
WHERE id = %s"""
res = run_sql(query, (grpID, ))
return res
def get_all_groups_description(login_method):
"""Get all groups description, dictionary with key name."""
query = """SELECT name, description
FROM usergroup
WHERE login_method = %s
"""
res = run_sql(query, (login_method, ))
if res:
return dict(res)
else:
return {}
def update_group_infos(grpID,
group_name,
group_description,
join_policy):
"""Update group."""
res = run_sql("""UPDATE usergroup
SET name=%s, description=%s, join_policy=%s
WHERE id=%s""",
(group_name, group_description, join_policy, grpID))
return res
def get_user_status(uid, grpID):
"""Get the status of the user for the given group."""
query = """SELECT user_status FROM user_usergroup
WHERE id_user = %s
AND id_usergroup=%s"""
uid = int(uid)
res = run_sql(query, (uid, grpID))
return res
def get_users_by_status(grpID, status, ln=CFG_SITE_LANG):
    """Get the list of users with the given status.
    @return: ((id, nickname),) where nickname falls back to
    "user#<uid>" when the user has no nickname
    """
    _ = gettext_set_language(ln)
    rows = run_sql("""SELECT ug.id_user, u.nickname
                   FROM user_usergroup ug, user u
                   WHERE ug.id_usergroup = %s
                   AND ug.id_user=u.id
                   AND user_status = %s""",
                (grpID, status))
    members = []
    for member_id, nickname in rows or ():
        display_name = nickname or (_("user") + "#%i" % member_id)
        members.append((member_id, display_name))
    return tuple(members)
def delete_member(grpID, member_id):
"""Delete member."""
query = """DELETE FROM user_usergroup
WHERE id_usergroup = %s
AND id_user = %s"""
member_id = int(member_id)
res = run_sql(query, (grpID, member_id))
return res
def delete_group_and_members(grpID):
"""Delete the group and its members."""
query = """DELETE FROM usergroup
WHERE id = %s
"""
res = run_sql(query, (grpID,))
query = """DELETE FROM user_usergroup
WHERE id_usergroup = %s
"""
res = run_sql(query, (grpID,))
return res
def add_pending_member(grpID, member_id, user_status):
"""Change user status:
Pending member becomes normal member"""
date = convert_datestruct_to_datetext(localtime())
res = run_sql("""UPDATE user_usergroup
SET user_status = %s, user_status_date = %s
WHERE id_usergroup = %s
AND id_user = %s""",
(user_status, date, grpID, member_id))
return res
def leave_group(grpID, uid):
"""Remove user from the group member list."""
query = """DELETE FROM user_usergroup
WHERE id_usergroup=%s
AND id_user=%s"""
uid = int(uid)
res = run_sql(query, (grpID, uid))
return res
def drop_external_groups(userId):
    """Drops all the external (non-INTERNAL) group memberships of userid.

    @return: run_sql result (number of deleted membership rows)
    """
    # Multi-table DELETE: removes only the user_usergroup rows, joined
    # against usergroup to restrict to external login methods.
    query = """DELETE user_usergroup FROM user_usergroup, usergroup
                WHERE user_usergroup.id_user=%s
                AND usergroup.id = user_usergroup.id_usergroup
                AND usergroup.login_method <> 'INTERNAL'"""
    return run_sql(query, (userId,))
def group_name_exist(group_name, login_method='INTERNAL'):
"""Get all group id whose name like group_name and login_method."""
query = """SELECT id
FROM usergroup
WHERE login_method=%s AND name=%s"""
res = run_sql(query, (login_method, group_name))
return res
def get_group_login_method(grpID):
    """Return the login_method of the group, or None if grpID doesn't exist."""
    rows = run_sql("""SELECT login_method
                 FROM usergroup
                 WHERE id=%s""", (grpID, ))
    if not rows:
        return None
    return rows[0][0]
def count_nb_group_user(uid, user_status):
    """
    @param uid: user id
    @param user_status: member status
    @return: integer of number of groups the user belongs to
    with the given status, 0 if none
    """
    rows = run_sql("""SELECT count(id_user)
                   FROM user_usergroup
                   WHERE id_user = %s
                   AND user_status = %s""",
                (uid, user_status))
    return int(rows[0][0]) if rows else 0
def get_all_users():
"""@return: all the email:id"""
query = """SELECT UPPER(email), id
FROM user
WHERE email != ''
"""
res = run_sql(query)
if res:
return dict(res)
else:
return {}
def get_users_in_group(grpID):
"""@return: all uids of users belonging to group grpID"""
query = """SELECT id_user
FROM user_usergroup
WHERE id_usergroup = %s
"""
res = run_sql(query, (grpID, ))
return [uid[0] for uid in res]
########################## helpful functions ##################################
def __decompress_last(item):
    """Return *item* as a list whose final element has been zlib-decompressed."""
    result = list(item[:-1])
    result.append(decompress(item[-1]))
    return result
| gpl-2.0 |
kmonsoor/python-for-android | python3-alpha/extra_modules/gdata/Crypto/Protocol/Chaffing.py | 44 | 9496 | """This file implements the chaffing algorithm.
Winnowing and chaffing is a technique for enhancing privacy without requiring
strong encryption. In short, the technique takes a set of authenticated
message blocks (the wheat) and adds a number of chaff blocks which have
randomly chosen data and MAC fields. This means that to an adversary, the
chaff blocks look as valid as the wheat blocks, and so the authentication
would have to be performed on every block. By tailoring the number of chaff
blocks added to the message, the sender can make breaking the message
computationally infeasible. There are many other interesting properties of
the winnow/chaff technique.
For example, say Alice is sending a message to Bob. She packetizes the
message and performs an all-or-nothing transformation on the packets. Then
she authenticates each packet with a message authentication code (MAC). The
MAC is a hash of the data packet, and there is a secret key which she must
share with Bob (key distribution is an exercise left to the reader). She then
adds a serial number to each packet, and sends the packets to Bob.
Bob receives the packets, and using the shared secret authentication key,
authenticates the MACs for each packet. Those packets that have bad MACs are
simply discarded. The remainder are sorted by serial number, and passed
through the reverse all-or-nothing transform. The transform means that an
eavesdropper (say Eve) must acquire all the packets before any of the data can
be read. If even one packet is missing, the data is useless.
There's one twist: by adding chaff packets, Alice and Bob can make Eve's job
much harder, since Eve now has to break the shared secret key, or try every
combination of wheat and chaff packet to read any of the message. The cool
thing is that Bob doesn't need to add any additional code; the chaff packets
are already filtered out because their MACs don't match (in all likelihood --
since the data and MACs for the chaff packets are randomly chosen it is
possible, but very unlikely that a chaff MAC will match the chaff data). And
Alice need not even be the party adding the chaff! She could be completely
unaware that a third party, say Charles, is adding chaff packets to her
messages as they are transmitted.
For more information on winnowing and chaffing see this paper:
Ronald L. Rivest, "Chaffing and Winnowing: Confidentiality without Encryption"
http://theory.lcs.mit.edu/~rivest/chaffing.txt
"""
__revision__ = "$Id: Chaffing.py,v 1.7 2003/02/28 15:23:21 akuchling Exp $"
from Crypto.Util.number import bytes_to_long
class Chaff:
    """Class implementing the chaff adding algorithm.

    Methods for subclasses:

            _randnum(size):
               Returns a randomly generated number with a byte-length equal
               to size.  Subclasses can use this to implement better random
               data and MAC generating algorithms.  The default algorithm is
               probably not very cryptographically secure.  It is most
               important that the chaff data does not contain any patterns
               that can be used to discern it from wheat data without running
               the MAC.
    """

    def __init__(self, factor=1.0, blocksper=1):
        """Chaff(factor:float, blocksper:int)

        factor is the number of message blocks to add chaff to,
        expressed as a percentage between 0.0 and 1.0.  blocksper is
        the number of chaff blocks to include for each block being
        chaffed.  Thus the defaults add one chaff block to every
        message block.  By changing the defaults, you can adjust how
        computationally difficult it could be for an adversary to
        brute-force crack the message.  The difficulty is expressed
        as:

            pow(blocksper, int(factor * number-of-blocks))

        For ease of implementation, when factor < 1.0, only the first
        int(factor*number-of-blocks) message blocks are chaffed.
        """
        if not (0.0<=factor<=1.0):
            raise ValueError("'factor' must be between 0.0 and 1.0")
        if blocksper < 0:
            raise ValueError("'blocksper' must be zero or more")
        self.__factor = factor
        self.__blocksper = blocksper

    def chaff(self, blocks):
        """chaff( [(serial-number:int, data:string, MAC:string)] )
        : [(int, string, string)]

        Add chaff to message blocks.  blocks is a list of 3-tuples of the
        form (serial-number, data, MAC).

        Chaff is created by choosing a random number of the same
        byte-length as data, and another random number of the same
        byte-length as MAC.  The message block's serial number is
        placed on the chaff block and all the packet's chaff blocks
        are randomly interspersed with the single wheat block.  This
        method then returns a list of 3-tuples of the same form.
        Chaffed blocks will contain multiple instances of 3-tuples
        with the same serial number, but the only way to figure out
        which blocks are wheat and which are chaff is to perform the
        MAC hash and compare values.
        """
        chaffedblocks = []
        # count is the number of blocks to add chaff to.  blocksper is the
        # number of chaff blocks to add per message block that is being
        # chaffed.
        count = len(blocks) * self.__factor
        # BUGFIX: the original iterated with map(None, range(...), blocks),
        # a Python 2-only idiom that raises TypeError on Python 3 (this file
        # lives in the python3 port); enumerate() is the portable equivalent.
        for i, wheat in enumerate(blocks):
            # it shouldn't matter which of the n blocks we add chaff to, so for
            # ease of implementation, we'll just add them to the first count
            # blocks
            if i < count:
                serial, data, mac = wheat
                datasize = len(data)
                macsize = len(mac)
                addwheat = 1
                # add chaff to this block
                for _ in range(self.__blocksper):
                    chaffdata = self._randnum(datasize)
                    chaffmac = self._randnum(macsize)
                    chaff = (serial, chaffdata, chaffmac)
                    # mix up the order, if the 5th bit is on then put the
                    # wheat on the list
                    if addwheat and bytes_to_long(self._randnum(16)) & 0x40:
                        chaffedblocks.append(wheat)
                        addwheat = 0
                    chaffedblocks.append(chaff)
                if addwheat:
                    chaffedblocks.append(wheat)
            else:
                # just add the wheat
                chaffedblocks.append(wheat)
        return chaffedblocks

    def _randnum(self, size):
        # TBD: Not a very secure algorithm.
        # TBD: size * 2 to work around possible bug in RandomPool
        # (removed stray unused `import sys`/`import time` from the original)
        from Crypto.Util import randpool
        pool = randpool.RandomPool(size * 2)
        while size > pool.entropy:
            # Busy-wait until the pool has accumulated enough entropy.
            pass
        # we now have enough entropy in the pool to get size bytes of random
        # data... well, probably
        return pool.get_bytes(size)
if __name__ == '__main__':
    text = """\
We hold these truths to be self-evident, that all men are created equal, that
they are endowed by their Creator with certain unalienable Rights, that among
these are Life, Liberty, and the pursuit of Happiness. That to secure these
rights, Governments are instituted among Men, deriving their just powers from
the consent of the governed. That whenever any Form of Government becomes
destructive of these ends, it is the Right of the People to alter or to
abolish it, and to institute new Government, laying its foundation on such
principles and organizing its powers in such form, as to them shall seem most
likely to effect their Safety and Happiness.
"""
    print('Original text:\n==========')
    print(text)
    print('==========')

    # first transform the text into packets
    blocks = [] ; size = 40
    for i in range(0, len(text), size):
        blocks.append( text[i:i+size] )

    # now get MACs for all the text blocks.  The key is obvious...
    print('Calculating MACs...')
    from Crypto.Hash import HMAC, SHA
    key = 'Jefferson'
    macs = [HMAC.new(key, block, digestmod=SHA).digest()
            for block in blocks]

    assert len(blocks) == len(macs)

    # put these into a form acceptable as input to the chaffing procedure.
    # BUGFIX: the original used map(None, range(...), blocks, macs), which
    # raises TypeError on Python 3; enumerate+zip is the portable spelling.
    # The stray debug `print(m)` was also removed.
    source = [(i, data, mac)
              for i, (data, mac) in enumerate(zip(blocks, macs))]

    # now chaff these
    print('Adding chaff...')
    c = Chaff(factor=0.5, blocksper=2)
    chaffed = c.chaff(source)

    # BUGFIX: base64.encodestring was deprecated and later removed in
    # Python 3; encodebytes is its replacement.
    from base64 import encodebytes

    # print the chaffed message blocks.  meanwhile, separate the wheat from
    # the chaff

    wheat = []
    print('chaffed message blocks:')
    for i, data, mac in chaffed:
        # do the authentication
        h = HMAC.new(key, data, digestmod=SHA)
        pmac = h.digest()
        if pmac == mac:
            tag = '-->'
            wheat.append(data)
        else:
            tag = '   '
        # base64 adds a trailing newline
        print(tag, '%3d' % i,
              repr(data), encodebytes(mac)[:-1])

    # now decode the message packets and check it against the original text
    print('Undigesting wheat...')
    newtext = "".join(wheat)
    if newtext == text:
        print('They match!')
    else:
        print('They differ!')
| apache-2.0 |
oscarolar/odoo | openerp/report/printscreen/ps_form.py | 381 | 5211 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import openerp
from openerp.report.interface import report_int
import openerp.tools as tools
from openerp.report import render
from lxml import etree
import time, os
class report_printscreen_list(report_int):
    """Render the form view of a model's records as a tabular PDF.

    Registered at module level as 'report.printscreen.form'.
    """

    def __init__(self, name):
        report_int.__init__(self, name)

    def _parse_node(self, root_node):
        """Recursively collect the @name of every <field> element of a
        form-view architecture, in document order."""
        result = []
        for node in root_node:
            if node.tag == 'field':
                # Fixed: replaced the manual attribute-copy loop guarded by
                # the non-idiomatic ``not attrsa is None`` test with dict().
                attrs = dict(node.attrib or {})
                result.append(attrs['name'])
            else:
                result.extend(self._parse_node(node))
        return result

    def _parse_string(self, view):
        """Parse an XML view architecture string; return its field names."""
        dom = etree.XML(view)
        return self._parse_node(dom)

    def create(self, cr, uid, ids, datas, context=None):
        """Build the PDF report for the given record ids.

        @return: (pdf_binary_string, 'pdf')
        """
        if not context:
            context={}
        datas['ids'] = ids
        registry = openerp.registry(cr.dbname)
        model = registry[datas['model']]
        # The report title comes from the model's _description.
        self.title = model._description
        result = model.fields_view_get(cr, uid, view_type='form', context=context)
        fields_order = self._parse_string(result['arch'])
        rows = model.read(cr, uid, datas['ids'], result['fields'].keys())
        self._create_table(uid, datas['ids'], result['fields'], fields_order, rows, context, model._description)
        return self.obj.get(), 'pdf'

    def _create_table(self, uid, ids, fields, fields_order, results, context, title=''):
        """Build the intermediate XML report, transform it with
        custom_new.xsl and render the resulting RML to PDF."""
        pageSize = [297.0, 210.0]  # landscape A4, in millimetres
        new_doc = etree.Element("report")
        config = etree.SubElement(new_doc, 'config')

        # build header
        def _append_node(name, text):
            n = etree.SubElement(config, name)
            n.text = text

        _append_node('date', time.strftime('%d/%m/%Y'))
        _append_node('PageSize', '%.2fmm,%.2fmm' % tuple(pageSize))
        # 2.8346 converts millimetres to points (72 / 25.4).
        _append_node('PageWidth', '%.2f' % (pageSize[0] * 2.8346,))
        _append_node('PageHeight', '%.2f' % (pageSize[1] * 2.8346,))
        _append_node('report-header', title)

        # Column widths: fixed 60pt for date/time/numeric columns, the
        # remaining width shared proportionally to each field's size.
        l = []
        t = 0
        strmax = (pageSize[0] - 40) * 2.8346
        for f in fields_order:
            s = 0
            if fields[f]['type'] in ('date', 'time', 'float', 'integer'):
                s = 60
                strmax -= s
            else:
                t += fields[f].get('size', 56) / 28 + 1
            l.append(s)
        for pos in range(len(l)):
            if not l[pos]:
                s = fields[fields_order[pos]].get('size', 56) / 28 + 1
                l[pos] = strmax * s / t
        _append_node('tableSize', ','.join(map(str, l)))

        header = etree.SubElement(new_doc, 'header')
        for f in fields_order:
            field = etree.SubElement(header, 'field')
            field.text = fields[f]['string'] or ''

        lines = etree.SubElement(new_doc, 'lines')
        for line in results:
            node_line = etree.SubElement(lines, 'row')
            for f in fields_order:
                # Show the display name of many2one values and only the
                # cardinality of x2many values.
                if fields[f]['type'] == 'many2one' and line[f]:
                    line[f] = line[f][1]
                if fields[f]['type'] in ('one2many', 'many2many') and line[f]:
                    line[f] = '( ' + str(len(line[f])) + ' )'
                if fields[f]['type'] == 'float':
                    precision = (('digits' in fields[f]) and fields[f]['digits'][1]) or 2
                    line[f] = round(line[f], precision)
                col = etree.SubElement(node_line, 'col', tree='no')
                if line[f] is not None:
                    col.text = tools.ustr(line[f] or '')
                else:
                    col.text = '/'

        transform = etree.XSLT(
            etree.parse(os.path.join(tools.config['root_path'],
                                     'addons/base/report/custom_new.xsl')))
        rml = etree.tostring(transform(new_doc))
        self.obj = render.rml(rml, self.title)
        self.obj.render()
        return True
report_printscreen_list('report.printscreen.form')
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
petertodd/checklocktimeverify-demos | lib/python-bitcoinlib/bitcoin/signature.py | 10 | 1637 | # Copyright (C) 2012-2014 The python-bitcoinlib developers
#
# This file is part of python-bitcoinlib.
#
# It is subject to the license terms in the LICENSE file found in the top-level
# directory of this distribution.
#
# No part of python-bitcoinlib, including this file, may be copied, modified,
# propagated, or distributed except according to the terms contained in the
# LICENSE file.
from __future__ import absolute_import, division, print_function, unicode_literals
from bitcoin.core.serialize import *
# Py3 compatibility
import sys
if sys.version > '3':
from io import BytesIO as _BytesIO
else:
from cStringIO import StringIO as _BytesIO
class DERSignature(ImmutableSerializable):
    """An ECDSA signature in DER form: a SEQUENCE wrapping the two INTEGER
    components r and s."""
    __slots__ = ['length', 'r', 's']

    def __init__(self, r, s, length):
        object.__setattr__(self, 'r', r)
        object.__setattr__(self, 's', s)
        object.__setattr__(self, 'length', length)

    @classmethod
    def stream_deserialize(cls, f):
        # Outer SEQUENCE tag, then a length-prefixed body containing the
        # two INTEGER-tagged, length-prefixed values r and s.
        assert ser_read(f, 1) == b"\x30"
        rs = BytesSerializer.stream_deserialize(f)
        f = _BytesIO(rs)
        assert ser_read(f, 1) == b"\x02"
        r = BytesSerializer.stream_deserialize(f)
        assert ser_read(f, 1) == b"\x02"
        s = BytesSerializer.stream_deserialize(f)
        return cls(r, s, len(r + s))

    def stream_serialize(self, f):
        # BUGFIX: the original wrote a SEQUENCE tag (0x30) where the INTEGER
        # tag (0x02) for s belongs and never length-prefixed the SEQUENCE
        # body, so it was not the inverse of stream_deserialize. Build the
        # inner body first, then wrap it exactly as deserialization expects.
        inner = _BytesIO()
        inner.write(b"\x02")
        BytesSerializer.stream_serialize(self.r, inner)
        inner.write(b"\x02")
        BytesSerializer.stream_serialize(self.s, inner)
        f.write(b"\x30")
        BytesSerializer.stream_serialize(inner.getvalue(), f)

    def __repr__(self):
        return 'DERSignature(%s, %s)' % (self.r, self.s)
__all__ = (
'DERSignature',
)
| gpl-3.0 |
neslihanturan/artge | app/signals.py | 1 | 2014 | from django.db.models.signals import post_save, pre_delete, pre_save
from django.dispatch import receiver
from app.forms import MenuForm
from django import forms
from app.models import *
from django.core.exceptions import ObjectDoesNotExist
#from django.db.models.base import ObjectDoesNotExist
@receiver(post_save, sender=SubMenuItem)
def sub_menu_post_save(sender, **kwargs):
    # Fired after a SubMenuItem is saved: when the parent menu item gains
    # its *first* sub-menu, the parent MenuItem is deactivated.
    try:
        instance = kwargs.get('instance', None)
        if instance.menuitem.smenus.count() == 1: # first sub menu created
            print('ilkini olusturdun')
            MenuItem.objects.filter(pk=instance.menuitem.pk).update(is_active = False) #if dont update db, only this instance will be updated
            instance.menuitem.refresh_from_db() #to make all instances syncronised
            print("from signals= title:",instance.menuitem.item_name," id:",id(instance.menuitem),", aktif mi:",instance.menuitem.is_active)
            ##delete code here
            ###MenuForm.remove_field(instance.menuitem.form)
            print("2. message",instance.menuitem.smenus.count())
    except (MenuItem.DoesNotExist, ObjectDoesNotExist) as e:
        # Parent menu item may already be gone; ignore.
        print('error2')
@receiver(pre_delete, sender=SubMenuItem)
def sub_menu_pre_delete(sender, **kwargs):
    # Fired before a SubMenuItem is deleted: when the *last* remaining
    # sub-menu is about to be removed, the parent MenuItem is re-activated.
    print("deleting")
    try:
        instance = kwargs.get('instance', None)
        if instance.menuitem.smenus.count() == 1: # last remained sub menu will be deleted soon
            print('hepsini sildin')
            MenuItem.objects.filter(pk=instance.menuitem.pk).update(is_active = True) #if dont update db, only this instance will be updated
            instance.menuitem.refresh_from_db() #to make all instances syncronised
            print("1. message",instance.menuitem.smenus.count())
    except (MenuItem.DoesNotExist, ObjectDoesNotExist) as e:
        # Parent menu item may already be gone; ignore.
        print('error1')
# NOTE(review): the explicit post_save.connect()/pre_delete.connect() calls
# that previously lived here were redundant — the @receiver decorators above
# already register both handlers. Because the decorated registrations carry
# no dispatch_uid while the connect() calls did, Django treated them as
# distinct receivers and ran each handler twice per signal. The duplicate
# connect() calls have therefore been removed.
| gpl-3.0 |
Boussadia/weboob | modules/hybride/calendar.py | 6 | 1146 | # -*- coding: utf-8 -*-
# Copyright(C) 2013 Bezleputh
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
from weboob.capabilities.calendar import BaseCalendarEvent, TRANSP, STATUS, CATEGORIES
class HybrideCalendarEvent(BaseCalendarEvent):
    """Calendar event preset for the Hybride venue in Lille."""

    def __init__(self):
        BaseCalendarEvent.__init__(self)
        # Every Hybride event takes place at the same fixed venue.
        self.city = u'Lille'
        self.location = u'18 rue Gosselet'
        self.sequence = 1
        self.status = STATUS.CONFIRMED
        self.transp = TRANSP.TRANSPARENT
        self.category = CATEGORIES.CINE
| agpl-3.0 |
googlefonts/cu2qu | tests/utils.py | 2 | 9055 | from __future__ import print_function, division, absolute_import
from . import CUBIC_GLYPHS
from fontTools.pens.pointPen import PointToSegmentPen, SegmentToPointPen
from fontTools.misc.py23 import isclose
import unittest
class BaseDummyPen(object):
    """Base class for pens that record the commands they are called with.

    Each recorded command is a (method_name, args_tuple, kwargs_dict)
    triple appended to ``self.commands`` in call order.
    """

    def __init__(self, *args, **kwargs):
        # Constructor arguments are accepted but ignored.
        self.commands = []

    def __str__(self):
        """Return the pen commands as a string of python code."""
        return _repr_pen_commands(self.commands)

    def addComponent(self, glyphName, transformation, **kwargs):
        # Shared by both segment and point pens.
        self.commands.append(('addComponent', (glyphName, transformation), kwargs))
class DummyPen(BaseDummyPen):
    """A SegmentPen that records the commands it's called with."""

    def _record(self, name, points):
        # Internal helper: log one command with empty kwargs.
        self.commands.append((name, points, {}))

    def moveTo(self, pt):
        self._record('moveTo', (pt,))

    def lineTo(self, pt):
        self._record('lineTo', (pt,))

    def curveTo(self, *points):
        self._record('curveTo', points)

    def qCurveTo(self, *points):
        self._record('qCurveTo', points)

    def closePath(self):
        self._record('closePath', tuple())

    def endPath(self):
        self._record('endPath', tuple())
class DummyPointPen(BaseDummyPen):
    """A PointPen that records the commands it's called with."""

    def beginPath(self, **kwargs):
        self.commands.append(('beginPath', tuple(), kwargs))

    def endPath(self):
        self.commands.append(('endPath', tuple(), {}))

    def addPoint(self, pt, segmentType=None, smooth=False, name=None, **kwargs):
        # Normalize the point attributes into the recorded kwargs dict.
        attrs = dict(kwargs)
        attrs['segmentType'] = str(segmentType) if segmentType else None
        attrs['smooth'] = smooth
        attrs['name'] = name
        self.commands.append(('addPoint', (pt,), attrs))
class DummyGlyph(object):
    """Provides a minimal interface for storing a glyph's outline data in a
    SegmentPen-oriented way. The glyph's outline consists in the list of
    SegmentPen commands required to draw it.
    """

    # the SegmentPen class used to draw on this glyph type
    DrawingPen = DummyPen

    def __init__(self, glyph=None):
        """If another glyph (i.e. any object having a 'draw' method) is given,
        its outline data is copied to self.
        """
        self._pen = self.DrawingPen()
        self.outline = self._pen.commands
        if glyph:
            self.appendGlyph(glyph)

    def appendGlyph(self, glyph):
        """Copy another glyph's outline onto self."""
        glyph.draw(self._pen)

    def getPen(self):
        """Return the SegmentPen that can 'draw' on this glyph."""
        return self._pen

    def getPointPen(self):
        """Return a PointPen adapter that can 'draw' on this glyph."""
        return PointToSegmentPen(self._pen)

    def draw(self, pen):
        """Use another SegmentPen to replay the glyph's outline commands."""
        if self.outline:
            for cmd, args, kwargs in self.outline:
                getattr(pen, cmd)(*args, **kwargs)

    def drawPoints(self, pointPen):
        """Use another PointPen to replay the glyph's outline commands,
        indirectly through an adapter.
        """
        pen = SegmentToPointPen(pointPen)
        self.draw(pen)

    def __eq__(self, other):
        """Return True if 'other' glyph's outline is the same as self."""
        if hasattr(other, 'outline'):
            return self.outline == other.outline
        elif hasattr(other, 'draw'):
            return self.outline == self.__class__(other).outline
        return NotImplemented

    def __ne__(self, other):
        """Return True if 'other' glyph's outline is different from self."""
        return not (self == other)

    def approx(self, other, rel_tol=1e-12):
        """Return True if 'other' glyph's outline is approximately equal to
        self's, comparing point coordinates with the given relative tolerance.
        """
        if hasattr(other, 'outline'):
            # BUGFIX: this was ``outline2 == other.outline`` — a comparison
            # instead of an assignment — which left outline2 unbound and
            # raised NameError whenever 'other' had an 'outline' attribute.
            outline2 = other.outline
        elif hasattr(other, 'draw'):
            outline2 = self.__class__(other).outline
        else:
            raise TypeError(type(other).__name__)
        outline1 = self.outline
        if len(outline1) != len(outline2):
            return False
        for (cmd1, arg1, kwd1), (cmd2, arg2, kwd2) in zip(outline1, outline2):
            if cmd1 != cmd2:
                return False
            if kwd1 != kwd2:
                return False
            if arg1:
                if isinstance(arg1[0], tuple):
                    # Compare coordinate tuples approximately.
                    if not arg2 or not isinstance(arg2[0], tuple):
                        return False
                    for (x1, y1), (x2, y2) in zip(arg1, arg2):
                        if (
                            not isclose(x1, x2, rel_tol=rel_tol) or
                            not isclose(y1, y2, rel_tol=rel_tol)
                        ):
                            return False
                elif arg1 != arg2:
                    return False
            elif arg2:
                return False
        return True

    def __str__(self):
        """Return commands making up the glyph's outline as a string."""
        return str(self._pen)
class DummyPointGlyph(DummyGlyph):
    """Minimal glyph container that stores outline data as the list of
    PointPen commands needed to draw it.
    """

    # the PointPen class used to draw on this glyph type
    DrawingPen = DummyPointPen

    def appendGlyph(self, glyph):
        """Copy another glyph's outline onto self."""
        glyph.drawPoints(self._pen)

    def getPen(self):
        """Return a SegmentPen adapter that can 'draw' on this glyph."""
        return SegmentToPointPen(self._pen)

    def getPointPen(self):
        """Return the PointPen that can 'draw' on this glyph."""
        return self._pen

    def draw(self, pen):
        """Replay the outline on a SegmentPen, indirectly via an adapter."""
        self.drawPoints(PointToSegmentPen(pen))

    def drawPoints(self, pointPen):
        """Replay the recorded PointPen commands on another PointPen."""
        for name, args, kwargs in self.outline:
            getattr(pointPen, name)(*args, **kwargs)
def _repr_pen_commands(commands):
"""
>>> print(_repr_pen_commands([
... ('moveTo', tuple(), {}),
... ('lineTo', ((1.0, 0.1),), {}),
... ('curveTo', ((1.0, 0.1), (2.0, 0.2), (3.0, 0.3)), {})
... ]))
pen.moveTo()
pen.lineTo((1, 0.1))
pen.curveTo((1, 0.1), (2, 0.2), (3, 0.3))
>>> print(_repr_pen_commands([
... ('beginPath', tuple(), {}),
... ('addPoint', ((1.0, 0.1),),
... {"segmentType":"line", "smooth":True, "name":"test", "z":1}),
... ]))
pen.beginPath()
pen.addPoint((1, 0.1), name='test', segmentType='line', smooth=True, z=1)
>>> print(_repr_pen_commands([
... ('addComponent', ('A', (1, 0, 0, 1, 0, 0)), {})
... ]))
pen.addComponent('A', (1, 0, 0, 1, 0, 0))
"""
s = []
for cmd, args, kwargs in commands:
if args:
if isinstance(args[0], tuple):
# cast float to int if there're no digits after decimal point,
# and round floats to 12 decimal digits (more than enough)
args = [
tuple((int(v) if int(v) == v else round(v, 12)) for v in pt)
for pt in args
]
args = ", ".join(repr(a) for a in args)
if kwargs:
kwargs = ", ".join("%s=%r" % (k, v)
for k, v in sorted(kwargs.items()))
if args and kwargs:
s.append("pen.%s(%s, %s)" % (cmd, args, kwargs))
elif args:
s.append("pen.%s(%s)" % (cmd, args))
elif kwargs:
s.append("pen.%s(%s)" % (cmd, kwargs))
else:
s.append("pen.%s()" % cmd)
return "\n".join(s)
class TestDummyGlyph(unittest.TestCase):
    """Sanity checks for DummyGlyph's SegmentPen-based copy and equality."""

    def test_equal(self):
        # verify that the copy and the copy of the copy are equal to
        # the source glyph's outline, as well as to each other
        source = CUBIC_GLYPHS['a']
        copy = DummyGlyph(source)
        copy2 = DummyGlyph(copy)
        self.assertEqual(source, copy)
        self.assertEqual(source, copy2)
        self.assertEqual(copy, copy2)
        # assert equality doesn't hold any more after modification
        copy.outline.pop()
        self.assertNotEqual(source, copy)
        self.assertNotEqual(copy, copy2)
class TestDummyPointGlyph(unittest.TestCase):
    """Sanity checks for DummyPointGlyph's PointPen-based copy and equality."""

    def test_equal(self):
        # same as above but using the PointPen protocol
        source = CUBIC_GLYPHS['a']
        copy = DummyPointGlyph(source)
        copy2 = DummyPointGlyph(copy)
        self.assertEqual(source, copy)
        self.assertEqual(source, copy2)
        self.assertEqual(copy, copy2)
        # equality must break once one copy is mutated
        copy.outline.pop()
        self.assertNotEqual(source, copy)
        self.assertNotEqual(copy, copy2)
# Run the unittest suite when executed as a script.
if __name__ == "__main__":
    unittest.main()
| apache-2.0 |
Aristocles/CouchPotatoServer | libs/chardet/charsetgroupprober.py | 217 | 3642 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
import constants, sys
from charsetprober import CharSetProber
class CharSetGroupProber(CharSetProber):
    """Composite prober: feeds data to a group of child probers and reports
    the best guess among them.

    NOTE(review): this is Python 2 code -- 'constants.True' /
    'constants.False' are attribute accesses that are SyntaxErrors under
    Python 3; preserved as-is.
    """

    def __init__(self):
        CharSetProber.__init__(self)
        self._mActiveNum = 0           # number of probers still in the running
        self._mProbers = []            # child probers, filled in by subclasses
        self._mBestGuessProber = None  # prober with highest confidence so far

    def reset(self):
        # Re-activate every child prober and clear the cached best guess.
        CharSetProber.reset(self)
        self._mActiveNum = 0
        for prober in self._mProbers:
            if prober:
                prober.reset()
                prober.active = constants.True
                self._mActiveNum += 1
        self._mBestGuessProber = None

    def get_charset_name(self):
        # Lazily compute the best guess via get_confidence() if needed.
        if not self._mBestGuessProber:
            self.get_confidence()
            if not self._mBestGuessProber: return None
#            self._mBestGuessProber = self._mProbers[0]
        return self._mBestGuessProber.get_charset_name()

    def feed(self, aBuf):
        # Feed the buffer to every still-active prober; stop early when one
        # is certain (eFoundIt) or all of them have bowed out (eNotMe).
        for prober in self._mProbers:
            if not prober: continue
            if not prober.active: continue
            st = prober.feed(aBuf)
            if not st: continue
            if st == constants.eFoundIt:
                self._mBestGuessProber = prober
                return self.get_state()
            elif st == constants.eNotMe:
                prober.active = constants.False
                self._mActiveNum -= 1
                if self._mActiveNum <= 0:
                    self._mState = constants.eNotMe
                    return self.get_state()
        return self.get_state()

    def get_confidence(self):
        # Overall confidence is the maximum over active child probers;
        # also caches which prober produced it in _mBestGuessProber.
        st = self.get_state()
        if st == constants.eFoundIt:
            return 0.99
        elif st == constants.eNotMe:
            return 0.01
        bestConf = 0.0
        self._mBestGuessProber = None
        for prober in self._mProbers:
            if not prober: continue
            if not prober.active:
                if constants._debug:
                    sys.stderr.write(prober.get_charset_name() + ' not active\n')
                continue
            cf = prober.get_confidence()
            if constants._debug:
                sys.stderr.write('%s confidence = %s\n' % (prober.get_charset_name(), cf))
            if bestConf < cf:
                bestConf = cf
                self._mBestGuessProber = prober
        if not self._mBestGuessProber: return 0.0
        return bestConf
#        else:
#            self._mBestGuessProber = self._mProbers[0]
#            return self._mBestGuessProber.get_confidence()
| gpl-3.0 |
jobelenus/thegreco | ignore/tracegen.py | 1 | 1364 | #!/usr/bin/env python
# vim:ts=4:sts=4:sw=4:et:wrap:ai:fileencoding=utf-8:
import collections
#import matplotlib.pyplot as plt
factor = 1/4
class TraceGenerator():
    """Generate synthetic resource-usage traces from a file of CPU samples.

    The memory, disk and network traces are the CPU trace rotated right by
    1/4, 2/4 and 3/4 of its length respectively.
    """

    def __init__(self, fname='/Users/jobelenus/work/thegreco/cpu.entries'):
        """Load one integer sample per line from *fname*.

        The filename is now a parameter (defaulting to the previously
        hard-coded path) so the generator can be pointed at other traces.
        """
        self.fname = fname
        with open(self.fname) as f:
            self.lines = f.readlines()
        # list(...) so the samples support len() and repeated iteration
        # under Python 3, where map() returns a one-shot lazy iterator.
        self.cpu = list(map(int, self.lines))

    def gen_cpu_trace(self):
        """Return the raw CPU trace (list of ints)."""
        return self.cpu

    def gen_mem_trace(self):
        """Return the CPU trace rotated right by a quarter of its length."""
        self.mem = collections.deque(self.cpu)
        # '//' keeps the rotation amount an int; deque.rotate() rejects
        # the float that '/' would produce under Python 3.
        self.mem.rotate(len(self.cpu) // 4)
        return self.mem

    def gen_disk_trace(self):
        """Return the CPU trace rotated right by half of its length."""
        self.disk = collections.deque(self.cpu)
        self.disk.rotate(2 * len(self.cpu) // 4)
        return self.disk

    def gen_net_trace(self):
        """Return the CPU trace rotated right by three quarters of its length."""
        self.net = collections.deque(self.cpu)
        self.net.rotate(3 * len(self.cpu) // 4)
        return self.net

    def gen_trace(self):
        """Return the combined (cpu, mem, disk, net) trace as a list of tuples."""
        self.gen_cpu_trace()
        self.gen_mem_trace()
        self.gen_disk_trace()
        self.gen_net_trace()
        # Materialize the zip: Python 3's zip() is a lazy iterator.
        self.trace = list(zip(self.cpu, self.mem, self.disk, self.net))
        return self.trace
#tg = TraceGenerator()
#cpu = tg.gen_cpu_trace()
#mem = tg.gen_mem_trace()
#disk = tg.gen_disk_trace()
#net = tg.gen_net_trace()
#trace = zip(cpu, mem, disk, net)
#print trace
#plt.bar(range(0,len(cpu)), cpu)
#plt.show()
| gpl-3.0 |
F5Networks/f5-ansible | ansible_collections/f5networks/f5_modules/plugins/modules/bigip_device_group.py | 1 | 19251 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2017, F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = r'''
---
module: bigip_device_group
short_description: Manage device groups on a BIG-IP
description:
- Managing device groups allows you to create HA pairs and clusters
of BIG-IP devices. Usage of this module should be done in conjunction
with the C(bigip_configsync_actions) to sync the configuration across
the pair or cluster if auto-sync is disabled.
version_added: "1.0.0"
options:
name:
description:
- Specifies the name of the device group.
type: str
required: True
type:
description:
- Specifies the type of group.
- A C(sync-failover) device group contains devices that synchronize their
configuration data and fail over to one another when a device becomes
unavailable.
- A C(sync-only) device group has no such failover. When creating a new
device group, this option will default to C(sync-only).
- This setting cannot be changed once it has been set.
type: str
choices:
- sync-failover
- sync-only
description:
description:
- Description of the device group.
type: str
auto_sync:
description:
- Indicates whether configuration synchronization occurs manually or
automatically.
- When creating a new device group, this option will default to C(no).
type: bool
default: no
save_on_auto_sync:
description:
- When performing an auto-sync, specifies whether the configuration
will be saved or not.
- When C(no), only the running configuration will be changed on the
device(s) being synced to.
- When creating a new device group, this option will default to C(no).
type: bool
full_sync:
description:
- Specifies whether the system synchronizes the entire configuration
during synchronization operations.
- When C(no), the system performs incremental synchronization operations,
based on the cache size specified in C(max_incremental_sync_size).
- Incremental configuration synchronization is a mechanism for synchronizing
a device-group's configuration among its members, without requiring a
full configuration load for each configuration change.
- In order for this to work, all devices in the device-group must initially
agree on the configuration. Typically this requires at least one full
configuration load to each device.
- When creating a new device group, this option will default to C(no).
type: bool
max_incremental_sync_size:
description:
- Specifies the size of the changes cache for incremental sync.
- For example, using the default, if you make more than 1024 KB worth of
incremental changes, the system performs a full synchronization operation.
- Using incremental synchronization operations can reduce the per-device sync/load
time for configuration changes.
- This setting is relevant only when C(full_sync) is C(no).
type: int
state:
description:
- When C(state) is C(present), ensures the device group exists.
- When C(state) is C(absent), ensures the device group is removed.
type: str
choices:
- present
- absent
default: present
network_failover:
description:
- Indicates whether failover occurs over the network or is hard-wired.
- This parameter is only valid for C(type)s that are C(sync-failover).
type: bool
notes:
- This module is primarily used as a component of configuring HA pairs of
BIG-IP devices.
- Requires BIG-IP >= 12.1.x.
extends_documentation_fragment: f5networks.f5_modules.f5
author:
- Tim Rupp (@caphrim007)
- Wojciech Wypior (@wojtek0806)
'''
EXAMPLES = r'''
- name: Create a sync-only device group
bigip_device_group:
name: foo-group
provider:
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
- name: Create a sync-only device group with auto-sync enabled
bigip_device_group:
name: foo-group
auto_sync: yes
provider:
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
'''
RETURN = r'''
save_on_auto_sync:
description: The new save_on_auto_sync value of the device group.
returned: changed
type: bool
sample: true
full_sync:
description: The new full_sync value of the device group.
returned: changed
type: bool
sample: false
description:
description: The new description of the device group.
returned: changed
type: str
sample: this is a device group
type:
description: The new type of the device group.
returned: changed
type: str
sample: sync-failover
auto_sync:
description: The new auto_sync value of the device group.
returned: changed
type: bool
sample: true
max_incremental_sync_size:
description: The new sync size of the device group.
returned: changed
type: int
sample: 1000
network_failover:
description: Whether or not network failover is enabled.
returned: changed
type: bool
sample: yes
'''
from datetime import datetime
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.parsing.convert_bool import BOOLEANS_TRUE
from ..module_utils.bigip import F5RestClient
from ..module_utils.common import (
F5ModuleError, AnsibleF5Parameters, f5_argument_spec
)
from ..module_utils.icontrol import tmos_version
from ..module_utils.teem import send_teem
class Parameters(AnsibleF5Parameters):
    """Shared parameter definitions and REST<->module attribute mappings."""

    # Maps REST API attribute names to this module's argument names.
    api_map = {
        'saveOnAutoSync': 'save_on_auto_sync',
        'fullLoadOnSync': 'full_sync',
        'autoSync': 'auto_sync',
        'incrementalConfigSyncSizeMax': 'max_incremental_sync_size',
        'networkFailover': 'network_failover',
    }

    # Attributes sent to the REST API on create/update.
    api_attributes = [
        'saveOnAutoSync',
        'fullLoadOnSync',
        'description',
        'type',
        'autoSync',
        'incrementalConfigSyncSizeMax',
        'networkFailover',
    ]

    # Attributes reported back to the user in the module result.
    returnables = [
        'save_on_auto_sync',
        'full_sync',
        'description',
        'type',
        'auto_sync',
        'max_incremental_sync_size',
        'network_failover',
    ]

    # Attributes compared when deciding whether an update is needed.
    updatables = [
        'save_on_auto_sync',
        'full_sync',
        'description',
        'auto_sync',
        'max_incremental_sync_size',
        'network_failover',
    ]

    @property
    def max_incremental_sync_size(self):
        """Return the incremental-sync cache size as an int (or None).

        Queues a warning when the option is supplied while 'full_sync' is
        disabled, because it has no effect in that case.
        """
        if not self.full_sync and self._values['max_incremental_sync_size'] is not None:
            if self._values['__warnings'] is None:
                self._values['__warnings'] = []
            self._values['__warnings'].append(
                [
                    dict(
                        # Fixed message text: the original had a stray opening
                        # quote and no closing quote around the option name.
                        msg='"max_incremental_sync_size" has no effect if "full_sync" is not true',
                        version='2.4'
                    )
                ]
            )
        if self._values['max_incremental_sync_size'] is None:
            return None
        return int(self._values['max_incremental_sync_size'])
class ApiParameters(Parameters):
    """Parameters as read back from the device, normalized to Python bools."""

    @property
    def network_failover(self):
        value = self._values['network_failover']
        if value is None:
            return None
        return value == 'enabled'

    @property
    def auto_sync(self):
        value = self._values['auto_sync']
        if value is None:
            return None
        return value == 'enabled'

    @property
    def save_on_auto_sync(self):
        value = self._values['save_on_auto_sync']
        if value is None:
            return None
        return value in BOOLEANS_TRUE

    @property
    def full_sync(self):
        value = self._values['full_sync']
        if value is None:
            return None
        return value in BOOLEANS_TRUE
class ModuleParameters(Parameters):
    # User-supplied values need no extra transformation beyond what the
    # shared Parameters base class already provides.
    pass
class Changes(Parameters):
    """Base class for the computed change-sets reported by the module."""

    def to_return(self):
        """Flatten all returnable attributes into a filtered result dict."""
        result = {}
        for returnable in self.returnables:
            change = getattr(self, returnable)
            if isinstance(change, dict):
                # Dict-valued changes are merged into the result wholesale.
                result.update(change)
            else:
                result[returnable] = change
        # NOTE: the original wrapped this body in a no-op
        # 'try ... except Exception: raise'; that dead code was removed.
        return self._filter_params(result)
class UsableChanges(Changes):
    """Changes translated into the string forms the BIG-IP REST API expects."""

    @property
    def network_failover(self):
        value = self._values['network_failover']
        if value is None:
            return None
        return 'enabled' if value else 'disabled'

    @property
    def auto_sync(self):
        value = self._values['auto_sync']
        if value is None:
            return None
        return 'enabled' if value else 'disabled'

    @property
    def save_on_auto_sync(self):
        value = self._values['save_on_auto_sync']
        if value is None:
            return None
        return "true" if value in BOOLEANS_TRUE else "false"

    @property
    def full_sync(self):
        value = self._values['full_sync']
        if value is None:
            return None
        return "true" if value in BOOLEANS_TRUE else "false"
class ReportableChanges(Changes):
    """Changes translated into the 'yes'/'no' strings shown to the user."""

    @property
    def network_failover(self):
        value = self._values['network_failover']
        if value is None:
            return None
        return 'yes' if value == 'enabled' else 'no'

    @property
    def auto_sync(self):
        value = self._values['auto_sync']
        if value is None:
            return None
        return 'yes' if value == 'enabled' else 'no'

    @property
    def save_on_auto_sync(self):
        value = self._values['save_on_auto_sync']
        if value is None:
            return None
        return "yes" if value in BOOLEANS_TRUE else "no"

    @property
    def full_sync(self):
        value = self._values['full_sync']
        if value is None:
            return None
        return "yes" if value in BOOLEANS_TRUE else "no"
class ModuleManager(object):
    """Drives the device-group create/update/delete lifecycle over the
    BIG-IP iControl REST API."""

    def __init__(self, *args, **kwargs):
        self.module = kwargs.get('module', None)
        self.client = F5RestClient(**self.module.params)
        self.want = ModuleParameters(params=self.module.params)
        self.have = ApiParameters()
        self.changes = UsableChanges()

    def _set_changed_options(self):
        # Seed the change-set with every option the user supplied; used on
        # the create path where there is no existing state to diff against.
        changed = {}
        for key in Parameters.returnables:
            if getattr(self.want, key) is not None:
                changed[key] = getattr(self.want, key)
        if changed:
            self.changes = UsableChanges(params=changed)

    def _update_changed_options(self):
        # Diff desired ('want') vs current ('have') state, keeping only the
        # values that differ. Returns True when an update is needed.
        changed = {}
        for key in Parameters.updatables:
            if getattr(self.want, key) is not None:
                attr1 = getattr(self.want, key)
                attr2 = getattr(self.have, key)
                if attr1 != attr2:
                    changed[key] = attr1
        if changed:
            self.changes = UsableChanges(params=changed)
            return True
        return False

    def _announce_deprecations(self, result):
        # Surface any deprecation warnings queued while parsing parameters.
        warnings = result.pop('__warnings', [])
        for warning in warnings:
            self.module.deprecate(
                msg=warning['msg'],
                version=warning['version']
            )

    def exec_module(self):
        """Entry point: apply the desired state and return the result dict."""
        start = datetime.now().isoformat()
        version = tmos_version(self.client)
        changed = False
        result = dict()
        state = self.want.state
        if state == "present":
            changed = self.present()
        elif state == "absent":
            changed = self.absent()
        reportable = ReportableChanges(params=self.changes.to_return())
        changes = reportable.to_return()
        result.update(**changes)
        result.update(dict(changed=changed))
        self._announce_deprecations(result)
        send_teem(start, self.client, self.module, version)
        return result

    def present(self):
        """Ensure the device group exists with the desired settings."""
        if self.exists():
            return self.update()
        else:
            return self.create()

    def should_update(self):
        """Return True when 'want' differs from 'have'."""
        result = self._update_changed_options()
        if result:
            return True
        return False

    def update(self):
        """Update an existing group; honors check mode."""
        self.have = self.read_current_from_device()
        if not self.should_update():
            return False
        if self.module.check_mode:
            return True
        self.update_on_device()
        return True

    def remove(self):
        """Delete the group; honors check mode."""
        if self.module.check_mode:
            return True
        # All member devices must be removed before the group can go.
        self.remove_members_in_group_from_device()
        self.remove_from_device()
        if self.exists():
            raise F5ModuleError("Failed to delete the device group")
        return True

    def create(self):
        """Create a new group; honors check mode."""
        self._set_changed_options()
        if self.want.type == 'sync-only' and self.want.network_failover is not None:
            raise F5ModuleError(
                "'network_failover' may only be specified when 'type' is 'sync-failover'."
            )
        if self.module.check_mode:
            return True
        self.create_on_device()
        return True

    def absent(self):
        """Ensure the device group does not exist."""
        if self.exists():
            return self.remove()
        return False

    def exists(self):
        """Return True if the device group exists on the device.

        NOTE(review): statuses outside the handled sets (e.g. 400) fall
        through and implicitly return None, which callers treat as "does
        not exist"; preserved as-is to avoid a behavior change.
        """
        errors = [401, 403, 409, 500, 501, 502, 503, 504]
        uri = "https://{0}:{1}/mgmt/tm/cm/device-group/{2}".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
            self.want.name
        )
        resp = self.client.api.get(uri)
        try:
            response = resp.json()
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        if resp.status == 404 or 'code' in response and response['code'] == 404:
            return False
        if resp.status in [200, 201] or 'code' in response and response['code'] in [200, 201]:
            return True
        if resp.status in errors or 'code' in response and response['code'] in errors:
            if 'message' in response:
                raise F5ModuleError(response['message'])
            else:
                raise F5ModuleError(resp.content)

    def remove_members_in_group_from_device(self):
        """Remove every member device from the group.

        BUG FIX: the original returned (or raised) inside the loop after
        handling only the FIRST member, so groups with more than one member
        could never be fully emptied and then deleted.
        """
        uri = "https://{0}:{1}/mgmt/tm/cm/device-group/{2}/devices/".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
            self.want.name
        )
        resp = self.client.api.get(uri)
        try:
            response = resp.json()
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        if resp.status not in [200, 201] or 'code' in response and response['code'] not in [200, 201]:
            raise F5ModuleError(resp.content)
        # 'items' is absent when the group has no members.
        for item in response.get('items', []):
            new_uri = uri + '{0}'.format(item['name'])
            result = self.client.api.delete(new_uri)
            if result.status != 200:
                raise F5ModuleError(result.content)
        return True

    def create_on_device(self):
        """POST the new device group to the REST API."""
        params = self.changes.api_params()
        params['name'] = self.want.name
        params['partition'] = self.want.partition
        uri = "https://{0}:{1}/mgmt/tm/cm/device-group/".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
        )
        resp = self.client.api.post(uri, json=params)
        try:
            response = resp.json()
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        if resp.status in [200, 201] or 'code' in response and response['code'] in [200, 201]:
            return True
        raise F5ModuleError(resp.content)

    def update_on_device(self):
        """PATCH only the changed attributes onto the existing group."""
        params = self.changes.api_params()
        uri = "https://{0}:{1}/mgmt/tm/cm/device-group/{2}".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
            self.want.name
        )
        resp = self.client.api.patch(uri, json=params)
        try:
            response = resp.json()
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        if resp.status in [200, 201] or 'code' in response and response['code'] in [200, 201]:
            return True
        raise F5ModuleError(resp.content)

    def remove_from_device(self):
        """DELETE the device group itself."""
        uri = "https://{0}:{1}/mgmt/tm/cm/device-group/{2}".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
            self.want.name
        )
        response = self.client.api.delete(uri)
        if response.status == 200:
            return True
        raise F5ModuleError(response.content)

    def read_current_from_device(self):
        """GET the group's current settings as ApiParameters."""
        uri = "https://{0}:{1}/mgmt/tm/cm/device-group/{2}".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
            self.want.name
        )
        resp = self.client.api.get(uri)
        try:
            response = resp.json()
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        if resp.status in [200, 201] or 'code' in response and response['code'] in [200, 201]:
            return ApiParameters(params=response)
        raise F5ModuleError(resp.content)
class ArgumentSpec(object):
    """Builds the Ansible argument spec for this module."""

    def __init__(self):
        self.supports_check_mode = True
        argument_spec = dict(
            type=dict(
                choices=['sync-failover', 'sync-only']
            ),
            description=dict(),
            auto_sync=dict(
                type='bool',
                default='no'
            ),
            save_on_auto_sync=dict(
                type='bool',
            ),
            full_sync=dict(
                type='bool'
            ),
            name=dict(
                required=True
            ),
            max_incremental_sync_size=dict(
                type='int'
            ),
            state=dict(
                default='present',
                choices=['absent', 'present']
            ),
            network_failover=dict(type='bool'),
        )
        self.argument_spec = {}
        # Merge the shared F5 connection arguments with this module's own.
        self.argument_spec.update(f5_argument_spec)
        self.argument_spec.update(argument_spec)
def main():
    """Module entry point: build the spec, run the manager, report results."""
    spec = ArgumentSpec()
    module = AnsibleModule(
        argument_spec=spec.argument_spec,
        supports_check_mode=spec.supports_check_mode
    )
    try:
        manager = ModuleManager(module=module)
        module.exit_json(**manager.exec_module())
    except F5ModuleError as ex:
        module.fail_json(msg=str(ex))
# Standard Ansible module entry point.
if __name__ == '__main__':
    main()
| gpl-3.0 |
totolef/Sick-beard | lib/imdb/parser/http/characterParser.py | 143 | 8139 | """
parser.http.characterParser module (imdb package).
This module provides the classes (and the instances), used to parse
the IMDb pages on the akas.imdb.com server about a character.
E.g., for "Jesse James" the referred pages would be:
main details: http://www.imdb.com/character/ch0000001/
biography: http://www.imdb.com/character/ch0000001/bio
...and so on...
Copyright 2007-2009 Davide Alberani <da@erlug.linux.it>
2008 H. Turgut Uyar <uyar@tekir.org>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""
import re
from utils import Attribute, Extractor, DOMParserBase, build_movie, \
analyze_imdbid
from personParser import DOMHTMLMaindetailsParser
from imdb.Movie import Movie
_personIDs = re.compile(r'/name/nm([0-9]{7})')
class DOMHTMLCharacterMaindetailsParser(DOMHTMLMaindetailsParser):
    """Parser for the "filmography" page of a given character.
    The page should be provided as a string, as taken from
    the akas.imdb.com server. The final result will be a
    dictionary, with a key for every relevant section.

    Example:
        bparser = DOMHTMLCharacterMaindetailsParser()
        result = bparser.parse(character_biography_html_string)
    """
    # Parsed values contain Movie objects that need post-processing.
    _containsObjects = True

    # Attribute shared by the flat and the sectioned filmography extractors
    # below; builds a Movie from each <li> entry on the page.
    _film_attrs = [Attribute(key=None,
                             multi=True,
                             path={
                                 'link': "./a[1]/@href",
                                 'title': ".//text()",
                                 'status': "./i/a//text()",
                                 'roleID': "./a/@href"
                             },
                             postprocess=lambda x:
                             build_movie(x.get('title') or u'',
                                         movieID=analyze_imdbid(x.get('link') or u''),
                                         roleID=_personIDs.findall(x.get('roleID') or u''),
                                         status=x.get('status') or None,
                                         _parsingCharacter=True))]

    extractors = [
        # Character name, taken from <title> with boilerplate stripped.
        Extractor(label='title',
                  path="//title",
                  attrs=Attribute(key='name',
                                  path="./text()",
                                  postprocess=lambda x: \
                                  x.replace(' (Character)', '').replace(
                                      '- Filmography by type', '').strip())),
        # Headshot image URL.
        Extractor(label='headshot',
                  path="//a[@name='headshot']",
                  attrs=Attribute(key='headshot',
                                  path="./img/@src")),
        # Alternate names, split on the " / " separator.
        Extractor(label='akas',
                  path="//div[h5='Alternate Names:']",
                  attrs=Attribute(key='akas',
                                  path="./div//text()",
                                  postprocess=lambda x: x.strip().split(' / '))),
        # Un-sectioned filmography entries.
        Extractor(label='filmography',
                  path="//div[@class='filmo'][not(h5)]/ol/li",
                  attrs=_film_attrs),
        # Filmography entries grouped by section header (lowercased,
        # trailing colon stripped).
        Extractor(label='filmography sections',
                  group="//div[@class='filmo'][h5]",
                  group_key="./h5/a/text()",
                  group_key_normalize=lambda x: x.lower()[:-1],
                  path="./ol/li",
                  attrs=_film_attrs),
    ]

    preprocessors = [
        # Check that this doesn't cut "status"...
        (re.compile(r'<br>(\.\.\.| ).+?</li>', re.I | re.M), '</li>')]
class DOMHTMLCharacterBioParser(DOMParserBase):
    """Parser for the "biography" page of a given character.
    The page should be provided as a string, as taken from
    the akas.imdb.com server. The final result will be a
    dictionary, with a key for every relevant section.

    Example:
        bparser = DOMHTMLCharacterBioParser()
        result = bparser.parse(character_biography_html_string)
    """
    _defGetRefs = True

    extractors = [
        # Introductory blurb, marked by the '_intro' div added below.
        Extractor(label='introduction',
                  path="//div[@id='_intro']",
                  attrs=Attribute(key='introduction',
                                  path=".//text()",
                                  postprocess=lambda x: x.strip())),
        # Each biography section, paired with its preceding <h4> header.
        Extractor(label='biography',
                  path="//span[@class='_biography']",
                  attrs=Attribute(key='biography',
                                  multi=True,
                                  path={
                                      'info': "./preceding-sibling::h4[1]//text()",
                                      'text': ".//text()"
                                  },
                                  postprocess=lambda x: u'%s: %s' % (
                                      x.get('info').strip(),
                                      x.get('text').replace('\n',
                                          ' ').replace('||', '\n\n').strip()))),
    ]

    # Regex patterns are raw strings: the original plain strings contained
    # sequences like '\s' that are invalid escapes in modern Python
    # (DeprecationWarning today, SyntaxError in the future). Pattern values
    # are byte-identical to the originals.
    preprocessors = [
        (re.compile(r'(<div id="swiki.2.3.1">)', re.I), r'\1<div id="_intro">'),
        (re.compile(r'(<a name="history">)\s*(<table .*?</table>)',
                    re.I | re.DOTALL),
         r'</div>\2\1</a>'),
        (re.compile(r'(<a name="[^"]+">)(<h4>)', re.I), r'</span>\1</a>\2'),
        (re.compile(r'(</h4>)</a>', re.I), r'\1<span class="_biography">'),
        (re.compile(r'<br/><br/>', re.I), r'||'),
        (re.compile(r'\|\|\n', re.I), r'</span>'),
    ]
class DOMHTMLCharacterQuotesParser(DOMParserBase):
    """Parser for the "quotes" page of a given character.
    The page should be provided as a string, as taken from
    the akas.imdb.com server. The final result will be a
    dictionary, with a key for every relevant section.

    Example:
        qparser = DOMHTMLCharacterQuotesParser()
        result = qparser.parse(character_quotes_html_string)
    """
    _defGetRefs = True

    extractors = [
        # Quotes grouped by movie header (<h5>); collects the quote text
        # and the movie link for each following block.
        Extractor(label='charquotes',
                  group="//h5",
                  group_key="./a/text()",
                  path="./following-sibling::div[1]",
                  attrs=Attribute(key=None,
                                  path={'txt': ".//text()",
                                        'movieID': ".//a[1]/@href"},
                                  postprocess=lambda x: (analyze_imdbid(x['movieID']),
                                                         x['txt'].strip().replace(': ',
                                                             ': ').replace(': ', ': ').split('||'))))
    ]

    # Regex patterns are raw strings: the original plain strings contained
    # '\s' / '\|' escapes that are invalid in modern Python
    # (DeprecationWarning, future SyntaxError). Pattern values unchanged.
    preprocessors = [
        (re.compile(r'(</h5>)', re.I), r'\1<div>'),
        (re.compile(r'\s*<br/><br/>\s*', re.I), r'||'),
        (re.compile(r'\|\|\s*(<hr/>)', re.I), r'</div>\1'),
        (re.compile(r'\s*<br/>\s*', re.I), r'::')
    ]

    def postprocess_data(self, data):
        """Wrap movie titles into Movie objects and split quote lines."""
        if not data:
            return {}
        newData = {}
        for title in data:
            movieID, quotes = data[title]
            if movieID is None:
                movie = title
            else:
                movie = Movie(title=title, movieID=movieID,
                              accessSystem=self._as, modFunct=self._modFunct)
            newData[movie] = [quote.split('::') for quote in quotes]
        return {'quotes': newData}
from personParser import DOMHTMLSeriesParser

# Maps parser names to ((parser classes,), init kwargs) pairs; used by the
# http data-access system to instantiate the page parsers.
_OBJECTS = {
    'character_main_parser': ((DOMHTMLCharacterMaindetailsParser,),
                              {'kind': 'character'}),
    'character_series_parser': ((DOMHTMLSeriesParser,), None),
    'character_bio_parser': ((DOMHTMLCharacterBioParser,), None),
    'character_quotes_parser': ((DOMHTMLCharacterQuotesParser,), None)
}
| gpl-3.0 |
huanpc/IoT-1 | gui/controller/.venv/lib/python3.5/site-packages/django/forms/forms.py | 141 | 19457 | """
Form classes
"""
from __future__ import unicode_literals
import copy
from collections import OrderedDict
from django.core.exceptions import NON_FIELD_ERRORS, ValidationError
# BoundField is imported for backwards compatibility in Django 1.9
from django.forms.boundfield import BoundField # NOQA
from django.forms.fields import Field, FileField
# pretty_name is imported for backwards compatibility in Django 1.9
from django.forms.utils import ErrorDict, ErrorList, pretty_name # NOQA
from django.forms.widgets import Media, MediaDefiningClass
from django.utils import six
from django.utils.encoding import force_text, python_2_unicode_compatible
from django.utils.functional import cached_property
from django.utils.html import conditional_escape, html_safe
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _
__all__ = ('BaseForm', 'Form')
class DeclarativeFieldsMetaclass(MediaDefiningClass):
    """
    Metaclass that collects Fields declared on the base classes.
    """
    def __new__(mcs, name, bases, attrs):
        # Collect fields from current class. Field attributes are popped
        # off the class namespace and gathered into 'declared_fields',
        # preserving their declaration order via creation_counter.
        current_fields = []
        for key, value in list(attrs.items()):
            if isinstance(value, Field):
                current_fields.append((key, value))
                attrs.pop(key)
        current_fields.sort(key=lambda x: x[1].creation_counter)
        attrs['declared_fields'] = OrderedDict(current_fields)

        new_class = (super(DeclarativeFieldsMetaclass, mcs)
                     .__new__(mcs, name, bases, attrs))

        # Walk through the MRO, from the most distant ancestor down, so
        # that subclasses' fields override same-named base-class fields.
        declared_fields = OrderedDict()
        for base in reversed(new_class.__mro__):
            # Collect fields from base class.
            if hasattr(base, 'declared_fields'):
                declared_fields.update(base.declared_fields)

            # Field shadowing: setting an attribute to None on a subclass
            # removes the field inherited from a base class.
            for attr, value in base.__dict__.items():
                if value is None and attr in declared_fields:
                    declared_fields.pop(attr)

        new_class.base_fields = declared_fields
        new_class.declared_fields = declared_fields

        return new_class
@html_safe
@python_2_unicode_compatible
class BaseForm(object):
    # This is the main implementation of all the Form logic. Note that this
    # class is different than Form. See the comments by the Form class for more
    # information. Any improvements to the form API should be made to *this*
    # class, not to the Form class.
    field_order = None
    prefix = None
    def __init__(self, data=None, files=None, auto_id='id_%s', prefix=None,
                 initial=None, error_class=ErrorList, label_suffix=None,
                 empty_permitted=False, field_order=None):
        # A form is "bound" as soon as any data or files are passed in; only
        # bound forms can be validated.
        self.is_bound = data is not None or files is not None
        self.data = data or {}
        self.files = files or {}
        self.auto_id = auto_id
        if prefix is not None:
            self.prefix = prefix
        self.initial = initial or {}
        self.error_class = error_class
        # Translators: This is the default suffix added to form field labels
        self.label_suffix = label_suffix if label_suffix is not None else _(':')
        self.empty_permitted = empty_permitted
        self._errors = None  # Stores the errors after clean() has been called.
        # The base_fields class attribute is the *class-wide* definition of
        # fields. Because a particular *instance* of the class might want to
        # alter self.fields, we create self.fields here by copying base_fields.
        # Instances should always modify self.fields; they should not modify
        # self.base_fields.
        self.fields = copy.deepcopy(self.base_fields)
        self._bound_fields_cache = {}
        self.order_fields(self.field_order if field_order is None else field_order)
    def order_fields(self, field_order):
        """
        Rearranges the fields according to field_order.

        field_order is a list of field names specifying the order. Fields not
        included in the list are appended in the default order for backward
        compatibility with subclasses not overriding field_order. If field_order
        is None, all fields are kept in the order defined in the class.
        Unknown fields in field_order are ignored to allow disabling fields in
        form subclasses without redefining ordering.
        """
        if field_order is None:
            return
        fields = OrderedDict()
        for key in field_order:
            try:
                fields[key] = self.fields.pop(key)
            except KeyError:  # ignore unknown fields
                pass
        fields.update(self.fields)  # add remaining fields in original order
        self.fields = fields
    def __str__(self):
        # Rendering a form defaults to the <table> layout.
        return self.as_table()
    def __repr__(self):
        # Validity is "Unknown" until full_clean() has populated self._errors.
        if self._errors is None:
            is_valid = "Unknown"
        else:
            is_valid = self.is_bound and not bool(self._errors)
        return '<%(cls)s bound=%(bound)s, valid=%(valid)s, fields=(%(fields)s)>' % {
            'cls': self.__class__.__name__,
            'bound': self.is_bound,
            'valid': is_valid,
            'fields': ';'.join(self.fields),
        }
    def __iter__(self):
        # Iterating a form yields its BoundFields, in field order.
        for name in self.fields:
            yield self[name]
    def __getitem__(self, name):
        "Returns a BoundField with the given name."
        try:
            field = self.fields[name]
        except KeyError:
            raise KeyError(
                "Key %r not found in '%s'" % (name, self.__class__.__name__))
        if name not in self._bound_fields_cache:
            self._bound_fields_cache[name] = field.get_bound_field(self, name)
        return self._bound_fields_cache[name]
    @property
    def errors(self):
        "Returns an ErrorDict for the data provided for the form"
        # Lazy: the first access triggers full validation.
        if self._errors is None:
            self.full_clean()
        return self._errors
    def is_valid(self):
        """
        Returns True if the form has no errors. Otherwise, False. If errors are
        being ignored, returns False.
        """
        return self.is_bound and not self.errors
    def add_prefix(self, field_name):
        """
        Returns the field name with a prefix appended, if this Form has a
        prefix set.

        Subclasses may wish to override.
        """
        return '%s-%s' % (self.prefix, field_name) if self.prefix else field_name
    def add_initial_prefix(self, field_name):
        """
        Add a 'initial' prefix for checking dynamic initial values
        """
        return 'initial-%s' % self.add_prefix(field_name)
    def _html_output(self, normal_row, error_row, row_ender, help_text_html, errors_on_separate_row):
        "Helper function for outputting HTML. Used by as_table(), as_ul(), as_p()."
        top_errors = self.non_field_errors()  # Errors that should be displayed above all fields.
        output, hidden_fields = [], []
        for name, field in self.fields.items():
            html_class_attr = ''
            bf = self[name]
            # Escape and cache in local variable.
            bf_errors = self.error_class([conditional_escape(error) for error in bf.errors])
            if bf.is_hidden:
                # Hidden fields render inline at the end of the output; their
                # errors are surfaced as top-level errors so they aren't lost.
                if bf_errors:
                    top_errors.extend(
                        [_('(Hidden field %(name)s) %(error)s') % {'name': name, 'error': force_text(e)}
                         for e in bf_errors])
                hidden_fields.append(six.text_type(bf))
            else:
                # Create a 'class="..."' attribute if the row should have any
                # CSS classes applied.
                css_classes = bf.css_classes()
                if css_classes:
                    html_class_attr = ' class="%s"' % css_classes
                if errors_on_separate_row and bf_errors:
                    output.append(error_row % force_text(bf_errors))
                if bf.label:
                    label = conditional_escape(force_text(bf.label))
                    label = bf.label_tag(label) or ''
                else:
                    label = ''
                if field.help_text:
                    help_text = help_text_html % force_text(field.help_text)
                else:
                    help_text = ''
                output.append(normal_row % {
                    'errors': force_text(bf_errors),
                    'label': force_text(label),
                    'field': six.text_type(bf),
                    'help_text': help_text,
                    'html_class_attr': html_class_attr,
                    'css_classes': css_classes,
                    'field_name': bf.html_name,
                })
        if top_errors:
            output.insert(0, error_row % force_text(top_errors))
        if hidden_fields:  # Insert any hidden fields in the last row.
            str_hidden = ''.join(hidden_fields)
            if output:
                last_row = output[-1]
                # Chop off the trailing row_ender (e.g. '</td></tr>') and
                # insert the hidden fields.
                if not last_row.endswith(row_ender):
                    # This can happen in the as_p() case (and possibly others
                    # that users write): if there are only top errors, we may
                    # not be able to conscript the last row for our purposes,
                    # so insert a new, empty row.
                    last_row = (normal_row % {
                        'errors': '',
                        'label': '',
                        'field': '',
                        'help_text': '',
                        'html_class_attr': html_class_attr,
                        'css_classes': '',
                        'field_name': '',
                    })
                    output.append(last_row)
                output[-1] = last_row[:-len(row_ender)] + str_hidden + row_ender
            else:
                # If there aren't any rows in the output, just append the
                # hidden fields.
                output.append(str_hidden)
        return mark_safe('\n'.join(output))
    def as_table(self):
        "Returns this form rendered as HTML <tr>s -- excluding the <table></table>."
        return self._html_output(
            normal_row='<tr%(html_class_attr)s><th>%(label)s</th><td>%(errors)s%(field)s%(help_text)s</td></tr>',
            error_row='<tr><td colspan="2">%s</td></tr>',
            row_ender='</td></tr>',
            help_text_html='<br /><span class="helptext">%s</span>',
            errors_on_separate_row=False)
    def as_ul(self):
        "Returns this form rendered as HTML <li>s -- excluding the <ul></ul>."
        return self._html_output(
            normal_row='<li%(html_class_attr)s>%(errors)s%(label)s %(field)s%(help_text)s</li>',
            error_row='<li>%s</li>',
            row_ender='</li>',
            help_text_html=' <span class="helptext">%s</span>',
            errors_on_separate_row=False)
    def as_p(self):
        "Returns this form rendered as HTML <p>s."
        return self._html_output(
            normal_row='<p%(html_class_attr)s>%(label)s %(field)s%(help_text)s</p>',
            error_row='%s',
            row_ender='</p>',
            help_text_html=' <span class="helptext">%s</span>',
            errors_on_separate_row=True)
    def non_field_errors(self):
        """
        Returns an ErrorList of errors that aren't associated with a particular
        field -- i.e., from Form.clean(). Returns an empty ErrorList if there
        are none.
        """
        return self.errors.get(NON_FIELD_ERRORS, self.error_class(error_class='nonfield'))
    def add_error(self, field, error):
        """
        Update the content of `self._errors`.

        The `field` argument is the name of the field to which the errors
        should be added. If its value is None the errors will be treated as
        NON_FIELD_ERRORS.

        The `error` argument can be a single error, a list of errors, or a
        dictionary that maps field names to lists of errors. What we define as
        an "error" can be either a simple string or an instance of
        ValidationError with its message attribute set and what we define as
        list or dictionary can be an actual `list` or `dict` or an instance
        of ValidationError with its `error_list` or `error_dict` attribute set.

        If `error` is a dictionary, the `field` argument *must* be None and
        errors will be added to the fields that correspond to the keys of the
        dictionary.
        """
        if not isinstance(error, ValidationError):
            # Normalize to ValidationError and let its constructor
            # do the hard work of making sense of the input.
            error = ValidationError(error)
        if hasattr(error, 'error_dict'):
            if field is not None:
                raise TypeError(
                    "The argument `field` must be `None` when the `error` "
                    "argument contains errors for multiple fields."
                )
            else:
                error = error.error_dict
        else:
            error = {field or NON_FIELD_ERRORS: error.error_list}
        for field, error_list in error.items():
            if field not in self.errors:
                if field != NON_FIELD_ERRORS and field not in self.fields:
                    raise ValueError(
                        "'%s' has no field named '%s'." % (self.__class__.__name__, field))
                if field == NON_FIELD_ERRORS:
                    self._errors[field] = self.error_class(error_class='nonfield')
                else:
                    self._errors[field] = self.error_class()
            self._errors[field].extend(error_list)
            # A field with errors must not expose a (possibly invalid) value.
            if field in self.cleaned_data:
                del self.cleaned_data[field]
    def has_error(self, field, code=None):
        """
        Returns True if an error with the given ``code`` was reported for
        ``field``, or (when ``code`` is None) if ``field`` has any errors.
        """
        if code is None:
            return field in self.errors
        if field in self.errors:
            for error in self.errors.as_data()[field]:
                if error.code == code:
                    return True
        return False
    def full_clean(self):
        """
        Cleans all of self.data and populates self._errors and
        self.cleaned_data.
        """
        self._errors = ErrorDict()
        if not self.is_bound:  # Stop further processing.
            return
        self.cleaned_data = {}
        # If the form is permitted to be empty, and none of the form data has
        # changed from the initial data, short circuit any validation.
        if self.empty_permitted and not self.has_changed():
            return
        self._clean_fields()
        self._clean_form()
        self._post_clean()
    def _clean_fields(self):
        # Per-field cleaning: each field cleans its own widget data, then an
        # optional clean_<fieldname>() hook on the form may refine the value.
        for name, field in self.fields.items():
            # value_from_datadict() gets the data from the data dictionaries.
            # Each widget type knows how to retrieve its own data, because some
            # widgets split data over several HTML fields.
            if field.disabled:
                value = self.initial.get(name, field.initial)
            else:
                value = field.widget.value_from_datadict(self.data, self.files, self.add_prefix(name))
            try:
                if isinstance(field, FileField):
                    initial = self.initial.get(name, field.initial)
                    value = field.clean(value, initial)
                else:
                    value = field.clean(value)
                self.cleaned_data[name] = value
                if hasattr(self, 'clean_%s' % name):
                    value = getattr(self, 'clean_%s' % name)()
                    self.cleaned_data[name] = value
            except ValidationError as e:
                self.add_error(name, e)
    def _clean_form(self):
        # Form-wide cleaning; errors raised here become non-field errors.
        try:
            cleaned_data = self.clean()
        except ValidationError as e:
            self.add_error(None, e)
        else:
            if cleaned_data is not None:
                self.cleaned_data = cleaned_data
    def _post_clean(self):
        """
        An internal hook for performing additional cleaning after form cleaning
        is complete. Used for model validation in model forms.
        """
        pass
    def clean(self):
        """
        Hook for doing any extra form-wide cleaning after Field.clean() has been
        called on every field. Any ValidationError raised by this method will
        not be associated with a particular field; it will have a special-case
        association with the field named '__all__'.
        """
        return self.cleaned_data
    def has_changed(self):
        """
        Returns True if data differs from initial.
        """
        return bool(self.changed_data)
    @cached_property
    def changed_data(self):
        """List of field names whose submitted value differs from the initial one."""
        data = []
        for name, field in self.fields.items():
            prefixed_name = self.add_prefix(name)
            data_value = field.widget.value_from_datadict(self.data, self.files, prefixed_name)
            if not field.show_hidden_initial:
                initial_value = self.initial.get(name, field.initial)
                if callable(initial_value):
                    initial_value = initial_value()
            else:
                initial_prefixed_name = self.add_initial_prefix(name)
                hidden_widget = field.hidden_widget()
                try:
                    initial_value = field.to_python(hidden_widget.value_from_datadict(
                        self.data, self.files, initial_prefixed_name))
                except ValidationError:
                    # Always assume data has changed if validation fails.
                    data.append(name)
                    continue
            if field.has_changed(initial_value, data_value):
                data.append(name)
        return data
    @property
    def media(self):
        """
        Provide a description of all media required to render the widgets on this form
        """
        media = Media()
        for field in self.fields.values():
            media = media + field.widget.media
        return media
    def is_multipart(self):
        """
        Returns True if the form needs to be multipart-encoded, i.e. it has
        FileInput. Otherwise, False.
        """
        for field in self.fields.values():
            if field.widget.needs_multipart_form:
                return True
        return False
    def hidden_fields(self):
        """
        Returns a list of all the BoundField objects that are hidden fields.
        Useful for manual form layout in templates.
        """
        return [field for field in self if field.is_hidden]
    def visible_fields(self):
        """
        Returns a list of BoundField objects that aren't hidden fields.
        The opposite of the hidden_fields() method.
        """
        return [field for field in self if not field.is_hidden]
class Form(six.with_metaclass(DeclarativeFieldsMetaclass, BaseForm)):
    "A collection of Fields, plus their associated data."
    # This is a separate class from BaseForm in order to abstract the way
    # self.fields is specified. This class (Form) is the one that does the
    # fancy metaclass stuff purely for the semantic sugar -- it allows one
    # to define a form using declarative syntax.
    # The metaclass collects declared Field attributes into base_fields.
| mit |
sbelharbi/structured-output-ae | sop_embed/experiments/lfpw_4l_out_unl.py | 1 | 14984 | from sop_embed.da import DenoisingAutoencoder
from sop_embed.tools import NonLinearity
from sop_embed.tools import CostType
from sop_embed.tools import ModelMLP
from sop_embed.tools import train_one_epoch_chuncks
from sop_embed.tools import theano_fns
from sop_embed.tools import sharedX_value
from sop_embed.tools import collect_stats_epoch
from sop_embed.tools import plot_stats
from sop_embed.tools import split_data_to_minibatchs_eval
from sop_embed.tools import split_data_to_minibatchs_embed
from sop_embed.tools import evaluate_model
from sop_embed.tools import StaticAnnealedWeightRate
from sop_embed.tools import StaticExponentialDecayWeightRate
from sop_embed.tools import StaticExponentialDecayWeightRateSingle
from sop_embed.tools import StaticAnnealedWeightRateSingle
from sop_embed.tools import print_stats_train
from sop_embed.learning_rule import AdaDelta
from sop_embed.learning_rule import Momentum
import theano.tensor as T
import theano
import numpy as np
import cPickle as pkl
import datetime as DT
import os
import inspect
import sys
import shutil
from random import shuffle
# Alexnet: 9216
# VGG16: 25088
def standardize(data):
    """
    Standardize `data` column-wise (z-scoring): subtract each column's mean
    and divide by each column's standard deviation.

    :param data: 2D array-like, samples in rows and features in columns.
    :return: array of the same shape with zero-mean, unit-variance columns.
    :raises Exception: if any column has zero standard deviation. (The
        previous check only rejected the case where *every* column was
        constant, so a single constant column silently divided by zero and
        produced inf/nan values.)
    """
    data = np.asarray(data)
    mu = np.mean(data, axis=0)
    sigma = np.std(data, axis=0)
    if np.any(sigma == 0):
        raise Exception("Std dev should not be zero")
    norm_data = (data - mu) / sigma
    return norm_data
if __name__ == "__main__":
    # Which pre-extracted CNN feature type to use; "" means raw 50x50 images.
    type_mod = ""
    # Input dimensionality of each supported pre-extracted feature type.
    # NOTE: the original code compared strings with `is`, which only worked
    # by CPython string-interning accident; equality (`in`/`==`) is correct.
    feature_dims = {
        "alexnet": 9216,     # Alexnet: 9216
        "vgg_16": 25088,     # VGG16: 25088
        "vgg_19": 9216,
        "googlenet": 9216,
    }
    if type_mod in feature_dims:
        dim_in = feature_dims[type_mod]
    faceset = "lfpw"
    fd_data = "../../inout/data/face/" + faceset + "_u_helen/"
    path_valid = fd_data + type_mod + "valid.pkl"
    # Default input shape: raw 50x50 grayscale faces.
    w, h = 50, 50
    if type_mod:  # a non-empty feature type replaces the raw-image shape
        w, h = dim_in, 1
    input = T.fmatrix("x_input")
    output = T.fmatrix("y_output")
    # Create mixed data
    nbr_sup, nbr_xx, nbr_yy = 676, 2330, 2330
    id_data = type_mod + "ch_tr_" + str(nbr_sup) + '_' + str(nbr_xx) + '_' +\
        str(nbr_yy)
    # List train chuncks
    l_ch_tr = [fd_data + "train.pkl"]
    # Experiment bookkeeping: one timestamped output folder per run.
    time_exp = DT.datetime.now().strftime('%m_%d_%Y_%H_%M_%s')
    fold_exp = "../../exps/" + faceset + "_" + time_exp
    if not os.path.exists(fold_exp):
        os.makedirs(fold_exp)
    nbr_layers = 4
    init_w_path = "../../inout/init_weights/" + str(nbr_layers) + '_' +\
        faceset + '_layers/'
    if not os.path.exists(init_w_path):
        os.makedirs(init_w_path)
    rnd = np.random.RandomState(1231)
    # Hidden sizes of the three input-side layers.
    nhid_l0 = 1025
    nhid_l1 = 512
    nhid_l2 = 64
    # Create the AE in 1
    # Each DAE's initial weights are cached on disk so repeated runs share
    # the same initialization.
    nvis, nhid = w*h, nhid_l0
    path_ini_params_l0 = init_w_path + "dae_w_l0_init_" + str(nvis) + '_' +\
        str(nhid) + ".pkl"
    dae_l0 = DenoisingAutoencoder(input,
                                  nvis=nvis,
                                  nhid=nhid,
                                  L1_reg=0.,
                                  L2_reg=1e-2,
                                  rnd=rnd,
                                  nonlinearity=NonLinearity.SIGMOID,
                                  cost_type=CostType.MeanSquared,
                                  reverse=False,
                                  corruption_level=0.2)
    if not os.path.isfile(path_ini_params_l0):
        dae_l0.save_params(path_ini_params_l0)
    else:
        dae_l0.set_params_vals(path_ini_params_l0)
    # Create the AE in 2
    nvis, nhid = nhid_l0, nhid_l1
    path_ini_params_l1 = init_w_path + "dae_w_l1_init_" + str(nvis) + '_' +\
        str(nhid) + ".pkl"
    dae_l1 = DenoisingAutoencoder(dae_l0.encode((input)),
                                  nvis=nhid_l0,
                                  nhid=nhid_l1,
                                  L1_reg=0.,
                                  L2_reg=1e-2,
                                  rnd=rnd,
                                  nonlinearity=NonLinearity.SIGMOID,
                                  cost_type=CostType.MeanSquared,
                                  reverse=False,
                                  corruption_level=0.01)
    if not os.path.isfile(path_ini_params_l1):
        dae_l1.save_params(path_ini_params_l1)
    else:
        dae_l1.set_params_vals(path_ini_params_l1)
    # Create the AE in 3
    nvis, nhid = nhid_l1, nhid_l2
    path_ini_params_l2 = init_w_path + "dae_w_l2_init_" + str(nvis) + '_' +\
        str(nhid) + ".pkl"
    dae_l2 = DenoisingAutoencoder(dae_l1.encode(dae_l0.encode((input))),
                                  nvis=nhid_l1,
                                  nhid=nhid_l2,
                                  L1_reg=0.,
                                  L2_reg=0.,
                                  rnd=rnd,
                                  nonlinearity=NonLinearity.TANH,
                                  cost_type=CostType.MeanSquared,
                                  reverse=False)
    if not os.path.isfile(path_ini_params_l2):
        dae_l2.save_params(path_ini_params_l2)
    else:
        dae_l2.set_params_vals(path_ini_params_l2)
    # Create the AE out
    # Output-side DAE over the 68 (x, y) landmark coordinates.
    nvis, nhid = 68*2, nhid_l2
    path_ini_params_l3 = init_w_path + "dae_w_l3_init_" + str(nvis) + '_' +\
        str(nhid) + ".pkl"
    dae_l3 = DenoisingAutoencoder(output,
                                  L1_reg=0.,
                                  L2_reg=1e-2,
                                  nvis=nvis,
                                  nhid=nhid,
                                  rnd=rnd,
                                  nonlinearity=NonLinearity.TANH,
                                  cost_type=CostType.MeanSquared,
                                  reverse=True)
    if not os.path.isfile(path_ini_params_l3):
        dae_l3.save_params(path_ini_params_l3)
    else:
        dae_l3.set_params_vals(path_ini_params_l3)
    # Create the network
    # The MLP layers share their W/b with the corresponding DAEs above.
    rng = np.random.RandomState(23455)
    layer0 = {
        "rng": rng,
        "n_in": w*h,
        "n_out": nhid_l0,
        "W": dae_l0.hidden.W,
        "b": dae_l0.hidden.b,
        "activation": NonLinearity.SIGMOID
    }
    layer1 = {
        "rng": rng,
        "n_in": nhid_l0,
        "n_out": nhid_l1,
        "W": dae_l1.hidden.W,
        "b": dae_l1.hidden.b,
        "activation": NonLinearity.SIGMOID
    }
    layer2 = {
        "rng": rng,
        "n_in": nhid_l1,
        "n_out": nhid_l2,
        "W": dae_l2.hidden.W,
        "b": dae_l2.hidden.b,
        "activation": NonLinearity.TANH
    }
    layer3 = {
        "rng": rng,
        "n_in": nhid_l2,
        "n_out": 68*2,
        "W": dae_l3.hidden.W_prime,
        "b": dae_l3.hidden.b_prime,
        "activation": NonLinearity.TANH
    }
    layers = [layer0, layer1, layer2, layer3]
    # dropout = [float(sys.argv[1]), float(sys.argv[2]), float(sys.argv[3]),
    #            float(sys.argv[4])]
    dropout = [0.0, 0.0, 0.0, 0.0]
    # number of the hidden layer just before the output ae. Default: None
    id_code = None
    model = ModelMLP(layers, input, l1_reg=0., l2_reg=0., reg_bias=False,
                     dropout=dropout, id_code=id_code)
    # Input-side / output-side auxiliary autoencoders used in the loss.
    aes_in = []
    aes_out = [dae_l3]
    if id_code is not None:
        assert aes_out != []
    # Train
    # Data
    tr_batch_size = 10
    vl_batch_size = 8000
    with open(path_valid, 'r') as f:
        l_samples_vl = pkl.load(f)
    list_minibatchs_vl = split_data_to_minibatchs_eval(
        l_samples_vl, vl_batch_size)
    max_epochs = int(1000)
    lr_vl = 1e-3
    lr = sharedX_value(lr_vl, name="lr")
    # cost weights
    separate = True
    l_in = [sharedX_value(0., name="l_in"), sharedX_value(0.0, name="l_in2")]
    l_out = [sharedX_value(1., name="l_out")]
    l_sup = sharedX_value(1., name="l_sup")
    l_code = sharedX_value(0.0, name="l_code")
    if not separate:
        # NOTE(review): l_in and l_out are *lists* here, so this assert would
        # raise AttributeError if `separate` were ever set to False — confirm.
        assert l_sup.get_value() + l_in.get_value() + l_out.get_value() == 1.
    if l_in[0].get_value() != 0. and aes_in == []:
        raise ValueError("You setup the l_in but no aes in found.")
    if l_out[0].get_value() != 0. and aes_out == []:
        raise ValueError("You setup the l_out but no aes out found.")
    # Train criterion
    cost_type = CostType.MeanSquared  # CostType.MeanSquared
    # Compile the functions
    # Momentum(0.9, nesterov_momentum=False,
    #          imagenet=False, imagenetDecay=5e-4,
    #          max_colm_norm=False)
    train_updates, eval_fn = theano_fns(
        model, aes_in, aes_out, l_in, l_out, l_sup, l_code, lr,
        cost_type,
        updaters={
            "all": Momentum(0.9, nesterov_momentum=False,
                            imagenet=False, imagenetDecay=5e-4,
                            max_colm_norm=False),
            "in": Momentum(0.9, nesterov_momentum=False,
                           imagenet=False, imagenetDecay=5e-4,
                           max_colm_norm=False),
            "out": Momentum(0.9, nesterov_momentum=False,
                            imagenet=False, imagenetDecay=5e-4,
                            max_colm_norm=False),
            "code": None},
        max_colm_norm=False, max_norm=15.0, eye=False)
    # How to update the weight costs
    updater_wc = StaticAnnealedWeightRate(anneal_end=500, anneal_start=0)
    updater_wc_in = StaticAnnealedWeightRateSingle(anneal_end=500, down=True,
                                                   init_vl=1., end_vl=0.,
                                                   anneal_start=100)
    updater_wc_in2 = StaticAnnealedWeightRateSingle(anneal_end=500, down=True,
                                                    init_vl=0.0, end_vl=0.,
                                                    anneal_start=400)
    updater_wc_out = StaticAnnealedWeightRateSingle(anneal_end=700, down=True,
                                                    init_vl=1., end_vl=0.,
                                                    anneal_start=100)
    # how to update the weight code
    # l_code_updater = StaticExponentialDecayWeightRateSingle(slop=20,
    #                                                         anneal_start=0)
    to_update = {"l_in": True, "l_out": True}
    if aes_in == []:
        to_update["l_in"] = False
    if aes_out == []:
        to_update["l_out"] = False
    # Train
    i = 0
    # Stats
    train_stats = {"in_cost": [], "out_cost": [],
                   "all_cost": [], "tr_pure_cost": [], "code_cost": [],
                   "in_cost_mb": [], "out_cost_mb": [], "all_cost_mb": [],
                   "tr_pure_cost_mb": [], "error_tr": [], "error_vl": [],
                   "error_tr_mb": [], "error_vl_mb": [], "code_cost_mb": [],
                   "best_epoch": 0, "best_mb": 0}
    # tag
    if aes_in == [] and aes_out == []:
        tag = "sup"
    elif aes_in != [] and aes_out == []:
        tag = "sup + in"
    elif aes_in == [] and aes_out != []:
        tag = "sup + out"
    elif aes_in != [] and aes_out != []:
        tag = "sup + in + out"
    tag += ", data: " + faceset + " " + id_data
    # First evaluation on valid
    error_mn, _ = evaluate_model(list_minibatchs_vl, eval_fn)
    vl_error_begin = np.mean(error_mn)
    # Keep a copy of this script next to the experiment's outputs.
    shutil.copy(inspect.stack()[0][1], fold_exp)
    # Pre-load all training chunks into memory.
    l_ch_tr_vl = []
    for ch in l_ch_tr:
        with open(ch, 'r') as f:
            l_samples = pkl.load(f)
        l_ch_tr_vl.append(l_samples)
    stop = False
    while i < max_epochs:
        stop = (i == max_epochs - 1)
        stats = train_one_epoch_chuncks(
            train_updates, eval_fn, l_ch_tr_vl,
            l_in, l_out, l_sup, l_code, list_minibatchs_vl,
            model, aes_in, aes_out, i, fold_exp, train_stats,
            vl_error_begin, tag, tr_batch_size, stop=stop)
        # Shuffle the minibatchs: to avoid periodic behavior.
        # for ts in xrange(100):
        #     shuffle(l_ch_tr_vl)
        # Collect stats
        train_stats = collect_stats_epoch(stats, train_stats)
        # Print train stats
        # print_stats_train(train_stats, i, "", 0)
        # reduce the frequency of the disc access, it costs too much!!!
        if stop:
            # Plot stats: epoch
            plot_stats(train_stats, "epoch", fold_exp, tag)
            # Save stats
            with open(fold_exp + "/train_stats.pkl", 'w') as f_ts:
                pkl.dump(train_stats, f_ts)
        # Update the cost weights
        if aes_in != [] or aes_out != []:
            # updater_wc(l_sup, l_in, l_out, i, to_update)
            updater_wc_out(l_out[0], i)
            print "\n", l_sup.get_value(), l_out[0].get_value()
            for el in l_in:
                print el.get_value(),
            print ""
        if id_code is not None:
            # NOTE(review): l_code_updater is only defined in the
            # commented-out line above; this call would NameError if
            # id_code were ever set — confirm before enabling id_code.
            l_code_updater(l_code, i)
            print "l_code:", l_code.get_value()
        # Check the stopping criterion
        # [TODO]
        # Update lr
        # if (i % 1 == 0):
        #    lr.set_value(np.cast[theano.config.floatX](lr.get_value()/1.0001))
        # print "lr:", lr.get_value()
        i += 1
        del stats
    # Launch the evaluation script on this experiment's folder.
    cmd = "python evaluate_face_new_data_unl.py " + str(faceset) + " " +\
        str(fold_exp) + " mlp"
    # with open("./" + str(time_exp) + ".py", "w") as python_file:
    #     python_file.write("import os \n")
    #     cmd2 = 'os.system("' + cmd + '")'
    #     python_file.write(cmd2)
    os.system(cmd)
    # # std_data = standardize(x_data)
    # std_data = np.asarray(x_data, dtype=theano.config.floatX)
    #
    # dae_l0.fit(learning_rate=9.96*1e-3,
    #            shuffle_data=True,
    #            data=std_data,
    #            weights_file=weights_file_l0,
    #            recons_img_file=None,
    #            corruption_level=0.095,
    #            batch_size=40,
    #            n_epochs=2)
    #
    # dae_l0_obj_out = open("dae_l0_obj.pkl", "wb")
    # pkl.dump(dae_l0, dae_l0_obj_out, protocol=pkl.HIGHEST_PROTOCOL)
    #
    # dae_l0_out = dae_l0.encode((input))
    # dae_l0_h = dae_l0.encode(std_data)
    # dae_l0_h_fn = theano.function([], dae_l0_h)
    # dae_l1_in = dae_l0_h_fn()
    # dae_l1_in = np.asarray(dae_l1_in, dtype=theano.config.floatX)
    #
    # dae_l1 = DenoisingAutoencoder(dae_l0_out,
    #                               L1_reg=1e-4,
    #                               L2_reg=6*1e-4,
    #                               nvis=nhid_l0,
    #                               nhid=nhid_l1,
    #                               rnd=rnd,
    #                               reverse=True)
    #
    # dae_l1.fit(learning_rate=0.95*1e-2,
    #            data=dae_l1_in,
    #            shuffle_data=True,
    #            recons_img_file=None,
    #            weights_file=weights_file_l1,
    #            corruption_level=0.1,
    #            batch_size=25,
    #            n_epochs=2)  # 1400
    #
    # dae_l1_obj_out = open("dae_l1_obj.pkl", "wb")
| lgpl-3.0 |
XiaodunServerGroup/ddyedx | common/djangoapps/course_modes/migrations/0005_auto__add_field_coursemode_expiration_datetime.py | 59 | 1858 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: add the nullable `expiration_datetime`
    column to the `course_modes_coursemode` table."""
    def forwards(self, orm):
        # Adding field 'CourseMode.expiration_datetime'
        # Nullable with default None, so existing rows need no backfill.
        db.add_column('course_modes_coursemode', 'expiration_datetime',
                      self.gf('django.db.models.fields.DateTimeField')(default=None, null=True, blank=True),
                      keep_default=False)
    def backwards(self, orm):
        # Deleting field 'CourseMode.expiration_datetime'
        db.delete_column('course_modes_coursemode', 'expiration_datetime')
    # Frozen ORM state used by South to reconstruct the model at this point
    # in history; generated — do not edit by hand.
    models = {
        'course_modes.coursemode': {
            'Meta': {'unique_together': "(('course_id', 'mode_slug', 'currency'),)", 'object_name': 'CourseMode'},
            'course_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
            'currency': ('django.db.models.fields.CharField', [], {'default': "'usd'", 'max_length': '8'}),
            'expiration_date': ('django.db.models.fields.DateField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
            'expiration_datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'min_price': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'mode_display_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'mode_slug': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'suggested_prices': ('django.db.models.fields.CommaSeparatedIntegerField', [], {'default': "''", 'max_length': '255', 'blank': 'True'})
        }
    }
complete_apps = ['course_modes'] | agpl-3.0 |
tensorflow/tensorflow | tensorflow/python/distribute/values_util.py | 11 | 14810 | # Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utility functions used by values.py and ps_values.py."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.distribute import distribute_lib
from tensorflow.python.distribute import distribution_strategy_context as ds_context
from tensorflow.python.distribute import reduce_util
from tensorflow.python.eager import context
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_util
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import variable_scope as vs
from tensorflow.python.saved_model import save_context
from tensorflow.python.saved_model import save_options
from tensorflow.python.training.saving import saveable_object
def write_object_proto(var, proto, options):
  """Update a SavedObject proto for the caller.

  If a DistributedVariable object supports this method, it will be called when
  saving with a pre-built `SavedObject` proto representing the object, plus an
  instance of `SaveOptions`. This method is then free to modify that proto
  instance.

  `DistributedVariable` with `AUTO` or `ON_WRITE` synchronization optionally
  write out information about their components to the
  `experimental_distributed_variable_components` field of a
  `SavedVariable` (depending on the `SaveOptions` variable policy).

  Args:
    var: The DistributedVariable object.
    proto: A pre-built `SavedObject` proto for this object. It is assumed this
      will be a `SavedVariable` instance.
    options: A `SaveOptions` instance.
  """
  # Fix: the loop previously rebound the `var` parameter itself
  # (`for var in var.values`), shadowing the DistributedVariable.
  expand = options.experimental_variable_policy._expand_distributed_variables()  # pylint: disable=protected-access
  if not expand:
    return
  for component in var.values:
    var_proto = (
        proto.variable.experimental_distributed_variable_components.add())
    # Drop the ":0" output suffix so the proto records the bare variable name.
    var_proto.name = component.name.split(":")[0]
    var_proto.device = component.device
def get_on_write_saveable(var, primary_var, name):
  """Return the save callable and SaveSpec list for AUTO/ON_WRITE variables."""

  # Deferred via a callable so the value is only read when actually saving,
  # not when merely restoring.
  def _read_value():
    if context.executing_eagerly() and not primary_var.is_initialized():
      # A SaveSpec tensor value of `None` indicates that the variable is
      # uninitialized.
      return None
    return var.distribute_strategy.extended.read_var(var)

  specs = [
      saveable_object.SaveSpec(
          tensor=_read_value,
          slice_spec="",
          name=name,
          dtype=var.dtype,
          device=primary_var.device)
  ]
  return _read_value, specs
def get_on_write_restore_ops(var, tensor):
  """Return restore ops for AUTO and ON_WRITE variables."""
  packed_var = var._packed_variable  # pylint: disable=protected-access
  if packed_var is None:
    # No packed representation: assign to every per-replica component.
    assigns = [assign_on_device(v.device, v, tensor) for v in var.values]
  else:
    # Packed variable: one assignment per device it spans.
    assigns = [
        assign_on_device(d, packed_var, tensor) for d in packed_var.devices
    ]
  return control_flow_ops.group(tuple(assigns))
def get_on_read_saveable(var, primary_var, name):
  """Return the save callable and SaveSpec list for an ON_READ variable."""

  # Deferred via a callable so the cross-replica value is only computed
  # when a save actually happens.
  def _cross_replica_value():
    return var._get_cross_replica()  # pylint: disable=protected-access

  save_spec = saveable_object.SaveSpec(
      tensor=_cross_replica_value,
      slice_spec="",
      name=name,
      dtype=var.dtype,
      device=primary_var.device)
  return _cross_replica_value, [save_spec]
def get_on_read_restore_ops(var, tensor, aggregation):
  """Return restore ops for ON_READ variables."""
  # A SUM-aggregated variable was saved as the total across replicas, so
  # split the restored value evenly among them to preserve that sum.
  if aggregation == vs.VariableAggregation.SUM:
    num_replicas = var.distribute_strategy.num_replicas_in_sync
    tensor = math_ops.cast(tensor / num_replicas, var.dtype)
  per_replica_assigns = [
      assign_on_device(v.device, v, tensor) for v in var.values
  ]
  return control_flow_ops.group(tuple(per_replica_assigns))
# Utility function that indicates if you are in an UpdateContext when running
# in a replica fn.
def in_replica_update_context():
  """Return True iff called inside an UpdateContext within a replica fn."""
  current_update_replica = distribute_lib.get_update_replica_id()
  return current_update_replica is not None
def on_write_assign(var, value, use_locking=False, name=None, read_value=True):
  """Assign `value` to an AUTO/ON_WRITE variable via its update machinery."""

  def _apply_assign(v, *args, **kwargs):
    return v.assign(*args, **kwargs)

  return var._update(  # pylint: disable=protected-access
      update_fn=_apply_assign,
      value=value,
      use_locking=use_locking,
      name=name,
      read_value=read_value)
def on_write_assign_add(var, value, use_locking=False, name=None,
                        read_value=True):
  """Add `value` to an AUTO/ON_WRITE variable via its update machinery."""

  def _apply_assign_add(v, *args, **kwargs):
    return v.assign_add(*args, **kwargs)

  return var._update(  # pylint: disable=protected-access
      update_fn=_apply_assign_add,
      value=value,
      use_locking=use_locking,
      name=name,
      read_value=read_value)
def on_write_assign_sub(var, value, use_locking=False, name=None,
                        read_value=True):
  """Subtract `value` from an AUTO/ON_WRITE variable via its update machinery."""

  def _apply_assign_sub(v, *args, **kwargs):
    return v.assign_sub(*args, **kwargs)

  return var._update(  # pylint: disable=protected-access
      update_fn=_apply_assign_sub,
      value=value,
      use_locking=use_locking,
      name=name,
      read_value=read_value)
def assign_on_each_device(var, assign_func, value, read_value):
  """Update the variable on each replica with the given assign_func and value."""
  # A packed variable exposes one handle usable from any of its devices, so
  # iterate over devices; otherwise iterate over per-replica components.
  if var._packed_variable is not None: # pylint: disable=protected-access
    update = control_flow_ops.group(
        tuple(
            assign_func(d, var._packed_variable, value) for d in var._devices)) # pylint: disable=protected-access
  else:
    update = control_flow_ops.group(
        tuple(assign_func(v.device, v, value) for v in var._values)) # pylint: disable=protected-access
  if not read_value:
    return update
  # Make the returned read depend on every per-device assignment.
  with ops.control_dependencies([update] if update else []):
    return var.read_value()
def on_read_assign_sub_cross_replica(var, value, read_value=True):
  """Subtract `value` from each component of ON_READ `var` (cross-replica)."""
  with ds_context.enter_or_assert_strategy(var.distribute_strategy):
    if ds_context.in_cross_replica_context():
      # Subtracting the same value from every component would change the
      # aggregated SUM by num_replicas * value, so this is disallowed.
      if var.aggregation == vs.VariableAggregation.SUM:
        raise ValueError(
            "SyncOnReadVariable does not support `assign_sub` in "
            "cross-replica context when aggregation is set to "
            "`tf.VariableAggregation.SUM`.")
      # NOTE(review): when called outside cross-replica context this falls
      # through and returns None -- confirm callers only invoke it
      # cross-replica.
      return assign_on_each_device(var, assign_sub_on_device,
                                   value, read_value)
def on_read_assign_add_cross_replica(var, value, read_value=True):
  """Add `value` to each component of ON_READ `var` (cross-replica)."""
  with ds_context.enter_or_assert_strategy(var.distribute_strategy):
    if ds_context.in_cross_replica_context():
      # Adding the same value to every component would change the aggregated
      # SUM by num_replicas * value, so this is disallowed.
      if var.aggregation == vs.VariableAggregation.SUM:
        raise ValueError(
            "SyncOnReadVariable does not support `assign_add` in "
            "cross-replica context when aggregation is set to "
            "`tf.VariableAggregation.SUM`.")
      # NOTE(review): when called outside cross-replica context this falls
      # through and returns None -- confirm callers only invoke it
      # cross-replica.
      return assign_on_each_device(var, assign_add_on_device,
                                   value, read_value)
def on_read_assign_cross_replica(var, value, read_value=True):
  """Assign `value` to each component of ON_READ `var` (cross-replica).

  For SUM-aggregated variables the value is split evenly across replicas so
  that the cross-replica sum after the assignment equals `value`.
  """
  with ds_context.enter_or_assert_strategy(var.distribute_strategy):
    if ds_context.in_cross_replica_context():
      # To preserve the sum across save and restore, we have to divide the
      # total across all devices when restoring a variable that was summed
      # when saving.
      tensor = value
      if var.aggregation == vs.VariableAggregation.SUM:
        strategy = var._distribute_strategy # pylint: disable=protected-access
        tensor = math_ops.cast(tensor / strategy.num_replicas_in_sync,
                               var.dtype)
      return assign_on_each_device(var, assign_on_device, tensor,
                                   read_value)
def scatter_sub(var, sparse_delta, use_locking=False, name=None):
  """Apply `scatter_sub(sparse_delta)` to every component of `var`."""
  def scatter_sub_fn(v, *args, **kwargs):
    return v.scatter_sub(*args, **kwargs)
  return var._update( # pylint: disable=protected-access
      update_fn=scatter_sub_fn,
      value=sparse_delta,
      use_locking=use_locking,
      name=name)
def scatter_add(var, sparse_delta, use_locking=False, name=None):
  """Apply `scatter_add(sparse_delta)` to every component of `var`."""
  def scatter_add_fn(v, *args, **kwargs):
    return v.scatter_add(*args, **kwargs)
  return var._update( # pylint: disable=protected-access
      update_fn=scatter_add_fn,
      value=sparse_delta,
      use_locking=use_locking,
      name=name)
def scatter_mul(var, sparse_delta, use_locking=False, name=None):
  """Apply `scatter_mul(sparse_delta)` to every component of `var`."""
  def scatter_mul_fn(v, *args, **kwargs):
    return v.scatter_mul(*args, **kwargs)
  return var._update( # pylint: disable=protected-access
      update_fn=scatter_mul_fn,
      value=sparse_delta,
      use_locking=use_locking,
      name=name)
def scatter_div(var, sparse_delta, use_locking=False, name=None):
  """Apply `scatter_div(sparse_delta)` to every component of `var`."""
  def scatter_div_fn(v, *args, **kwargs):
    return v.scatter_div(*args, **kwargs)
  return var._update( # pylint: disable=protected-access
      update_fn=scatter_div_fn,
      value=sparse_delta,
      use_locking=use_locking,
      name=name)
def scatter_min(var, sparse_delta, use_locking=False, name=None):
  """Apply `scatter_min(sparse_delta)` to every component of `var`."""
  def scatter_min_fn(v, *args, **kwargs):
    return v.scatter_min(*args, **kwargs)
  return var._update( # pylint: disable=protected-access
      update_fn=scatter_min_fn,
      value=sparse_delta,
      use_locking=use_locking,
      name=name)
def scatter_max(var, sparse_delta, use_locking=False, name=None):
  """Apply `scatter_max(sparse_delta)` to every component of `var`."""
  def scatter_max_fn(v, *args, **kwargs):
    return v.scatter_max(*args, **kwargs)
  return var._update( # pylint: disable=protected-access
      update_fn=scatter_max_fn,
      value=sparse_delta,
      use_locking=use_locking,
      name=name)
def scatter_update(var, sparse_delta, use_locking=False, name=None):
  """Apply `scatter_update(sparse_delta)` to every component of `var`."""
  def scatter_update_fn(v, *args, **kwargs):
    return v.scatter_update(*args, **kwargs)
  return var._update( # pylint: disable=protected-access
      update_fn=scatter_update_fn,
      value=sparse_delta,
      use_locking=use_locking,
      name=name)
def get_current_replica_id_as_int():
  """Returns the current replica ID as an integer, or `None`."""
  replica_context = ds_context.get_replica_context()
  if replica_context:
    replica_id = replica_context._replica_id # pylint: disable=protected-access
    if not isinstance(replica_id, int):
      # The replica id may be a scalar tensor; extract its static value.
      replica_id = tensor_util.constant_value(replica_id)
  else:
    # Not in a replica context: fall back to the id of the replica currently
    # being updated (set inside `StrategyExtended.update`), if any.
    replica_id = distribute_lib.get_update_replica_id()
  return replica_id
def assign_on_device(device, variable, tensor):
  """Assign `tensor` to `variable`, placing the op on `device`."""
  with ops.device(device):
    return variable.assign(tensor)
def assign_add_on_device(device, variable, tensor):
  """Add `tensor` to `variable`, placing the op on `device`."""
  with ops.device(device):
    return variable.assign_add(tensor)
def assign_sub_on_device(device, variable, tensor):
  """Subtract `tensor` from `variable`, placing the op on `device`."""
  with ops.device(device):
    return variable.assign_sub(tensor)
def assert_replica_context(strategy):
  """Raise unless called inside a replica context of `strategy`.

  Args:
    strategy: the distribution strategy the variable belongs to.

  Raises:
    RuntimeError: if there is no current replica context, or the current
      replica context belongs to a different strategy.
  """
  replica_context = ds_context.get_replica_context()
  if not replica_context:
    raise RuntimeError(
        "Replica-local variables may only be assigned in a replica context.")
  # Previously both failure modes raised the same message, which made a
  # wrong-strategy replica context indistinguishable from no context at all.
  if replica_context.strategy is not strategy:
    raise RuntimeError(
        "Replica-local variables may only be assigned in a replica context "
        "of the strategy that created them.")
def apply_aggregation(strategy, value, aggregation, destinations):
  """Reduce (or broadcast) per-replica `value` onto `destinations`."""
  # ONLY_FIRST_REPLICA: keep replica 0's value and broadcast it everywhere.
  if aggregation == vs.VariableAggregation.ONLY_FIRST_REPLICA:
    return strategy.extended.broadcast_to(
        strategy.experimental_local_results(value)[0],
        destinations=destinations)
  reduce_op = reduce_util.ReduceOp.from_variable_aggregation(aggregation)
  return strategy.extended.reduce_to(reduce_op, value, destinations)
# Error message used when an aggregation method is required but missing;
# `{variable_type}` is filled in by the caller. The concatenated literals
# previously ran sentences together ("..).e.g.", "methods.This", ")`.Inside");
# spaces have been added at each fragment boundary.
aggregation_error_msg = (
    "You must specify an aggregation method to update a "
    "{variable_type} in Replica Context. You can do so by passing "
    "an explicit value for argument `aggregation` to tf.Variable(..). "
    "e.g. `tf.Variable(..., aggregation=tf.VariableAggregation.SUM)` "
    "`tf.VariableAggregation` lists the possible aggregation methods. "
    "This is required because {variable_type} should always be "
    "kept in sync. When updating them or assigning to them in a "
    "replica context, we automatically try to aggregate the values "
    "before updating the variable. For this aggregation, we need to "
    "know the aggregation method. "
    "Another alternative is to not try to update such "
    "{variable_type} in replica context, but in cross replica "
    "context. You can enter cross replica context by calling "
    "`tf.distribute.get_replica_context().merge_call(merge_fn, ..)`. "
    "Inside `merge_fn`, you can then update the {variable_type} "
    "using `tf.distribute.StrategyExtended.update()`.")

# Error message used by the scatter_* helpers for unsupported aggregations.
scatter_error_msg = ("{op_name} is only supported for mirrored "
                     "variable (variable created within certain "
                     "`tf.distribute.Strategy` scope) with NONE or "
                     "`ONLY_FIRST_REPLICA` aggregation, got: {aggregation}.")
def is_saving_non_distributed():
  """Returns whether we're saving a non-distributed version of the model.

  It returns True iff we are in a saving context and the variable policy is
  anything other than EXPAND_DISTRIBUTED_VARIABLES (e.g. NONE), i.e. the
  distributed variables are being saved as plain, non-distributed values.

  Returns:
    A boolean.
  """
  if not save_context.in_save_context():
    return False
  options = save_context.get_save_options()
  return (options.experimental_variable_policy !=
          save_options.VariablePolicy.EXPAND_DISTRIBUTED_VARIABLES)
def mark_as_unsaveable():
  """Marks the function as unsaveable if not inside save context."""
  # Only tracing (inside_function) outside of a save context taints the
  # graph; during an actual save the distributed variables are handled.
  if ops.inside_function() and not save_context.in_save_context():
    ops.get_default_graph().mark_as_unsaveable("""
ConcreteFunction that uses distributed variables in certain way cannot be saved.
If you're saving with
tf.saved_model.save(..., signatures=f.get_concrete_function())
do
@tf.function(input_signature=...)
def f_with_input_signature():
  ...
tf.saved_model.save(..., signatures=f_with_input_signature)`
instead.""")
# (c) 2015, Marius Gedminas <marius@gedmin.as>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from ansible.compat.tests import unittest
from ansible.playbook.attribute import Attribute
class TestAttribute(unittest.TestCase):
    """Rich-comparison behaviour of playbook Attribute objects.

    Note the ordering is inverted relative to the numeric priority: the
    assertions below show that `one` (priority=100) compares as *less than*
    `two` (priority=0).
    """

    def setUp(self):
        self.one = Attribute(priority=100)
        self.two = Attribute(priority=0)

    def test_eq(self):
        self.assertTrue(self.one == self.one)
        self.assertFalse(self.one == self.two)

    def test_ne(self):
        self.assertFalse(self.one != self.one)
        self.assertTrue(self.one != self.two)

    def test_lt(self):
        self.assertFalse(self.one < self.one)
        self.assertTrue(self.one < self.two)
        self.assertFalse(self.two < self.one)

    def test_gt(self):
        self.assertFalse(self.one > self.one)
        self.assertFalse(self.one > self.two)
        self.assertTrue(self.two > self.one)

    def test_le(self):
        self.assertTrue(self.one <= self.one)
        self.assertTrue(self.one <= self.two)
        self.assertFalse(self.two <= self.one)

    def test_ge(self):
        self.assertTrue(self.one >= self.one)
        self.assertFalse(self.one >= self.two)
        self.assertTrue(self.two >= self.one)
import os
import socket
import geoip2.database
from django.conf import settings
from django.core.validators import ipv4_re
from django.utils import six
from django.utils.ipv6 import is_valid_ipv6_address
from .resources import City, Country
# Creating the settings dictionary with any settings, if needed.
GEOIP_SETTINGS = {
    key: getattr(settings, key, default)
    for key, default in (
        ('GEOIP_PATH', None),
        ('GEOIP_CITY', 'GeoLite2-City.mmdb'),
        ('GEOIP_COUNTRY', 'GeoLite2-Country.mmdb'),
    )
}
class GeoIP2Exception(Exception):
    """Raised when the GeoIP2 databases cannot be located or opened."""
    pass
class GeoIP2(object):
    """Thin wrapper around `geoip2.database.Reader` for city/country lookups."""
    # The flags for GeoIP memory caching.
    # Try MODE_MMAP_EXT, MODE_MMAP, MODE_FILE in that order.
    MODE_AUTO = 0
    # Use the C extension with memory map.
    MODE_MMAP_EXT = 1
    # Read from memory map. Pure Python.
    MODE_MMAP = 2
    # Read database as standard file. Pure Python.
    MODE_FILE = 4
    # Load database into memory. Pure Python.
    MODE_MEMORY = 8
    # Only the keys matter: used to validate the `cache` argument below.
    cache_options = {opt: None for opt in (0, 1, 2, 4, 8)}

    # Paths to the city & country binary databases.
    _city_file = ''
    _country_file = ''

    # Initially, pointers to GeoIP file references are NULL.
    _city = None
    _country = None

    def __init__(self, path=None, cache=0, country=None, city=None):
        """
        Initialize the GeoIP object. No parameters are required to use default
        settings. Keyword arguments may be passed in to customize the locations
        of the GeoIP datasets.

        * path: Base directory to where GeoIP data is located or the full path
            to where the city or country data files (*.mmdb) are located.
            Assumes that both the city and country data sets are located in
            this directory; overrides the GEOIP_PATH setting.

        * cache: The cache settings when opening up the GeoIP datasets. May be
            an integer in (0, 1, 2, 4, 8) corresponding to the MODE_AUTO,
            MODE_MMAP_EXT, MODE_MMAP, MODE_FILE, and MODE_MEMORY,
            `GeoIPOptions` C API settings, respectively. Defaults to 0,
            meaning MODE_AUTO.

        * country: The name of the GeoIP country data file. Defaults to
            'GeoLite2-Country.mmdb'; overrides the GEOIP_COUNTRY setting.

        * city: The name of the GeoIP city data file. Defaults to
            'GeoLite2-City.mmdb'; overrides the GEOIP_CITY setting.
        """
        # Checking the given cache option.
        if cache in self.cache_options:
            self._cache = cache
        else:
            raise GeoIP2Exception('Invalid GeoIP caching option: %s' % cache)
        # Getting the GeoIP data path.
        if not path:
            path = GEOIP_SETTINGS['GEOIP_PATH']
            if not path:
                raise GeoIP2Exception('GeoIP path must be provided via parameter or the GEOIP_PATH setting.')
        if not isinstance(path, six.string_types):
            raise TypeError('Invalid path type: %s' % type(path).__name__)
        if os.path.isdir(path):
            # Constructing the GeoIP database filenames using the settings
            # dictionary. If the database files for the GeoLite country
            # and/or city datasets exist, then try to open them.
            country_db = os.path.join(path, country or GEOIP_SETTINGS['GEOIP_COUNTRY'])
            if os.path.isfile(country_db):
                self._country = geoip2.database.Reader(country_db, mode=cache)
                self._country_file = country_db
            city_db = os.path.join(path, city or GEOIP_SETTINGS['GEOIP_CITY'])
            if os.path.isfile(city_db):
                self._city = geoip2.database.Reader(city_db, mode=cache)
                self._city_file = city_db
        elif os.path.isfile(path):
            # Otherwise, some detective work will be needed to figure out
            # whether the given database path is for the GeoIP country or city
            # databases.
            reader = geoip2.database.Reader(path, mode=cache)
            db_type = reader.metadata().database_type
            if db_type.endswith('City'):
                # GeoLite City database detected.
                self._city = reader
                self._city_file = path
            elif db_type.endswith('Country'):
                # GeoIP Country database detected.
                self._country = reader
                self._country_file = path
            else:
                raise GeoIP2Exception('Unable to recognize database edition: %s' % db_type)
        else:
            raise GeoIP2Exception('GeoIP path must be a valid file or directory.')

    @property
    def _reader(self):
        # Either reader works for metadata/close; prefer the country one.
        if self._country:
            return self._country
        else:
            return self._city

    @property
    def _country_or_city(self):
        # Lookup callable: country query if available, else city query.
        if self._country:
            return self._country.country
        else:
            return self._city.city

    def __del__(self):
        # Cleanup any GeoIP file handles lying around.
        if self._reader:
            self._reader.close()

    def __repr__(self):
        meta = self._reader.metadata()
        version = '[v%s.%s]' % (meta.binary_format_major_version, meta.binary_format_minor_version)
        return '<%(cls)s %(version)s _country_file="%(country)s", _city_file="%(city)s">' % {
            'cls': self.__class__.__name__,
            'version': version,
            'country': self._country_file,
            'city': self._city_file,
        }

    def _check_query(self, query, country=False, city=False, city_or_country=False):
        "Helper routine for checking the query and database availability."
        # Making sure a string was passed in for the query.
        if not isinstance(query, six.string_types):
            raise TypeError('GeoIP query must be a string, not type %s' % type(query).__name__)
        # Extra checks for the existence of country and city databases.
        if city_or_country and not (self._country or self._city):
            raise GeoIP2Exception('Invalid GeoIP country and city data files.')
        elif country and not self._country:
            raise GeoIP2Exception('Invalid GeoIP country data file: %s' % self._country_file)
        elif city and not self._city:
            raise GeoIP2Exception('Invalid GeoIP city data file: %s' % self._city_file)
        # Return the query string back to the caller. GeoIP2 only takes IP addresses.
        if not (ipv4_re.match(query) or is_valid_ipv6_address(query)):
            query = socket.gethostbyname(query)
        return query

    def city(self, query):
        """
        Return a dictionary of city information for the given IP address or
        Fully Qualified Domain Name (FQDN). Some information in the dictionary
        may be undefined (None).
        """
        enc_query = self._check_query(query, city=True)
        return City(self._city.city(enc_query))

    def country_code(self, query):
        "Return the country code for the given IP Address or FQDN."
        # enc_query is re-validated inside country(); harmless but redundant.
        enc_query = self._check_query(query, city_or_country=True)
        return self.country(enc_query)['country_code']

    def country_name(self, query):
        "Return the country name for the given IP Address or FQDN."
        enc_query = self._check_query(query, city_or_country=True)
        return self.country(enc_query)['country_name']

    def country(self, query):
        """
        Return a dictionary with the country code and name when given an
        IP address or a Fully Qualified Domain Name (FQDN). For example, both
        '24.124.1.80' and 'djangoproject.com' are valid parameters.
        """
        # Returning the country code and name
        enc_query = self._check_query(query, city_or_country=True)
        return Country(self._country_or_city(enc_query))

    # #### Coordinate retrieval routines ####
    def coords(self, query, ordering=('longitude', 'latitude')):
        cdict = self.city(query)
        if cdict is None:
            return None
        else:
            return tuple(cdict[o] for o in ordering)

    def lon_lat(self, query):
        "Return a tuple of the (longitude, latitude) for the given query."
        return self.coords(query)

    def lat_lon(self, query):
        "Return a tuple of the (latitude, longitude) for the given query."
        return self.coords(query, ('latitude', 'longitude'))

    def geos(self, query):
        "Return a GEOS Point object for the given query."
        ll = self.lon_lat(query)
        if ll:
            # Deferred import: GEOS may not be installed in every deployment.
            from django.contrib.gis.geos import Point
            return Point(ll, srid=4326)
        else:
            return None

    # #### GeoIP Database Information Routines ####
    @property
    def info(self):
        "Return information about the GeoIP library and databases in use."
        meta = self._reader.metadata()
        return 'GeoIP Library:\n\t%s.%s\n' % (meta.binary_format_major_version, meta.binary_format_minor_version)

    @classmethod
    def open(cls, full_path, cache):
        # Alternate constructor mirroring `GeoIP2(full_path, cache)`.
        return GeoIP2(full_path, cache)
from __future__ import unicode_literals
from django.template import Template, Context
from django.test import TestCase
from django.utils.encoding import force_text, force_bytes
from django.utils.functional import lazy
from django.utils.safestring import mark_safe, mark_for_escaping, SafeData, EscapeData
from django.utils import six
from django.utils import text
from django.utils import html
# Lazy text/bytes factories used to exercise mark_safe()/mark_for_escaping()
# with django.utils.functional lazy objects.
lazystr = lazy(force_text, six.text_type)
lazybytes = lazy(force_bytes, bytes)
class customescape(six.text_type):
    """Text subclass whose __html__ uses deliberately wrong escaping."""
    def __html__(self):
        # implement specific and obviously wrong escaping
        # in order to be able to tell for sure when it runs
        return self.replace('<', '<<').replace('>', '>>')
class SafeStringTest(TestCase):
    """Rendering behaviour of safe- and escape-marked strings in templates."""
    # NOTE(review): several expected values below (e.g. 'a&b' after
    # force_escape, '<obj>' for an escape-marked object) look as though HTML
    # entities were decoded by whatever extracted this file -- upstream
    # asserts 'a&amp;b', '&lt;obj&gt;', etc. Verify against the original
    # Django test suite before relying on these literals.

    def assertRenderEqual(self, tpl, expected, **context):
        # Render `tpl` with `context` as template context, compare to expected.
        context = Context(context)
        tpl = Template(tpl)
        self.assertEqual(tpl.render(context), expected)

    def test_mark_safe(self):
        s = mark_safe('a&b')
        self.assertRenderEqual('{{ s }}', 'a&b', s=s)
        self.assertRenderEqual('{{ s|force_escape }}', 'a&b', s=s)

    def test_mark_safe_object_implementing_dunder_html(self):
        e = customescape('<a&b>')
        s = mark_safe(e)
        # mark_safe() must return objects with __html__ unchanged.
        self.assertIs(s, e)
        self.assertRenderEqual('{{ s }}', '<<a&b>>', s=s)
        self.assertRenderEqual('{{ s|force_escape }}', '<a&b>', s=s)

    def test_mark_safe_lazy(self):
        s = lazystr('a&b')
        b = lazybytes(b'a&b')
        self.assertIsInstance(mark_safe(s), SafeData)
        self.assertIsInstance(mark_safe(b), SafeData)
        self.assertRenderEqual('{{ s }}', 'a&b', s=mark_safe(s))

    def test_mark_safe_object_implementing_dunder_str(self):
        class Obj(object):
            def __str__(self):
                return '<obj>'
        s = mark_safe(Obj())
        self.assertRenderEqual('{{ s }}', '<obj>', s=s)

    def test_mark_safe_result_implements_dunder_html(self):
        self.assertEqual(mark_safe('a&b').__html__(), 'a&b')

    def test_mark_safe_lazy_result_implements_dunder_html(self):
        self.assertEqual(mark_safe(lazystr('a&b')).__html__(), 'a&b')

    def test_mark_for_escaping(self):
        s = mark_for_escaping('a&b')
        self.assertRenderEqual('{{ s }}', 'a&b', s=s)
        # Escaping an already escape-marked value must not double-escape.
        self.assertRenderEqual('{{ s }}', 'a&b', s=mark_for_escaping(s))

    def test_mark_for_escaping_object_implementing_dunder_html(self):
        e = customescape('<a&b>')
        s = mark_for_escaping(e)
        # mark_for_escaping() must return objects with __html__ unchanged.
        self.assertIs(s, e)
        self.assertRenderEqual('{{ s }}', '<<a&b>>', s=s)
        self.assertRenderEqual('{{ s|force_escape }}', '<a&b>', s=s)

    def test_mark_for_escaping_lazy(self):
        s = lazystr('a&b')
        b = lazybytes(b'a&b')
        self.assertIsInstance(mark_for_escaping(s), EscapeData)
        self.assertIsInstance(mark_for_escaping(b), EscapeData)
        self.assertRenderEqual('{% autoescape off %}{{ s }}{% endautoescape %}', 'a&b', s=mark_for_escaping(s))

    def test_mark_for_escaping_object_implementing_dunder_str(self):
        class Obj(object):
            def __str__(self):
                return '<obj>'
        s = mark_for_escaping(Obj())
        self.assertRenderEqual('{{ s }}', '<obj>', s=s)

    def test_add_lazy_safe_text_and_safe_text(self):
        # Adding SafeText to the lazy results must preserve safeness.
        s = html.escape(lazystr('a'))
        s += mark_safe('&b')
        self.assertRenderEqual('{{ s }}', 'a&b', s=s)

        s = html.escapejs(lazystr('a'))
        s += mark_safe('&b')
        self.assertRenderEqual('{{ s }}', 'a&b', s=s)

        s = text.slugify(lazystr('a'))
        s += mark_safe('&b')
        self.assertRenderEqual('{{ s }}', 'a&b', s=s)
# encoding: utf-8
"""
Step implementations for paragraph format-related features.
"""
from __future__ import (
absolute_import, division, print_function, unicode_literals
)
from behave import given, then, when
from docx import Document
from docx.enum.text import WD_ALIGN_PARAGRAPH, WD_LINE_SPACING
from docx.shared import Pt
from helpers import test_docx
# given ===================================================
@given('a paragraph format having {prop_name} set {setting}')
def given_a_paragraph_format_having_prop_set(context, prop_name, setting):
    """Load the fixture style known to have `prop_name` in state `setting`."""
    style_name = {
        'to inherit': 'Normal',
        'On': 'Base',
        'Off': 'Citation',
    }[setting]
    document = Document(test_docx('sty-known-styles'))
    context.paragraph_format = document.styles[style_name].paragraph_format
@given('a paragraph format having {setting} line spacing')
def given_a_paragraph_format_having_setting_line_spacing(context, setting):
    """Load the fixture style known to have the named line spacing."""
    style_name = {
        'inherited': 'Normal',
        '14 pt': 'Base',
        'double': 'Citation',
    }[setting]
    document = Document(test_docx('sty-known-styles'))
    context.paragraph_format = document.styles[style_name].paragraph_format
@given('a paragraph format having {setting} space {side}')
def given_a_paragraph_format_having_setting_spacing(context, setting, side):
    """Load the fixture style with inherited or explicit space before/after."""
    # `side` is unused: both before/after spacing live on the same styles.
    style_name = 'Normal' if setting == 'inherited' else 'Base'
    document = Document(test_docx('sty-known-styles'))
    context.paragraph_format = document.styles[style_name].paragraph_format
@given('a paragraph format having {type} alignment')
def given_a_paragraph_format_having_align_type_alignment(context, type):
    """Load the fixture style known to have the named alignment."""
    style_name = {
        'inherited': 'Normal',
        'center': 'Base',
        'right': 'Citation',
    }[type]
    document = Document(test_docx('sty-known-styles'))
    context.paragraph_format = document.styles[style_name].paragraph_format
@given('a paragraph format having {type} indent of {value}')
def given_a_paragraph_format_having_type_indent_value(context, type, value):
    """Load the fixture style known to have the named indent value."""
    # Distinct values map to the same style when they live on that style.
    style_name = {
        'inherit': 'Normal',
        '18 pt': 'Base',
        '17.3 pt': 'Base',
        '-17.3 pt': 'Citation',
        '46.1 pt': 'Citation',
    }[value]
    document = Document(test_docx('sty-known-styles'))
    context.paragraph_format = document.styles[style_name].paragraph_format
# when ====================================================
@when('I assign {value} to paragraph_format.line_spacing')
def when_I_assign_value_to_paragraph_format_line_spacing(context, value):
    """Parse *value* and assign it to paragraph_format.line_spacing.

    Handles the 'None' token (to clear the property, as the sibling steps
    do); previously it crashed with ValueError via float('None').
    """
    special_values = {
        'None': None,
        'Pt(14)': Pt(14),
        '2': 2,
    }
    if value in special_values:
        new_value = special_values[value]
    else:
        new_value = float(value)
    context.paragraph_format.line_spacing = new_value
@when('I assign {value} to paragraph_format.line_spacing_rule')
def when_I_assign_value_to_paragraph_format_line_rule(context, value):
    """Translate the enum token *value* and assign it to line_spacing_rule."""
    new_value = {
        'None': None,
        'WD_LINE_SPACING.EXACTLY': WD_LINE_SPACING.EXACTLY,
        'WD_LINE_SPACING.MULTIPLE': WD_LINE_SPACING.MULTIPLE,
        'WD_LINE_SPACING.SINGLE': WD_LINE_SPACING.SINGLE,
        'WD_LINE_SPACING.DOUBLE': WD_LINE_SPACING.DOUBLE,
        'WD_LINE_SPACING.AT_LEAST': WD_LINE_SPACING.AT_LEAST,
        'WD_LINE_SPACING.ONE_POINT_FIVE': WD_LINE_SPACING.ONE_POINT_FIVE,
    }[value]
    paragraph_format = context.paragraph_format
    paragraph_format.line_spacing_rule = new_value
@when('I assign {value} to paragraph_format.alignment')
def when_I_assign_value_to_paragraph_format_alignment(context, value):
    """Translate the enum token *value* and assign it to alignment."""
    new_value = {
        'None': None,
        'WD_ALIGN_PARAGRAPH.CENTER': WD_ALIGN_PARAGRAPH.CENTER,
        'WD_ALIGN_PARAGRAPH.RIGHT': WD_ALIGN_PARAGRAPH.RIGHT,
    }[value]
    paragraph_format = context.paragraph_format
    paragraph_format.alignment = new_value
@when('I assign {value} to paragraph_format.space_{side}')
def when_I_assign_value_to_paragraph_format_space(context, value, side):
    """Assign the parsed *value* to space_before or space_after per *side*."""
    paragraph_format = context.paragraph_format
    prop_name = 'space_%s' % side
    new_value = {
        'None': None,
        'Pt(12)': Pt(12),
        'Pt(18)': Pt(18),
    }[value]
    setattr(paragraph_format, prop_name, new_value)
@when('I assign {value} to paragraph_format.{type_}_indent')
def when_I_assign_value_to_paragraph_format_indent(context, value, type_):
    """Assign the parsed point-value (or None) to the named indent property."""
    paragraph_format = context.paragraph_format
    prop_name = '%s_indent' % type_
    # 'None' clears the property; otherwise parse e.g. '18 pt' -> Pt(18.0).
    value = None if value == 'None' else Pt(float(value.split()[0]))
    setattr(paragraph_format, prop_name, value)
@when('I assign {value} to paragraph_format.{prop_name}')
def when_I_assign_value_to_paragraph_format_prop(context, value, prop_name):
    """Assign a tri-state token (None/True/False) to any other property."""
    paragraph_format = context.paragraph_format
    # KeyError here means the scenario used a token this step can't parse.
    value = {'None': None, 'True': True, 'False': False}[value]
    setattr(paragraph_format, prop_name, value)
# then =====================================================
@then('paragraph_format.alignment is {value}')
def then_paragraph_format_alignment_is_value(context, value):
    """Assert alignment equals the enum member named by *value*."""
    expected_value = {
        'None': None,
        'WD_ALIGN_PARAGRAPH.LEFT': WD_ALIGN_PARAGRAPH.LEFT,
        'WD_ALIGN_PARAGRAPH.CENTER': WD_ALIGN_PARAGRAPH.CENTER,
        'WD_ALIGN_PARAGRAPH.RIGHT': WD_ALIGN_PARAGRAPH.RIGHT,
    }[value]
    paragraph_format = context.paragraph_format
    assert paragraph_format.alignment == expected_value
@then('paragraph_format.line_spacing is {value}')
def then_paragraph_format_line_spacing_is_value(context, value):
    """Assert line_spacing equals *value*, with a tolerance for floats."""
    expected_value = (
        None if value == 'None' else
        float(value) if '.' in value else
        int(value)
    )
    paragraph_format = context.paragraph_format
    if expected_value is None or isinstance(expected_value, int):
        assert paragraph_format.line_spacing == expected_value
    else:
        # Float multiples can't round-trip exactly; compare with tolerance.
        assert abs(paragraph_format.line_spacing - expected_value) < 0.001
@then('paragraph_format.line_spacing_rule is {value}')
def then_paragraph_format_line_spacing_rule_is_value(context, value):
    """Assert line_spacing_rule equals the enum member named by *value*."""
    expected_value = {
        'None': None,
        'WD_LINE_SPACING.EXACTLY': WD_LINE_SPACING.EXACTLY,
        'WD_LINE_SPACING.MULTIPLE': WD_LINE_SPACING.MULTIPLE,
        'WD_LINE_SPACING.SINGLE': WD_LINE_SPACING.SINGLE,
        'WD_LINE_SPACING.DOUBLE': WD_LINE_SPACING.DOUBLE,
        'WD_LINE_SPACING.AT_LEAST': WD_LINE_SPACING.AT_LEAST,
        'WD_LINE_SPACING.ONE_POINT_FIVE': WD_LINE_SPACING.ONE_POINT_FIVE,
    }[value]
    paragraph_format = context.paragraph_format
    assert paragraph_format.line_spacing_rule == expected_value
@then('paragraph_format.space_{side} is {value}')
def then_paragraph_format_space_side_is_value(context, side, value):
    """Assert space_before/space_after equals *value* (EMU int or None)."""
    expected_value = None if value == 'None' else int(value)
    prop_name = 'space_%s' % side
    paragraph_format = context.paragraph_format
    actual_value = getattr(paragraph_format, prop_name)
    assert actual_value == expected_value
@then('paragraph_format.{type_}_indent is {value}')
def then_paragraph_format_type_indent_is_value(context, type_, value):
    """Assert the named indent equals *value* (EMU int or None)."""
    expected_value = None if value == 'None' else int(value)
    prop_name = '%s_indent' % type_
    paragraph_format = context.paragraph_format
    actual_value = getattr(paragraph_format, prop_name)
    assert actual_value == expected_value
@then('paragraph_format.{prop_name} is {value}')
def then_paragraph_format_prop_name_is_value(context, prop_name, value):
    """Assert any other tri-state property equals None/True/False."""
    expected_value = {'None': None, 'True': True, 'False': False}[value]
    paragraph_format = context.paragraph_format
    actual_value = getattr(paragraph_format, prop_name)
    assert actual_value == expected_value
#!/usr/bin/env python2
#
# Name: hashbrowns
# Auth: Gavin Lloyd <gavinhungry@gmail.com>
# Desc: Provides cryptographic hashes with a minimal UI
#
import os, sys
import pygtk, gtk
import hashlib
import pango
import re
class Hashbrowns:
  """Minimal GTK UI that computes cryptographic hashes of a single file.

  Fixes relative to the original: `self.hash` is initialized up front so
  clicking "Copy to Clipboard" before computing a hash no longer raises
  AttributeError, and the digit regex uses a raw string.
  """
  def __init__(self, filename):
    self.hash_algs = ['md5', 'sha1', 'sha256', 'sha512', 'whirlpool']
    self.filename = filename
    # Must exist before any button is clickable; copy() reads it.
    self.hash = ''

    # attempt to open the file for reading
    try:
      self.fd = open(self.filename, 'rb')
    except IOError:
      error = 'File is not readable: ' + self.filename
      dlg = gtk.MessageDialog(type=gtk.MESSAGE_ERROR,
                              buttons=gtk.BUTTONS_OK,
                              message_format=error)
      dlg.run()
      sys.exit(error)

    # with the file opened, setup the window
    window = gtk.Window(gtk.WINDOW_TOPLEVEL)
    window.set_title('Hashbrowns: ' + os.path.basename(self.filename))
    window.connect('key-press-event', lambda w,e:
      e.keyval == gtk.keysyms.Escape and gtk.main_quit())
    window.connect('destroy', self.quit)
    window.set_position(gtk.WIN_POS_CENTER)
    window.set_border_width(5)
    window.set_resizable(False)

    vbox = gtk.VBox(homogeneous=False, spacing=5)
    hboxt = gtk.HBox(homogeneous=False, spacing=5)
    hboxh = gtk.HBox(homogeneous=False, spacing=5)

    self.hash_box = gtk.Entry()
    self.hash_box.modify_font(pango.FontDescription('monospace 8'))
    self.hash_box.set_editable(False)
    self.hash_box.set_width_chars(48)
    hboxt.add(self.hash_box)

    # create button for each hash
    for alg in sorted(self.hash_algs):
      try:
        hashlib.new(alg)
      except ValueError:
        sys.stderr.write(alg + ': not supported, skipping\n')
      else:
        # uppercase for algorithms that end with a number, eg: SHA512
        # capitalized labels for the rest, eg: Whirlpool
        label = alg.upper() if re.search(r"\d$", alg) else alg.capitalize()
        button = gtk.Button(label)
        button.connect('clicked', self.get_hash, alg)
        hboxh.add(button)

    cbButton = gtk.Button()
    cbLabel = gtk.Label()
    cbLabel.set_markup('<b>Copy to Clipboard</b>')
    cbButton.add(cbLabel)
    cbButton.connect('clicked', self.copy)
    hboxh.add(cbButton)

    vbox.add(hboxt)
    vbox.add(hboxh)
    window.add(vbox)
    window.show_all()
    gtk.main()

  # hash file and place output in text box
  def get_hash(self, button, alg):
    m = hashlib.new(alg)
    for data in iter(lambda: self.fd.read(128 * m.block_size), ''):
      m.update(data)
    self.fd.seek(0)
    self.hash = m.hexdigest()
    self.hash_box.set_text(self.hash)

  # copy to clipboard (no-op until a hash has been computed)
  def copy(self, button):
    if self.hash:
      clipboard.set_text(self.hash)
      clipboard.store()

  def quit(self, window):
    self.fd.close()
    gtk.main_quit()
if __name__ == '__main__':
  # Clipboard is created before the UI; Hashbrowns.copy() stores into it.
  clipboard = gtk.clipboard_get()
  if len(sys.argv) != 2:
    sys.exit('usage: ' + sys.argv[0] + ' FILE')
  hb = Hashbrowns(sys.argv[1])
"""Parses Atomic coordinates entries from PDB files"""
import math
from lightdock.error.lightdock_errors import PDBParsingError, PDBParsingWarning
from lightdock.structure.atom import Atom, HetAtom
from lightdock.structure.residue import Residue
from lightdock.structure.chain import Chain
from lightdock.util.logger import LoggingManager
log = LoggingManager.get_logger('pdb')
def cstrip(string):
    """Strip surrounding spaces, tabs, and CR/LF characters from `string`."""
    unwanted = ' \t\n\r'
    return string.strip(unwanted)
def read_atom_line(line, line_type='', atoms_to_ignore=()):
    """Parses a PDB file line starting with 'ATOM' or 'HETATM'.

    Args:
        line: one full fixed-column record line from a PDB file.
        line_type: 'ATOM' or 'HETATM'; read from the line itself if empty.
        atoms_to_ignore: atom names to skip ('H' skips all hydrogens).
            Default is now an immutable tuple (was a mutable list).

    Returns:
        An Atom or HetAtom instance.

    Raises:
        PDBParsingError: if coordinate or numeric fields are malformed.
        PDBParsingWarning: if the atom is listed in atoms_to_ignore.
    """
    element = cstrip(line[76:78])
    # float() only raises ValueError for bad text; the original routed a NaN
    # check through a bare except, which also swallowed unrelated errors.
    try:
        x = float(line[30:38])
        y = float(line[38:46])
        z = float(line[46:54])
    except ValueError:
        raise PDBParsingError("Wrong coordinates in '%s'" % line)
    if math.isnan(x) or math.isnan(y) or math.isnan(z):
        raise PDBParsingError("Wrong coordinates in '%s'" % line)
    try:
        atom_number = int(line[6:11])
    except ValueError:
        raise PDBParsingError("Wrong atom number in '%s'" % line)
    atom_name = cstrip(line[12:16])
    atom_alternative = cstrip(line[16])
    residue_name = cstrip(line[17:21])
    chain_id = cstrip(line[21])
    residue_ext = line[26]
    try:
        residue_number = int(line[22:26])
    except ValueError:
        raise PDBParsingError("Wrong residue number in '%s'" % line)
    if ('H' in atoms_to_ignore and atom_name[0] == 'H') or atom_name in atoms_to_ignore:
        raise PDBParsingWarning("Ignored atom %s.%s.%s %s" % (chain_id,
                                                              residue_name,
                                                              residue_number,
                                                              atom_name))
    # Occupancy and B-factor are optional; default them when unparsable.
    try:
        occupancy = float(line[54:60])
    except ValueError:
        occupancy = 1.0
    try:
        b_factor = float(line[60:66])
    except ValueError:
        b_factor = 0.0
    if not line_type:
        line_type = line[0:6].strip()
    if line_type == 'ATOM':
        return Atom(atom_number, atom_name, atom_alternative, chain_id,
                    residue_name, residue_number, residue_ext,
                    x, y, z, occupancy, b_factor, element)
    else:
        return HetAtom(atom_number, atom_name, atom_alternative, chain_id,
                       residue_name, residue_number, residue_ext,
                       x, y, z, occupancy, b_factor, element)
def parse_complex_from_file(input_file_name, atoms_to_ignore=(), verbose=False):
    """Reads and parses a given input_file_name PDB file.

    Returns the (atoms, residues, chains) hierarchy of the first model
    found in the file; additional MODEL records are ignored with a warning.

    TODO: Check if chain have been already created and insert it into the first one
    """
    atoms = []
    residues = []
    chains = []
    num_models = 0
    last_chain_id = '#'
    last_residue_name = '#'
    last_residue_number = '#'
    current_chain = None
    current_residue = None
    # The context manager guarantees the file handle is closed (the old
    # Python 2 ``file(...)`` call leaked it); iterating the handle avoids
    # loading the whole file into memory at once.
    with open(input_file_name) as input_file:
        for line in input_file:
            # Only first model is going to be read
            if num_models <= 1:
                line_type = line[0:6].strip()
                if line_type == "MODEL":
                    num_models += 1
                    if num_models > 1:
                        log.warning('Multiple models found in %s. Only first model will be used.' % input_file_name)
                elif line_type == "ATOM" or line_type == "HETATM":
                    try:
                        atom = read_atom_line(line, line_type, atoms_to_ignore)
                        atoms.append(atom)
                    except PDBParsingWarning as warning:
                        if verbose:
                            print(warning)
                        continue
                    # Open a new chain/residue whenever the identifiers change
                    # with respect to the previous ATOM record.
                    if last_chain_id != atom.chain_id:
                        last_chain_id = atom.chain_id
                        current_chain = Chain(last_chain_id)
                        chains.append(current_chain)
                    if last_residue_name != atom.residue_name or last_residue_number != atom.residue_number:
                        last_residue_name = atom.residue_name
                        last_residue_number = atom.residue_number
                        current_residue = Residue(atom.residue_name, atom.residue_number)
                        residues.append(current_residue)
                        current_chain.residues.append(current_residue)
                    current_residue.atoms.append(atom)
    # Set backbone and side-chain atoms
    for residue in residues:
        residue.set_backbone_and_sidechain()
        try:
            residue.check()
        except Exception as e:
            log.warning("Possible problem: %s" % str(e))
    return atoms, residues, chains
def _format_atom_name(atom_name):
"""Format ATOM name with correct padding"""
if len(atom_name) == 4:
return atom_name
else:
return " %s" % atom_name
def write_atom_line(atom, atom_coordinates, output):
    """Write one ATOM/HETATM record for *atom* to the *output* stream.

    Coordinates are taken from ``atom_coordinates[atom.index]`` so the same
    atom can be written with coordinates from different structures.
    """
    # The record keyword is decided by the concrete atom class name.
    if atom.__class__.__name__ == "HetAtom":
        record_type = 'HETATM'
    else:
        record_type = 'ATOM  '
    coordinates = atom_coordinates[atom.index]
    fields = (record_type,
              atom.number,
              _format_atom_name(atom.name),
              atom.alternative,
              atom.residue_name,
              atom.chain_id,
              atom.residue_number,
              atom.residue_ext,
              coordinates[0],
              coordinates[1],
              coordinates[2],
              atom.occupancy,
              atom.b_factor,
              atom.element)
    output.write("%6s%5d %-4s%-1s%3s%2s%4d%1s   %8.3f%8.3f%8.3f%6.2f%6.2f%12s\n" % fields)
def write_pdb_to_file(molecule, output_file_name, atom_coordinates=None, structure_id=0):
    """Writes a Complex structure to a file in PDB format.

    The file is opened in append mode, so repeated calls accumulate
    structures in the same file. When *atom_coordinates* is given it is
    used for every atom; otherwise the molecule's own coordinate set
    selected by *structure_id* is used.
    """
    # ``with open`` replaces the Python 2-only ``file()`` builtin and
    # guarantees the handle is closed even if a write fails.
    with open(output_file_name, "a") as output_file:
        # The coordinate source does not change per atom; pick it once
        # instead of re-testing inside the loop.
        if atom_coordinates is not None:
            coordinates = atom_coordinates
        else:
            coordinates = molecule.atom_coordinates[structure_id]
        for atom in molecule.atoms:
            write_atom_line(atom, coordinates, output_file)
def create_pdb_from_points(pdb_file_name, points, atom_type='H'):
    """Creates a PDB file which contains an atom_type atom for each point
    in points list.
    """
    # One fixed-format ATOM record per point; the atom serial is the
    # point's position in the input sequence.
    record = "ATOM  %5d  %-4s XXX   1     %8.3f%8.3f%8.3f\n"
    with open(pdb_file_name, 'w') as pdb_file:
        for index, point in enumerate(points):
            pdb_file.write(record % (index, atom_type,
                                     point[0], point[1], point[2]))
| gpl-3.0 |
kuwa32/chainer | tests/functions_tests/test_nonparameterized_linear.py | 7 | 2368 | import unittest
import numpy
import chainer
from chainer import cuda
from chainer import functions
from chainer import gradient_check
from chainer import testing
from chainer.testing import attr
from chainer.testing import condition
if cuda.available:
cuda.init()
class TestNonparameterizedLinear(unittest.TestCase):
    """Forward/backward checks for the functional ``chainer.functions.linear``.

    The function is exercised with explicit weight/bias Variables instead of
    a parameterized Linear link, on both CPU and (when available) GPU.
    """
    def setUp(self):
        # Random fixtures: W is (2, 3), b is (2,), x is a batch of four
        # 3-vectors, gy the matching upstream gradient.
        self.W = numpy.random.uniform(
            -1, 1, (2, 3)).astype(numpy.float32)
        self.b = numpy.random.uniform(
            -1, 1, 2).astype(numpy.float32)
        self.x = numpy.random.uniform(-1, 1, (4, 3)).astype(numpy.float32)
        self.gy = numpy.random.uniform(-1, 1, (4, 2)).astype(numpy.float32)
        self.y = self.x.dot(self.W.T) + self.b
    def check_forward(self, x_data, W_data, b_data, y_expect):
        """Assert that linear(x, W, b) is close to *y_expect*."""
        x = chainer.Variable(x_data)
        W = chainer.Variable(W_data)
        b = chainer.Variable(b_data)
        y = functions.linear(x, W, b)
        gradient_check.assert_allclose(y_expect, y.data)
    @condition.retry(3)
    def test_forward_cpu(self):
        self.check_forward(self.x, self.W, self.b,
                           self.x.dot(self.W.T) + self.b)
    @attr.gpu
    @condition.retry(3)
    def test_forward_gpu(self):
        self.check_forward(
            cuda.to_gpu(self.x), cuda.to_gpu(self.W), cuda.to_gpu(self.b),
            cuda.to_gpu(self.x.dot(self.W.T) + self.b))
    def check_backward(self, x_data, W_data, b_data, y_grad):
        """Compare analytic gradients against numerical differentiation."""
        x = chainer.Variable(x_data)
        W = chainer.Variable(W_data)
        b = chainer.Variable(b_data)
        y = functions.linear(x, W, b)
        y.grad = y_grad
        y.backward()
        func = y.creator

        # Named closure instead of a lambda assignment (PEP 8 E731).
        def forward():
            return func.forward((x.data, W.data, b.data))

        gx, gW, gb = gradient_check.numerical_grad(
            forward, (x.data, W.data, b.data), (y.grad,), eps=1e-2)
        gradient_check.assert_allclose(gx, x.grad)
        gradient_check.assert_allclose(gW, W.grad)
        gradient_check.assert_allclose(gb, b.grad)
    @condition.retry(3)
    def test_backward_cpu(self):
        self.check_backward(self.x, self.W, self.b, self.gy)
    @attr.gpu
    @condition.retry(3)
    def test_backward_gpu(self):
        self.check_backward(cuda.to_gpu(self.x), cuda.to_gpu(self.W),
                            cuda.to_gpu(self.b), cuda.to_gpu(self.gy))
testing.run_module(__name__, __file__)
| mit |
jiachenning/odoo | addons/base_action_rule/tests/base_action_rule_test.py | 395 | 7455 | from openerp import SUPERUSER_ID
from openerp.tests import common
from .. import test_models
class base_action_rule_test(common.TransactionCase):
    """Integration tests for ``base.action.rule`` pre/post-condition filters,
    run against the helper model ``base.action.rule.lead.test``.

    The rule under test assigns the 'demo' user as responsible; each test
    checks whether ``user_id`` flips from admin to demo depending on the
    configured filters and the state transitions performed.
    """
    def setUp(self):
        """*****setUp*****"""
        super(base_action_rule_test, self).setUp()
        cr, uid = self.cr, self.uid
        # Two users are involved: admin creates the records, demo is the
        # user the action rule reassigns them to.
        self.demo = self.registry('ir.model.data').get_object(cr, uid, 'base', 'user_demo').id
        self.admin = SUPERUSER_ID
        self.model = self.registry('base.action.rule.lead.test')
        self.base_action_rule = self.registry('base.action.rule')
    def create_filter_done(self, cr, uid, context=None):
        # ir.filters domain matching leads in the 'done' state
        # (used as a rule postcondition).
        filter_pool = self.registry('ir.filters')
        return filter_pool.create(cr, uid, {
            'name': "Lead is in done state",
            'is_default': False,
            'model_id': 'base.action.rule.lead.test',
            'domain': "[('state','=','done')]",
        }, context=context)
    def create_filter_draft(self, cr, uid, context=None):
        # ir.filters domain matching leads in the 'draft' state
        # (used as a rule precondition).
        filter_pool = self.registry('ir.filters')
        return filter_pool.create(cr, uid, {
            'name': "Lead is in draft state",
            'is_default': False,
            'model_id': "base.action.rule.lead.test",
            'domain' : "[('state','=','draft')]",
        }, context=context)
    def create_lead_test_1(self, cr, uid, context=None):
        """
        Create a new lead_test
        """
        return self.model.create(cr, uid, {
            'name': "Lead Test 1",
            'user_id': self.admin,
        }, context=context)
    def create_rule(self, cr, uid, kind, filter_id=False, filter_pre_id=False, context=None):
        """
        The "Rule 1" says that when a lead goes to the 'draft' state, the responsible for that lead changes to user "demo"
        """
        return self.base_action_rule.create(cr,uid,{
            'name': "Rule 1",
            'model_id': self.registry('ir.model').search(cr, uid, [('model','=','base.action.rule.lead.test')], context=context)[0],
            'kind': kind,
            'filter_pre_id': filter_pre_id,
            'filter_id': filter_id,
            'act_user_id': self.demo,
        }, context=context)
    def delete_rules(self, cr, uid, context=None):
        """ delete all the rules on model 'base.action.rule.lead.test' """
        action_ids = self.base_action_rule.search(cr, uid, [('model', '=', self.model._name)], context=context)
        return self.base_action_rule.unlink(cr, uid, action_ids, context=context)
    def test_00_check_to_state_draft_pre(self):
        """
        Check that a new record (with state = draft) doesn't change its responsible when there is a precondition filter which check that the state is draft.
        """
        cr, uid = self.cr, self.uid
        filter_draft = self.create_filter_draft(cr, uid)
        self.create_rule(cr, uid, 'on_write', filter_pre_id=filter_draft)
        new_lead_id = self.create_lead_test_1(cr, uid)
        new_lead = self.model.browse(cr, uid, new_lead_id)
        self.assertEquals(new_lead.state, 'draft')
        self.assertEquals(new_lead.user_id.id, self.admin)
        self.delete_rules(cr, uid)
    def test_01_check_to_state_draft_post(self):
        """
        Check that a new record changes its responsible when there is a postcondition filter which check that the state is draft.
        """
        cr, uid = self.cr, self.uid
        filter_draft = self.create_filter_draft(cr, uid)
        self.create_rule(cr, uid, 'on_create')
        new_lead_id = self.create_lead_test_1(cr, uid)
        new_lead = self.model.browse(cr, uid, new_lead_id)
        self.assertEquals(new_lead.state, 'draft')
        self.assertEquals(new_lead.user_id.id, self.demo)
        self.delete_rules(cr, uid)
    def test_02_check_from_draft_to_done_with_steps(self):
        """
        A new record will be created and will goes from draft to done state via the other states (open, pending and cancel)
        We will create a rule that says in precondition that the record must be in the "draft" state while a postcondition filter says
        that the record will be done. If the state goes from 'draft' to 'done' the responsible will change. If those two conditions aren't
        verified, the responsible will stay the same
        The responsible in that test will never change
        """
        cr, uid = self.cr, self.uid
        filter_draft = self.create_filter_draft(cr, uid)
        filter_done = self.create_filter_done(cr, uid)
        self.create_rule(cr, uid, 'on_write', filter_pre_id=filter_draft, filter_id=filter_done)
        new_lead_id = self.create_lead_test_1(cr, uid)
        new_lead = self.model.browse(cr, uid, new_lead_id)
        self.assertEquals(new_lead.state, 'draft')
        self.assertEquals(new_lead.user_id.id, self.admin)
        """ change the state of new_lead to open and check that responsible doen't change"""
        new_lead.write({'state': 'open'})
        new_lead = self.model.browse(cr, uid, new_lead_id)
        self.assertEquals(new_lead.state, 'open')
        self.assertEquals(new_lead.user_id.id, self.admin)
        """ change the state of new_lead to pending and check that responsible doen't change"""
        new_lead.write({'state': 'pending'})
        new_lead = self.model.browse(cr, uid, new_lead_id)
        self.assertEquals(new_lead.state, 'pending')
        self.assertEquals(new_lead.user_id.id, self.admin)
        """ change the state of new_lead to cancel and check that responsible doen't change"""
        new_lead.write({'state': 'cancel'})
        new_lead = self.model.browse(cr, uid, new_lead_id)
        self.assertEquals(new_lead.state, 'cancel')
        self.assertEquals(new_lead.user_id.id, self.admin)
        """ change the state of new_lead to done and check that responsible doen't change """
        new_lead.write({'state': 'done'})
        new_lead = self.model.browse(cr, uid, new_lead_id)
        self.assertEquals(new_lead.state, 'done')
        self.assertEquals(new_lead.user_id.id, self.admin)
        self.delete_rules(cr, uid)
    def test_02_check_from_draft_to_done_without_steps(self):
        """
        A new record will be created and will goes from draft to done in one operation
        We will create a rule that says in precondition that the record must be in the "draft" state while a postcondition filter says
        that the record will be done. If the state goes from 'draft' to 'done' the responsible will change. If those two conditions aren't
        verified, the responsible will stay the same
        The responsible in that test will change to user "demo"
        """
        cr, uid = self.cr, self.uid
        filter_draft = self.create_filter_draft(cr, uid)
        filter_done = self.create_filter_done(cr, uid)
        self.create_rule(cr, uid, 'on_write', filter_pre_id=filter_draft, filter_id=filter_done)
        new_lead_id = self.create_lead_test_1(cr, uid)
        new_lead = self.model.browse(cr, uid, new_lead_id)
        self.assertEquals(new_lead.state, 'draft')
        self.assertEquals(new_lead.user_id.id, self.admin)
        """ change the state of new_lead to done and check that responsible change to Demo_user"""
        new_lead.write({'state': 'done'})
        new_lead = self.model.browse(cr, uid, new_lead_id)
        self.assertEquals(new_lead.state, 'done')
        self.assertEquals(new_lead.user_id.id, self.demo)
        self.delete_rules(cr, uid)
| agpl-3.0 |
andrewcbennett/iris | lib/iris/tests/unit/fileformats/netcdf/test__load_aux_factory.py | 11 | 5682 | # (C) British Crown Copyright 2014 - 2015, Met Office
#
# This file is part of Iris.
#
# Iris is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Iris is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Iris. If not, see <http://www.gnu.org/licenses/>.
"""Unit tests for the `iris.fileformats.netcdf._load_aux_factory` function."""
from __future__ import (absolute_import, division, print_function)
from six.moves import (filter, input, map, range, zip) # noqa
# Import iris.tests first so that some things can be initialised before
# importing anything else.
import iris.tests as tests
import numpy as np
import warnings
from iris.coords import DimCoord
from iris.cube import Cube
from iris.fileformats.netcdf import _load_aux_factory
from iris.tests import mock
class TestAtmosphereHybridSigmaPressureCoordinate(tests.IrisTest):
    """Tests `_load_aux_factory` handling of the hybrid sigma-pressure
    formula, driven through a mocked engine and an autospec'd Cube.
    """
    def setUp(self):
        # Minimal engine contract: `requires` carries the formula metadata,
        # `provides` the coordinates discovered so far ('b' and 'ps' always).
        standard_name = 'atmosphere_hybrid_sigma_pressure_coordinate'
        self.requires = dict(formula_type=standard_name)
        coordinates = [(mock.sentinel.b, 'b'), (mock.sentinel.ps, 'ps')]
        self.provides = dict(coordinates=coordinates)
        self.engine = mock.Mock(requires=self.requires, provides=self.provides)
        self.cube = mock.create_autospec(Cube, spec_set=True, instance=True)
        # Patch out the check_dependencies functionality.
        func = 'iris.aux_factory.HybridPressureFactory._check_dependencies'
        patcher = mock.patch(func)
        patcher.start()
        self.addCleanup(patcher.stop)
    def test_formula_terms_ap(self):
        """With an explicit 'ap' term no extra coordinate is created."""
        self.provides['coordinates'].append((mock.sentinel.ap, 'ap'))
        self.requires['formula_terms'] = dict(ap='ap', b='b', ps='ps')
        _load_aux_factory(self.engine, self.cube)
        # Check cube.add_aux_coord method.
        self.assertEqual(self.cube.add_aux_coord.call_count, 0)
        # Check cube.add_aux_factory method.
        self.assertEqual(self.cube.add_aux_factory.call_count, 1)
        args, _ = self.cube.add_aux_factory.call_args
        self.assertEqual(len(args), 1)
        factory = args[0]
        self.assertEqual(factory.delta, mock.sentinel.ap)
        self.assertEqual(factory.sigma, mock.sentinel.b)
        self.assertEqual(factory.surface_air_pressure, mock.sentinel.ps)
    def test_formula_terms_a_p0(self):
        """With 'a' and 'p0' terms a derived 'ap' coordinate (a * p0) is
        built and attached to the cube."""
        coord_a = DimCoord(np.arange(5), units='Pa')
        coord_p0 = DimCoord(10, units='1')
        coord_expected = DimCoord(np.arange(5) * 10, units='Pa',
                                  long_name='vertical pressure', var_name='ap')
        self.provides['coordinates'].extend([(coord_a, 'a'), (coord_p0, 'p0')])
        self.requires['formula_terms'] = dict(a='a', b='b', ps='ps', p0='p0')
        _load_aux_factory(self.engine, self.cube)
        # Check cube.coord_dims method.
        self.assertEqual(self.cube.coord_dims.call_count, 1)
        args, _ = self.cube.coord_dims.call_args
        self.assertEqual(len(args), 1)
        self.assertIs(args[0], coord_a)
        # Check cube.add_aux_coord method.
        self.assertEqual(self.cube.add_aux_coord.call_count, 1)
        args, _ = self.cube.add_aux_coord.call_args
        self.assertEqual(len(args), 2)
        self.assertEqual(args[0], coord_expected)
        self.assertIsInstance(args[1], mock.Mock)
        # Check cube.add_aux_factory method.
        self.assertEqual(self.cube.add_aux_factory.call_count, 1)
        args, _ = self.cube.add_aux_factory.call_args
        self.assertEqual(len(args), 1)
        factory = args[0]
        self.assertEqual(factory.delta, coord_expected)
        self.assertEqual(factory.sigma, mock.sentinel.b)
        self.assertEqual(factory.surface_air_pressure, mock.sentinel.ps)
    def test_formula_terms_p0_non_scalar(self):
        """A non-scalar 'p0' is invalid and must raise ValueError."""
        coord_p0 = DimCoord(np.arange(5))
        self.provides['coordinates'].append((coord_p0, 'p0'))
        self.requires['formula_terms'] = dict(p0='p0')
        with self.assertRaises(ValueError):
            _load_aux_factory(self.engine, self.cube)
    def test_formula_terms_p0_bounded(self):
        """Bounds on the scalar 'p0' coordinate are dropped with a warning."""
        coord_a = DimCoord(np.arange(5))
        coord_p0 = DimCoord(1, bounds=[0, 2], var_name='p0')
        self.provides['coordinates'].extend([(coord_a, 'a'), (coord_p0, 'p0')])
        self.requires['formula_terms'] = dict(a='a', b='b', ps='ps', p0='p0')
        with warnings.catch_warnings(record=True) as warn:
            warnings.simplefilter('always')
            _load_aux_factory(self.engine, self.cube)
        self.assertEqual(len(warn), 1)
        msg = 'Ignoring atmosphere hybrid sigma pressure scalar ' \
              'coordinate {!r} bounds.'.format(coord_p0.name())
        self.assertEqual(msg, str(warn[0].message))
    def test_formula_terms_ap_missing_coords(self):
        """A formula term without a matching coordinate triggers a warning."""
        coordinates = [(mock.sentinel.b, 'b'), (mock.sentinel.ps, 'ps')]
        self.provides = dict(coordinates=coordinates)
        self.requires['formula_terms'] = dict(ap='ap', b='b', ps='ps')
        with mock.patch('warnings.warn') as warn:
            _load_aux_factory(self.engine, self.cube)
        warn.assert_called_once_with("Unable to find coordinate for variable "
                                     "'ap'")
# Allow running this test module directly, outside a test runner.
if __name__ == '__main__':
    tests.main()
| gpl-3.0 |
user-none/calibre | src/calibre/ebooks/oeb/transforms/guide.py | 16 | 2308 | #!/usr/bin/env python2
# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:ai
from __future__ import with_statement
__license__ = 'GPL v3'
__copyright__ = '2009, Kovid Goyal <kovid@kovidgoyal.net>'
__docformat__ = 'restructuredtext en'
class Clean(object):
    '''Clean up guide, leaving only known values '''

    def __call__(self, oeb, opts):
        """Normalize the OEB guide in place and return nothing.

        Three passes: pick a cover when none is declared, convert a lone
        'start' reference to 'text' (per OPF 2.0), then drop any guide
        reference whose type is not in the known whitelist.
        """
        self.oeb, self.log, self.opts = oeb, oeb.log, opts

        if 'cover' not in self.oeb.guide:
            # No explicit cover: collect the various MS cover/title image
            # references that resolve to a manifest item, remembering the
            # image size of each candidate.
            covers = []
            for x in ('other.ms-coverimage-standard', 'coverimagestandard',
                    'other.ms-titleimage-standard', 'other.ms-titleimage',
                    'other.ms-coverimage', 'other.ms-thumbimage-standard',
                    'other.ms-thumbimage', 'thumbimagestandard'):
                if x in self.oeb.guide:
                    href = self.oeb.guide[x].href
                    try:
                        item = self.oeb.manifest.hrefs[href]
                    except KeyError:
                        continue
                    else:
                        covers.append([self.oeb.guide[x], len(item.data)])
            # Largest image wins. ``key=`` replaces the Python 2-only
            # ``cmp=`` keyword and produces the identical ordering.
            covers.sort(key=lambda cover: cover[1], reverse=True)
            if covers:
                ref = covers[0][0]
                if len(covers) > 1:
                    self.log('Choosing %s:%s as the cover'%(ref.type, ref.href))
                ref.type = 'cover'
                self.oeb.guide.refs['cover'] = ref

        if ('start' in self.oeb.guide and 'text' not in self.oeb.guide):
            # Prefer text to start as per the OPF 2.0 spec
            x = self.oeb.guide['start']
            self.oeb.guide.add('text', x.title, x.href)
            self.oeb.guide.remove('start')

        for x in list(self.oeb.guide):
            if x.lower() not in {
                    'cover', 'titlepage', 'masthead', 'toc', 'title-page',
                    'copyright-page', 'text', 'index', 'glossary',
                    'acknowledgements', 'bibliography', 'colophon',
                    'copyright-page', 'dedication', 'epigraph', 'foreword',
                    'loi', 'lot', 'notes', 'preface'}:
                item = self.oeb.guide[x]
                # Keep unknown-typed entries explicitly titled 'start'.
                if item.title and item.title.lower() == 'start':
                    continue
                self.oeb.guide.remove(x)
| gpl-3.0 |
40223110/w16b_test | static/Brython3.1.1-20150328-091302/Lib/_thread.py | 740 | 4879 | """Drop-in replacement for the thread module.
Meant to be used as a brain-dead substitute so that threaded code does
not need to be rewritten for when the thread module is not present.
Suggested usage is::
try:
import _thread
except ImportError:
import _dummy_thread as _thread
"""
# Exports only things specified by thread documentation;
# skipping obsolete synonyms allocate(), start_new(), exit_thread().
__all__ = ['error', 'start_new_thread', 'exit', 'get_ident', 'allocate_lock',
'interrupt_main', 'LockType']
# A dummy value
TIMEOUT_MAX = 2**31
# NOTE: this module can be imported early in the extension building process,
# and so top level imports of other modules should be avoided. Instead, all
# imports are done when needed on a function-by-function basis. Since threads
# are disabled, the import lock should not be an issue anyway (??).
error = RuntimeError
def start_new_thread(function, args, kwargs={}):
    """Dummy implementation of _thread.start_new_thread().

    Compatibility is maintained by making sure that ``args`` is a
    tuple and ``kwargs`` is a dictionary. If an exception is raised
    and it is SystemExit (which can be done by _thread.exit()) it is
    caught and nothing is done; all other exceptions are printed out
    by using traceback.print_exc().

    If the executed function calls interrupt_main the KeyboardInterrupt will be
    raised when the function returns.
    """
    # Exact-type identity checks (not isinstance) deliberately preserve the
    # original behaviour of rejecting tuple/dict subclasses.
    # ``kwargs={}`` is safe here: the dict is only unpacked, never mutated.
    if type(args) is not tuple:
        raise TypeError("2nd arg must be a tuple")
    if type(kwargs) is not dict:
        raise TypeError("3rd arg must be a dict")
    global _main
    _main = False
    try:
        function(*args, **kwargs)
    except SystemExit:
        pass
    except:
        # Bare except is intentional: a real thread would swallow any
        # failure; here it is reported and execution continues.
        import traceback
        traceback.print_exc()
    _main = True
    global _interrupt
    if _interrupt:
        _interrupt = False
        raise KeyboardInterrupt
def exit():
    """Dummy implementation of _thread.exit()."""
    # Raising SystemExit in the only "thread" terminates the interpreter,
    # matching what exiting the sole real thread would do.
    raise SystemExit
def get_ident():
    """Dummy implementation of _thread.get_ident().

    Since this module should only be used when _threadmodule is not
    available, the current process is assumed to be the only thread,
    so a fixed identifier is returned for every caller.
    """
    only_thread_id = -1
    return only_thread_id
def allocate_lock():
    """Dummy implementation of _thread.allocate_lock()."""
    # Each call hands out a fresh dummy lock; see LockType for semantics.
    return LockType()
def stack_size(size=None):
    """Dummy implementation of _thread.stack_size().

    Querying (size is None) always reports 0; attempting to change the
    stack size is unsupported and raises ``error``.
    """
    if size is None:
        return 0
    raise error("setting thread stack size not supported")
class LockType(object):
    """Class implementing dummy implementation of _thread.LockType.

    The lock state is a single boolean, ``self.locked_status``. Because
    only one "thread" ever exists, a blocking acquire can always succeed
    immediately. Pickling such a lock should be avoided: an unpickled
    copy would not behave atomically.
    """

    def __init__(self):
        self.locked_status = False

    def acquire(self, waitflag=None, timeout=-1):
        """Dummy implementation of acquire().

        A blocking request (waitflag omitted/None or truthy) always
        succeeds and marks the lock held. A non-blocking request only
        succeeds when the lock is free; otherwise it optionally sleeps
        for ``timeout`` seconds before reporting failure, which keeps
        threading.Condition's assertions satisfied.
        """
        if waitflag is None or waitflag:
            # Blocking: with a single thread there is nothing to wait for.
            self.locked_status = True
            return True
        # Non-blocking: succeed only if currently free.
        if not self.locked_status:
            self.locked_status = True
            return True
        if timeout > 0:
            import time
            time.sleep(timeout)
        return False

    __enter__ = acquire

    def __exit__(self, typ, val, tb):
        self.release()

    def release(self):
        """Release the dummy lock."""
        # Releasing an unheld lock is a usage error, as with real locks.
        if not self.locked_status:
            raise error
        self.locked_status = False
        return True

    def locked(self):
        """Return True while the dummy lock is held."""
        return self.locked_status
# Used to signal that interrupt_main was called in a "thread"
_interrupt = False
# True when not executing in a "thread"
_main = True

def interrupt_main():
    """Set _interrupt flag to True to have start_new_thread raise
    KeyboardInterrupt upon exiting."""
    global _interrupt
    if not _main:
        # Inside a dummy "thread": defer the interrupt until it returns.
        _interrupt = True
    else:
        raise KeyboardInterrupt
# Brython-specific to avoid circular references between threading and _threading_local
class _local:
    """Minimal stand-in for thread-local storage; plain attribute access
    suffices when only one thread exists."""
    pass
savoirfairelinux/odoo | addons/account/project/wizard/account_analytic_chart.py | 362 | 2100 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class account_analytic_chart(osv.osv_memory):
    _name = 'account.analytic.chart'
    _description = 'Account Analytic Chart'
    # Optional date range the user can restrict the analytic chart to.
    _columns = {
        'from_date': fields.date('From'),
        'to_date': fields.date('To'),
    }
    def analytic_account_chart_open_window(self, cr, uid, ids, context=None):
        """Return the analytic account tree action, with the wizard's
        from/to dates (when set) propagated through the action context.
        """
        mod_obj = self.pool.get('ir.model.data')
        act_obj = self.pool.get('ir.actions.act_window')
        result_context = {}
        if context is None:
            context = {}
        ref = mod_obj.get_object_reference(cr, uid, 'account', 'action_account_analytic_account_tree2')
        # ``action_id`` avoids shadowing the ``id`` builtin.
        action_id = ref and ref[1] or False
        result = act_obj.read(cr, uid, [action_id], context=context)[0]
        data = self.read(cr, uid, ids, [])[0]
        # Only pass dates that the user actually filled in.
        if data['from_date']:
            result_context.update({'from_date': data['from_date']})
        if data['to_date']:
            result_context.update({'to_date': data['to_date']})
        result['context'] = str(result_context)
        return result
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
w1ll1am23/home-assistant | tests/components/homekit/test_aidmanager.py | 8 | 22428 | """Tests for the HomeKit AID manager."""
import os
from unittest.mock import patch
from fnvhash import fnv1a_32
import pytest
from homeassistant.components.homekit.aidmanager import (
AccessoryAidStorage,
get_aid_storage_filename_for_entry_id,
get_system_unique_id,
)
from homeassistant.helpers import device_registry
from homeassistant.helpers.storage import STORAGE_DIR
from tests.common import MockConfigEntry, mock_device_registry, mock_registry
@pytest.fixture
def device_reg(hass):
    """Return an empty, loaded, mock device registry."""
    return mock_device_registry(hass)
@pytest.fixture
def entity_reg(hass):
    """Return an empty, loaded, mock entity registry."""
    return mock_registry(hass)
async def test_aid_generation(hass, device_reg, entity_reg):
    """Test generating aids.

    Checks that aid allocation is deterministic and idempotent, and that
    deleting stored aids and re-allocating reproduces the same values.
    """
    config_entry = MockConfigEntry(domain="test", data={})
    config_entry.add_to_hass(hass)
    device_entry = device_reg.async_get_or_create(
        config_entry_id=config_entry.entry_id,
        connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
    )
    # Three registry entities with unique ids, plus one state-machine-only
    # entity ("remote.has_no_unique_id") to exercise the fallback path.
    light_ent = entity_reg.async_get_or_create(
        "light", "device", "unique_id", device_id=device_entry.id
    )
    light_ent2 = entity_reg.async_get_or_create(
        "light", "device", "other_unique_id", device_id=device_entry.id
    )
    remote_ent = entity_reg.async_get_or_create(
        "remote", "device", "unique_id", device_id=device_entry.id
    )
    hass.states.async_set(light_ent.entity_id, "on")
    hass.states.async_set(light_ent2.entity_id, "on")
    hass.states.async_set(remote_ent.entity_id, "on")
    hass.states.async_set("remote.has_no_unique_id", "on")
    with patch(
        "homeassistant.components.homekit.aidmanager.AccessoryAidStorage.async_schedule_save"
    ):
        aid_storage = AccessoryAidStorage(hass, config_entry)
        await aid_storage.async_initialize()
        # Repeated lookups must return the same aid (idempotent allocation).
        for _ in range(0, 2):
            assert (
                aid_storage.get_or_allocate_aid_for_entity_id(light_ent.entity_id)
                == 1953095294
            )
            assert (
                aid_storage.get_or_allocate_aid_for_entity_id(light_ent2.entity_id)
                == 1975378727
            )
            assert (
                aid_storage.get_or_allocate_aid_for_entity_id(remote_ent.entity_id)
                == 3508011530
            )
            assert (
                aid_storage.get_or_allocate_aid_for_entity_id("remote.has_no_unique_id")
                == 1751603975
            )
        # Deleting stored aids (including a non-existent key) and then
        # re-allocating must yield the same deterministic values.
        aid_storage.delete_aid(get_system_unique_id(light_ent))
        aid_storage.delete_aid(get_system_unique_id(light_ent2))
        aid_storage.delete_aid(get_system_unique_id(remote_ent))
        aid_storage.delete_aid("non-existant-one")
        for _ in range(0, 2):
            assert (
                aid_storage.get_or_allocate_aid_for_entity_id(light_ent.entity_id)
                == 1953095294
            )
            assert (
                aid_storage.get_or_allocate_aid_for_entity_id(light_ent2.entity_id)
                == 1975378727
            )
            assert (
                aid_storage.get_or_allocate_aid_for_entity_id(remote_ent.entity_id)
                == 3508011530
            )
            assert (
                aid_storage.get_or_allocate_aid_for_entity_id("remote.has_no_unique_id")
                == 1751603975
            )
async def test_no_aid_collision(hass, device_reg, entity_reg):
    """Test generating aids.

    Allocates aids for 202 entities and asserts every allocated aid is
    unique, i.e. collision handling never hands out a duplicate.
    """
    config_entry = MockConfigEntry(domain="test", data={})
    config_entry.add_to_hass(hass)
    device_entry = device_reg.async_get_or_create(
        config_entry_id=config_entry.entry_id,
        connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
    )
    with patch(
        "homeassistant.components.homekit.aidmanager.AccessoryAidStorage.async_schedule_save"
    ):
        aid_storage = AccessoryAidStorage(hass, config_entry)
        await aid_storage.async_initialize()
        seen_aids = set()
        for unique_id in range(0, 202):
            ent = entity_reg.async_get_or_create(
                "light", "device", unique_id, device_id=device_entry.id
            )
            hass.states.async_set(ent.entity_id, "on")
            aid = aid_storage.get_or_allocate_aid_for_entity_id(ent.entity_id)
            # Every entity must receive a previously-unseen aid.
            assert aid not in seen_aids
            seen_aids.add(aid)
async def test_aid_generation_no_unique_ids_handles_collision(
    hass, device_reg, entity_reg
):
    """Test that colliding aids are resolved and remain stable.

    Allocates aids for 220 entities without unique ids (enough to trigger
    fnv1a_32 hash collisions), verifies every aid is unique, pins the exact
    allocation table, and then checks the table survives a save/restore
    cycle even when the hash function is unavailable.
    """
    config_entry = MockConfigEntry(domain="test", data={})
    config_entry.add_to_hass(hass)
    aid_storage = AccessoryAidStorage(hass, config_entry)
    await aid_storage.async_initialize()
    device_entry = device_reg.async_get_or_create(
        config_entry_id=config_entry.entry_id,
        connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
    )
    seen_aids = set()
    collisions = []
    for light_id in range(0, 220):
        entity_id = f"light.light{light_id}"
        hass.states.async_set(entity_id, "on")
        expected_aid = fnv1a_32(entity_id.encode("utf-8"))
        aid = aid_storage.get_or_allocate_aid_for_entity_id(entity_id)
        # Track entities whose aid had to be adjusted away from the raw hash.
        if aid != expected_aid:
            collisions.append(entity_id)
        assert aid not in seen_aids
        seen_aids.add(aid)
    light_ent = entity_reg.async_get_or_create(
        "light", "device", "unique_id", device_id=device_entry.id
    )
    hass.states.async_set(light_ent.entity_id, "on")
    aid_storage.get_or_allocate_aid_for_entity_id(light_ent.entity_id)
    assert not collisions
    # This expected allocation table was previously duplicated verbatim for
    # the pre-save and post-restore assertions; keep a single copy.
    expected_allocations = {
        "device.light.unique_id": 1953095294,
        "light.light0": 301577847,
        "light.light1": 284800228,
        "light.light10": 2367138236,
        "light.light100": 2822760292,
        "light.light101": 2839537911,
        "light.light102": 2856315530,
        "light.light103": 2873093149,
        "light.light104": 2755649816,
        "light.light105": 2772427435,
        "light.light106": 2789205054,
        "light.light107": 2805982673,
        "light.light108": 2688539340,
        "light.light109": 2705316959,
        "light.light11": 2383915855,
        "light.light110": 776141037,
        "light.light111": 759363418,
        "light.light112": 742585799,
        "light.light113": 725808180,
        "light.light114": 709030561,
        "light.light115": 692252942,
        "light.light116": 675475323,
        "light.light117": 658697704,
        "light.light118": 641920085,
        "light.light119": 625142466,
        "light.light12": 2400693474,
        "light.light120": 340070038,
        "light.light121": 356847657,
        "light.light122": 306514800,
        "light.light123": 323292419,
        "light.light124": 407180514,
        "light.light125": 423958133,
        "light.light126": 373625276,
        "light.light127": 390402895,
        "light.light128": 474290990,
        "light.light129": 491068609,
        "light.light13": 2417471093,
        "light.light130": 440882847,
        "light.light131": 424105228,
        "light.light132": 474438085,
        "light.light133": 457660466,
        "light.light134": 373772371,
        "light.light135": 356994752,
        "light.light136": 407327609,
        "light.light137": 390549990,
        "light.light138": 575103799,
        "light.light139": 558326180,
        "light.light14": 2300027760,
        "light.light140": 271973824,
        "light.light141": 288751443,
        "light.light142": 305529062,
        "light.light143": 322306681,
        "light.light144": 339084300,
        "light.light145": 355861919,
        "light.light146": 372639538,
        "light.light147": 389417157,
        "light.light148": 406194776,
        "light.light149": 422972395,
        "light.light15": 2316805379,
        "light.light150": 2520321865,
        "light.light151": 2503544246,
        "light.light152": 2486766627,
        "light.light153": 2469989008,
        "light.light154": 2587432341,
        "light.light155": 2570654722,
        "light.light156": 2553877103,
        "light.light157": 2537099484,
        "light.light158": 2654542817,
        "light.light159": 2637765198,
        "light.light16": 2333582998,
        "light.light160": 2621134674,
        "light.light161": 2637912293,
        "light.light162": 2587579436,
        "light.light163": 2604357055,
        "light.light164": 2554024198,
        "light.light165": 2570801817,
        "light.light166": 2520468960,
        "light.light167": 2537246579,
        "light.light168": 2755355626,
        "light.light169": 2772133245,
        "light.light17": 2350360617,
        "light.light170": 2721947483,
        "light.light171": 2705169864,
        "light.light172": 2755502721,
        "light.light173": 2738725102,
        "light.light174": 2789057959,
        "light.light175": 2772280340,
        "light.light176": 2822613197,
        "light.light177": 2805835578,
        "light.light178": 2587726531,
        "light.light179": 2570948912,
        "light.light18": 2501359188,
        "light.light180": 408166252,
        "light.light181": 424943871,
        "light.light182": 441721490,
        "light.light183": 458499109,
        "light.light184": 341055776,
        "light.light185": 357833395,
        "light.light186": 374611014,
        "light.light187": 391388633,
        "light.light188": 542387204,
        "light.light189": 559164823,
        "light.light19": 2518136807,
        "light.light190": 508979061,
        "light.light191": 492201442,
        "light.light192": 475423823,
        "light.light193": 458646204,
        "light.light194": 441868585,
        "light.light195": 425090966,
        "light.light196": 408313347,
        "light.light197": 391535728,
        "light.light198": 643200013,
        "light.light199": 626422394,
        "light.light2": 335133085,
        "light.light20": 522144599,
        "light.light200": 1698935589,
        "light.light201": 1682157970,
        "light.light202": 1665380351,
        "light.light203": 1648602732,
        "light.light204": 1631825113,
        "light.light205": 1615047494,
        "light.light206": 1598269875,
        "light.light207": 1581492256,
        "light.light208": 1833156541,
        "light.light209": 1816378922,
        "light.light21": 505366980,
        "light.light210": 1598122780,
        "light.light211": 1614900399,
        "light.light212": 1631678018,
        "light.light213": 1648455637,
        "light.light214": 1531012304,
        "light.light215": 1547789923,
        "light.light216": 1564567542,
        "light.light217": 1581345161,
        "light.light218": 1732343732,
        "light.light219": 1749121351,
        "light.light22": 555699837,
        "light.light23": 538922218,
        "light.light24": 455034123,
        "light.light25": 438256504,
        "light.light26": 488589361,
        "light.light27": 471811742,
        "light.light28": 387923647,
        "light.light29": 371146028,
        "light.light3": 318355466,
        "light.light30": 421331790,
        "light.light31": 438109409,
        "light.light32": 387776552,
        "light.light33": 404554171,
        "light.light34": 488442266,
        "light.light35": 505219885,
        "light.light36": 454887028,
        "light.light37": 471664647,
        "light.light38": 287110838,
        "light.light39": 303888457,
        "light.light4": 234467371,
        "light.light40": 454048385,
        "light.light41": 437270766,
        "light.light42": 420493147,
        "light.light43": 403715528,
        "light.light44": 521158861,
        "light.light45": 504381242,
        "light.light46": 487603623,
        "light.light47": 470826004,
        "light.light48": 319827433,
        "light.light49": 303049814,
        "light.light5": 217689752,
        "light.light50": 353235576,
        "light.light51": 370013195,
        "light.light52": 386790814,
        "light.light53": 403568433,
        "light.light54": 420346052,
        "light.light55": 437123671,
        "light.light56": 453901290,
        "light.light57": 470678909,
        "light.light58": 219014624,
        "light.light59": 235792243,
        "light.light6": 268022609,
        "light.light60": 2266325427,
        "light.light61": 2249547808,
        "light.light62": 2299880665,
        "light.light63": 2283103046,
        "light.light64": 2333435903,
        "light.light65": 2316658284,
        "light.light66": 2366991141,
        "light.light67": 2350213522,
        "light.light68": 2400546379,
        "light.light69": 2383768760,
        "light.light7": 251244990,
        "light.light70": 554861194,
        "light.light71": 571638813,
        "light.light72": 521305956,
        "light.light73": 538083575,
        "light.light74": 487750718,
        "light.light75": 504528337,
        "light.light76": 454195480,
        "light.light77": 470973099,
        "light.light78": 420640242,
        "light.light79": 437417861,
        "light.light8": 167356895,
        "light.light80": 2735113021,
        "light.light81": 2718335402,
        "light.light82": 2701557783,
        "light.light83": 2684780164,
        "light.light84": 2668002545,
        "light.light85": 2651224926,
        "light.light86": 2634447307,
        "light.light87": 2617669688,
        "light.light88": 2600892069,
        "light.light89": 2584114450,
        "light.light9": 150579276,
        "light.light90": 2634300212,
        "light.light91": 2651077831,
        "light.light92": 2667855450,
        "light.light93": 2684633069,
        "light.light94": 2567189736,
        "light.light95": 2583967355,
        "light.light96": 2600744974,
        "light.light97": 2617522593,
        "light.light98": 2500079260,
        "light.light99": 2516856879,
    }
    assert aid_storage.allocations == expected_allocations
    await aid_storage.async_save()
    await hass.async_block_till_done()
    # Force hash-based allocation to fail: the persisted table must be used.
    with patch("fnvhash.fnv1a_32", side_effect=Exception):
        aid_storage = AccessoryAidStorage(hass, config_entry)
        await aid_storage.async_initialize()
    assert aid_storage.allocations == expected_allocations
    # Clean up the storage file created by async_save above.
    aidstore = get_aid_storage_filename_for_entry_id(config_entry.entry_id)
    aid_storage_path = hass.config.path(STORAGE_DIR, aidstore)
    if await hass.async_add_executor_job(os.path.exists, aid_storage_path):
        await hass.async_add_executor_job(os.unlink, aid_storage_path)
| apache-2.0 |
badock/nova | nova/api/openstack/compute/contrib/createserverext.py | 100 | 1156 | # Copyright 2011 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.api.openstack import extensions
class Createserverext(extensions.ExtensionDescriptor):
    """Extended support to the Create Server v1.1 API."""

    # Descriptor metadata consumed by the extension framework.
    name = "Createserverext"
    alias = "os-create-server-ext"
    namespace = ("http://docs.openstack.org/compute/ext/"
                 "createserverext/api/v1.1")
    updated = "2011-07-19T00:00:00Z"

    def get_resources(self):
        # Expose the extended create-server endpoint, inheriting the
        # behaviour of the standard 'servers' resource.
        return [
            extensions.ResourceExtension('os-create-server-ext',
                                         inherits='servers')
        ]
| apache-2.0 |
vrkansagara/ultisnips | pythonx/UltiSnips/vim_state.py | 22 | 4763 | #!/usr/bin/env python
# encoding: utf-8
"""Some classes to conserve Vim's state for comparing over time."""
from collections import deque
from UltiSnips import _vim
from UltiSnips.compatibility import as_unicode, byte2col
from UltiSnips.position import Position
class VimPosition(Position):
    """A snapshot of the cursor position in the current buffer.

    Also records the value of mode() at the moment the snapshot was taken,
    since the mode can influence later editing decisions.
    """

    def __init__(self):
        cursor = _vim.buf.cursor
        self._mode = _vim.eval('mode()')
        Position.__init__(self, cursor.line, cursor.col)

    @property
    def mode(self):
        """The mode() Vim was in when this position was created."""
        return self._mode
class VimState(object):
    """Caches some state information from Vim to better guess what editing
    tasks the user might have done in the last step."""
    def __init__(self):
        # Ring buffer of the last few cursor positions (newest last).
        self._poss = deque(maxlen=5)
        # Last remembered buffer slice; set by remember_buffer().
        self._lvb = None
        # Text we expect to find in the unnamed register next time
        # remember_unnamed_register() runs (e.g. a selected tabstop).
        self._text_to_expect = ''
        # Whether g:_ultisnips_unnamed_reg_cache currently holds a value
        # that restore_unnamed_register() should write back.
        self._unnamed_reg_cached = False
        # We store the cached value of the unnamed register in Vim directly to
        # avoid any Unicode issues with saving and restoring the unnamed
        # register across the Python bindings. The unnamed register can contain
        # data that cannot be coerced to Unicode, and so a simple vim.eval('@"')
        # fails badly. Keeping the cached value in Vim directly, sidesteps the
        # problem.
        _vim.command('let g:_ultisnips_unnamed_reg_cache = ""')
    def remember_unnamed_register(self, text_to_expect):
        """Save the unnamed register.
        'text_to_expect' is text that we expect
        to be contained in the register the next time this method is called -
        this could be text from the tabstop that was selected and might have
        been overwritten. We will not cache that then.
        """
        self._unnamed_reg_cached = True
        # Compare against the *previous* expected text (set on the last
        # call): if the register still holds it, the register content came
        # from us and must not overwrite the user's cached value.
        # Single quotes are doubled for Vim's single-quoted string syntax.
        escaped_text = self._text_to_expect.replace("'", "''")
        res = int(_vim.eval('@" != ' + "'" + escaped_text + "'"))
        if res:
            _vim.command('let g:_ultisnips_unnamed_reg_cache = @"')
        self._text_to_expect = text_to_expect
    def restore_unnamed_register(self):
        """Restores the unnamed register and forgets what we cached."""
        if not self._unnamed_reg_cached:
            return
        _vim.command('let @" = g:_ultisnips_unnamed_reg_cache')
        self._unnamed_reg_cached = False
    def remember_position(self):
        """Remember the current position as a previous position."""
        self._poss.append(VimPosition())
    def remember_buffer(self, to):
        """Remember the content of the buffer and the position.

        'to' is an object with start/end positions; the slice between them
        (inclusive) is stored together with the total buffer length.
        """
        self._lvb = _vim.buf[to.start.line:to.end.line + 1]
        # NOTE: _lvb_len is only initialized here, not in __init__; callers
        # must call remember_buffer() before diff_in_buffer_length.
        self._lvb_len = len(_vim.buf)
        self.remember_position()
    @property
    def diff_in_buffer_length(self):
        """Returns the difference in the length of the current buffer compared
        to the remembered."""
        return len(_vim.buf) - self._lvb_len
    @property
    def pos(self):
        """The last remembered position."""
        return self._poss[-1]
    @property
    def ppos(self):
        """The second to last remembered position.

        Raises IndexError when fewer than two positions were remembered.
        """
        return self._poss[-2]
    @property
    def remembered_buffer(self):
        """The content of the remembered buffer."""
        return self._lvb[:]
class VisualContentPreserver(object):
    """Saves the current visual selection and the selection mode it was done in
    (e.g. line selection, block selection or regular selection.)"""
    def __init__(self):
        self.reset()
    def reset(self):
        """Forget the preserved state."""
        self._mode = ''
        self._text = as_unicode('')
    def conserve(self):
        """Save the last visual selection and the mode it was made in."""
        # '< and '> are the marks for the start/end of the last visual
        # selection; line() and col() return 1-based line/byte offsets.
        sl, sbyte = map(int,
                        (_vim.eval("""line("'<")"""), _vim.eval("""col("'<")""")))
        el, ebyte = map(int,
                        (_vim.eval("""line("'>")"""), _vim.eval("""col("'>")""")))
        # Convert byte offsets to character columns (multibyte-safe).
        sc = byte2col(sl, sbyte - 1)
        ec = byte2col(el, ebyte - 1)
        self._mode = _vim.eval('visualmode()')
        # Buffer lines do not carry their trailing newline; re-add it.
        _vim_line_with_eol = lambda ln: _vim.buf[ln] + '\n'
        if sl == el:
            # Single-line selection: slice between start and end columns.
            text = _vim_line_with_eol(sl - 1)[sc:ec + 1]
        else:
            # Multi-line: tail of the first line, all middle lines, then
            # the head of the last line up to the end column.
            text = _vim_line_with_eol(sl - 1)[sc:]
            for cl in range(sl, el - 1):
                text += _vim_line_with_eol(cl)
            text += _vim_line_with_eol(el - 1)[:ec + 1]
        self._text = text
    @property
    def text(self):
        """The conserved text."""
        return self._text
    @property
    def mode(self):
        """The conserved visualmode()."""
        return self._mode
| gpl-3.0 |
xianggong/m2c_unit_test | test/integer/mad_hi_uchar4uchar4uchar4/compile.py | 1861 | 4430 | #!/usr/bin/python
import os
import re
import shutil
import subprocess
def runCommand(command):
    """Run *command* (a list of arguments) and return an iterator over the
    newline-terminated (bytes) lines of its combined stdout/stderr.

    The previous implementation called p.wait() before draining stdout,
    which deadlocks once the child fills the OS pipe buffer; reading the
    whole output via communicate() avoids that.
    """
    p = subprocess.Popen(command,
                         stdout=subprocess.PIPE,
                         stderr=subprocess.STDOUT)
    output, _ = p.communicate()
    # splitlines(True) keeps line endings, matching the old
    # iter(p.stdout.readline, b'') behaviour.
    return iter(output.splitlines(True))
def dumpRunCommand(command, dump_file_name, postfix):
    """Run *command* (a shell-style string) and dump the command line plus
    its output into ``dump_file_name + postfix``.

    Uses a context manager so the log file is closed (and flushed) even if
    running the command raises; the original leaked the file handle.
    """
    # NOTE(review): runCommand yields bytes lines, which a text-mode file
    # only accepts under Python 2 (see shebang) — confirm before porting.
    with open(dump_file_name + postfix, "w+") as dump_file:
        dump_file.write(command + "\n")
        for line in runCommand(command.split()):
            dump_file.write(line)
def rmFile(file_name):
    """Remove *file_name* (file, symlink or directory tree), ignoring errors.

    Mirrors ``rm -rf``: no error when the path does not exist. Removing
    in-process fixes the original ``cmd.split()`` bug, which broke on paths
    containing whitespace, and avoids spawning a subprocess.
    """
    if os.path.isdir(file_name) and not os.path.islink(file_name):
        shutil.rmtree(file_name, ignore_errors=True)
    else:
        try:
            os.remove(file_name)
        except OSError:
            # Path missing or not removable: keep rm -rf's silent behaviour.
            pass
def rnm_ir(file_name):
    """Rewrite ``file_name + ".ll"`` in place, naming LLVM's unnamed values.

    - ``%<N>``          becomes ``%tmp_<N>``
    - ``; <label>:<N>`` becomes ``tmp_<N>:``
    - each line containing 'define' gets an 'entry:' block label appended

    Fixes the original open mode ``"rw+"``, which raises ValueError on
    Python 3 (and only worked on Python 2 via glibc leniency); ``"r+"`` is
    the read/update mode that was intended. Also closes the file reliably.
    """
    ir_file_name = file_name + ".ll"
    if not os.path.isfile(ir_file_name):
        return
    with open(ir_file_name, "r+") as fo:
        lines = fo.readlines()
        fo.seek(0)
        fo.truncate()
        for line in lines:
            # Add an explicit entry-block identifier after each definition.
            if "define" in line:
                line += "entry:\n"
            # Rename all unnamed values: %3 -> %tmp_3.
            line = re.sub(r'%([0-9]+)',
                          r'%tmp_\1',
                          line.rstrip())
            # Rename numeric branch labels: '; <label>:7' -> 'tmp_7:'.
            line = re.sub(r'(; <label>:)([0-9]+)',
                          r'tmp_\2:',
                          line.rstrip())
            fo.write(line + '\n')
def gen_ir(file_name):
    """Compile ``file_name + ".cl"`` to LLVM IR with clang (r600/verde),
    logging the command and its output to ``file_name + ".clang.log"``."""
    # Project include directory, relative to the test's location.
    include_dir = '../../../' + "inc/"
    # Header / preprocessor flags shared by every compilation.
    flags = (
        " -I " + include_dir
        + " -include " + include_dir + "m2c_buildin_fix.h "
        + " -include " + include_dir + "clc/clc.h "
        + " -D cl_clang_storage_class_specifiers "
    )
    compiler = "clang -S -emit-llvm -O0 -target r600-- -mcpu=verde "
    dumpRunCommand(compiler + flags + file_name + ".cl",
                   file_name, ".clang.log")
def asm_ir(file_name):
    """Assemble ``file_name + ".ll"`` into bitcode with llvm-as."""
    source = file_name + ".ll"
    if not os.path.isfile(source):
        return
    target = file_name + ".bc"
    runCommand(("llvm-as " + source + " -o " + target).split())
def opt_bc(file_name):
    """Run the mem2reg optimisation pass over ``file_name + ".bc"``."""
    source = file_name + ".bc"
    if not os.path.isfile(source):
        return
    target = file_name + ".opt.bc"
    runCommand(("opt --mem2reg " + source + " -o " + target).split())
def dis_bc(file_name):
    """Disassemble ``file_name + ".opt.bc"`` back to readable IR.

    The original guarded on ``file_name + ".bc"`` even though the file it
    actually reads is the optimised ``.opt.bc`` (the sibling m2c_gen step
    checks ``.opt.bc``); test the real input so a failed opt step does not
    trigger a doomed llvm-dis run.
    """
    source = file_name + ".opt.bc"
    if os.path.isfile(source):
        target = file_name + ".opt.ll"
        runCommand(("llvm-dis " + source + " -o " + target).split())
def m2c_gen(file_name):
    """Translate ``file_name + ".opt.bc"`` to Southern Islands assembly
    via m2c, logging to ``file_name + ".m2c.llvm2si.log"``."""
    source = file_name + ".opt.bc"
    if not os.path.isfile(source):
        return
    dumpRunCommand("m2c --llvm2si " + source,
                   file_name, ".m2c.llvm2si.log")
    # An empty output means the translation failed: drop the file so
    # later stages skip it.
    produced = file_name + ".opt.s"
    if os.path.isfile(produced) and os.path.getsize(produced) == 0:
        rmFile(produced)
def m2c_bin(file_name):
    """Assemble ``file_name + ".opt.s"`` into a GPU binary with m2c,
    logging to ``file_name + ".m2c.si2bin.log"``."""
    source = file_name + ".opt.s"
    if os.path.isfile(source):
        dumpRunCommand("m2c --si2bin " + source,
                       file_name, ".m2c.si2bin.log")
def main():
    """Drive the full .cl -> IR -> bitcode -> SI pipeline for every
    OpenCL source in the current directory."""
    for entry in os.listdir("./"):
        if not entry.endswith(".cl"):
            continue
        base = os.path.splitext(entry)[0]
        # Pipeline stages; each one is a no-op when its input is missing.
        gen_ir(base)
        rnm_ir(base)
        asm_ir(base)
        opt_bc(base)
        dis_bc(base)
        m2c_gen(base)
        m2c_bin(base)
if __name__ == "__main__":
    main()
| gpl-2.0 |
azaghal/ansible | test/support/integration/plugins/modules/postgresql_db.py | 53 | 23381 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: postgresql_db
short_description: Add or remove PostgreSQL databases from a remote host.
description:
- Add or remove PostgreSQL databases from a remote host.
version_added: '0.6'
options:
name:
description:
- Name of the database to add or remove
type: str
required: true
aliases: [ db ]
port:
description:
- Database port to connect (if needed)
type: int
default: 5432
aliases:
- login_port
owner:
description:
- Name of the role to set as owner of the database
type: str
template:
description:
- Template used to create the database
type: str
encoding:
description:
- Encoding of the database
type: str
lc_collate:
description:
- Collation order (LC_COLLATE) to use in the database. Must match collation order of template database unless C(template0) is used as template.
type: str
lc_ctype:
description:
- Character classification (LC_CTYPE) to use in the database (e.g. lower, upper, ...) Must match LC_CTYPE of template database unless C(template0)
is used as template.
type: str
session_role:
description:
- Switch to session_role after connecting. The specified session_role must be a role that the current login_user is a member of.
- Permissions checking for SQL commands is carried out as though the session_role were the one that had logged in originally.
type: str
version_added: '2.8'
state:
description:
- The database state.
- C(present) implies that the database should be created if necessary.
- C(absent) implies that the database should be removed if present.
- C(dump) requires a target definition to which the database will be backed up. (Added in Ansible 2.4)
Note that in some PostgreSQL versions of pg_dump, which is an embedded PostgreSQL utility and is used by the module,
returns rc 0 even when errors occurred (e.g. the connection is forbidden by pg_hba.conf, etc.),
so the module returns changed=True but the dump has not actually been done. Please, be sure that your version of
pg_dump returns rc 1 in this case.
- C(restore) also requires a target definition from which the database will be restored. (Added in Ansible 2.4)
- The format of the backup will be detected based on the target name.
- Supported compression formats for dump and restore include C(.pgc), C(.bz2), C(.gz) and C(.xz)
- Supported formats for dump and restore include C(.sql) and C(.tar)
type: str
choices: [ absent, dump, present, restore ]
default: present
target:
description:
- File to back up or restore from.
- Used when I(state) is C(dump) or C(restore).
type: path
version_added: '2.4'
target_opts:
description:
- Further arguments for pg_dump or pg_restore.
- Used when I(state) is C(dump) or C(restore).
type: str
version_added: '2.4'
maintenance_db:
description:
- The value specifies the initial database (which is also called as maintenance DB) that Ansible connects to.
type: str
default: postgres
version_added: '2.5'
conn_limit:
description:
- Specifies the database connection limit.
type: str
version_added: '2.8'
tablespace:
description:
- The tablespace to set for the database
U(https://www.postgresql.org/docs/current/sql-alterdatabase.html).
- If you want to move the database back to the default tablespace,
explicitly set this to pg_default.
type: path
version_added: '2.9'
dump_extra_args:
description:
- Provides additional arguments when I(state) is C(dump).
- Cannot be used with dump-file-format-related arguments like ``--format=d``.
type: str
version_added: '2.10'
seealso:
- name: CREATE DATABASE reference
description: Complete reference of the CREATE DATABASE command documentation.
link: https://www.postgresql.org/docs/current/sql-createdatabase.html
- name: DROP DATABASE reference
description: Complete reference of the DROP DATABASE command documentation.
link: https://www.postgresql.org/docs/current/sql-dropdatabase.html
- name: pg_dump reference
description: Complete reference of pg_dump documentation.
link: https://www.postgresql.org/docs/current/app-pgdump.html
- name: pg_restore reference
description: Complete reference of pg_restore documentation.
link: https://www.postgresql.org/docs/current/app-pgrestore.html
- module: postgresql_tablespace
- module: postgresql_info
- module: postgresql_ping
notes:
- State C(dump) and C(restore) don't require I(psycopg2) since version 2.8.
author: "Ansible Core Team"
extends_documentation_fragment:
- postgres
'''
EXAMPLES = r'''
- name: Create a new database with name "acme"
postgresql_db:
name: acme
# Note: If a template different from "template0" is specified, encoding and locale settings must match those of the template.
- name: Create a new database with name "acme" and specific encoding and locale # settings.
postgresql_db:
name: acme
encoding: UTF-8
lc_collate: de_DE.UTF-8
lc_ctype: de_DE.UTF-8
template: template0
# Note: Default limit for the number of concurrent connections to a specific database is "-1", which means "unlimited"
- name: Create a new database with name "acme" which has a limit of 100 concurrent connections
postgresql_db:
name: acme
conn_limit: "100"
- name: Dump an existing database to a file
postgresql_db:
name: acme
state: dump
target: /tmp/acme.sql
- name: Dump an existing database to a file excluding the test table
postgresql_db:
name: acme
state: dump
target: /tmp/acme.sql
dump_extra_args: --exclude-table=test
- name: Dump an existing database to a file (with compression)
postgresql_db:
name: acme
state: dump
target: /tmp/acme.sql.gz
- name: Dump a single schema for an existing database
postgresql_db:
name: acme
state: dump
target: /tmp/acme.sql
target_opts: "-n public"
# Note: In the example below, if database foo exists and has another tablespace
# the tablespace will be changed to foo. Access to the database will be locked
# until the copying of database files is finished.
- name: Create a new database called foo in tablespace bar
postgresql_db:
name: foo
tablespace: bar
'''
RETURN = r'''
executed_commands:
description: List of commands which tried to run.
returned: always
type: list
sample: ["CREATE DATABASE acme"]
version_added: '2.10'
'''
import os
import subprocess
import traceback
try:
import psycopg2
import psycopg2.extras
except ImportError:
HAS_PSYCOPG2 = False
else:
HAS_PSYCOPG2 = True
import ansible.module_utils.postgres as pgutils
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.database import SQLParseError, pg_quote_identifier
from ansible.module_utils.six import iteritems
from ansible.module_utils.six.moves import shlex_quote
from ansible.module_utils._text import to_native
executed_commands = []
class NotSupportedError(Exception):
    """Raised when a requested database change cannot be applied in place
    (e.g. altering encoding, LC_COLLATE or LC_CTYPE of an existing DB)."""
    pass
# ===========================================
# PostgreSQL module specific support methods.
#
def set_owner(cursor, db, owner):
    """Make role *owner* the owner of database *db*.

    Records the statement in executed_commands and always returns True.
    """
    query = 'ALTER DATABASE {0} OWNER TO "{1}"'.format(
        pg_quote_identifier(db, 'database'), owner)
    executed_commands.append(query)
    cursor.execute(query)
    return True
def set_conn_limit(cursor, db, conn_limit):
    """Set the concurrent-connection limit of database *db*.

    Records the statement in executed_commands and always returns True.
    """
    query = "ALTER DATABASE {0} CONNECTION LIMIT {1}".format(
        pg_quote_identifier(db, 'database'), conn_limit)
    executed_commands.append(query)
    cursor.execute(query)
    return True
def get_encoding_id(cursor, encoding):
    """Resolve *encoding* (a name such as 'UTF8') to PostgreSQL's
    numeric encoding id via the server-side pg_char_to_encoding()."""
    cursor.execute("SELECT pg_char_to_encoding(%(encoding)s) AS encoding_id;",
                   {'encoding': encoding})
    return cursor.fetchone()['encoding_id']
def get_db_info(cursor, db):
    """Return a row describing database *db*: owner, encoding (name and
    id), lc_collate, lc_ctype, conn_limit and tablespace.

    Returns the driver's "no row" value when the database does not exist
    (presumably None for a psycopg2 cursor — confirm for other drivers).
    """
    # Joins pg_roles (owner name) and pg_tablespace (tablespace name) onto
    # pg_database; the name is passed as a bound parameter for safe quoting.
    query = """
    SELECT rolname AS owner,
    pg_encoding_to_char(encoding) AS encoding, encoding AS encoding_id,
    datcollate AS lc_collate, datctype AS lc_ctype, pg_database.datconnlimit AS conn_limit,
    spcname AS tablespace
    FROM pg_database
    JOIN pg_roles ON pg_roles.oid = pg_database.datdba
    JOIN pg_tablespace ON pg_tablespace.oid = pg_database.dattablespace
    WHERE datname = %(db)s
    """
    cursor.execute(query, {'db': db})
    return cursor.fetchone()
def db_exists(cursor, db):
    """Return True when a database named *db* exists."""
    cursor.execute("SELECT * FROM pg_database WHERE datname=%(db)s",
                   {'db': db})
    # datname is unique, so exactly one row means the database exists.
    return cursor.rowcount == 1
def db_delete(cursor, db):
    """Drop database *db*.

    Returns True when the database was dropped, False when it did not
    exist. The statement is recorded in executed_commands.
    """
    if not db_exists(cursor, db):
        return False
    query = "DROP DATABASE %s" % pg_quote_identifier(db, 'database')
    executed_commands.append(query)
    cursor.execute(query)
    return True
def db_create(cursor, db, owner, template, encoding, lc_collate, lc_ctype, conn_limit, tablespace):
    """Create database *db* or reconcile an existing one.

    When *db* does not exist, builds and runs a CREATE DATABASE statement
    from the non-empty options and returns True.  When it exists, raises
    NotSupportedError for attributes PostgreSQL cannot change in place
    (encoding, LC_COLLATE, LC_CTYPE) and otherwise applies owner,
    connection-limit and tablespace changes, returning whether anything
    changed.
    """
    # Values bound server-side for the parameterised fragments below.
    params = dict(enc=encoding, collate=lc_collate, ctype=lc_ctype, conn_limit=conn_limit, tablespace=tablespace)
    if not db_exists(cursor, db):
        query_fragments = ['CREATE DATABASE %s' % pg_quote_identifier(db, 'database')]
        if owner:
            # NOTE(review): owner is quoted but not identifier-escaped --
            # presumably validated upstream; confirm for untrusted input.
            query_fragments.append('OWNER "%s"' % owner)
        if template:
            query_fragments.append('TEMPLATE %s' % pg_quote_identifier(template, 'database'))
        if encoding:
            query_fragments.append('ENCODING %(enc)s')
        if lc_collate:
            query_fragments.append('LC_COLLATE %(collate)s')
        if lc_ctype:
            query_fragments.append('LC_CTYPE %(ctype)s')
        if tablespace:
            query_fragments.append('TABLESPACE %s' % pg_quote_identifier(tablespace, 'tablespace'))
        if conn_limit:
            query_fragments.append("CONNECTION LIMIT %(conn_limit)s" % {"conn_limit": conn_limit})
        query = ' '.join(query_fragments)
        # mogrify renders the final SQL with parameters for reporting.
        executed_commands.append(cursor.mogrify(query, params))
        cursor.execute(query, params)
        return True
    else:
        db_info = get_db_info(cursor, db)
        # Immutable attributes: refuse rather than silently diverge.
        if (encoding and get_encoding_id(cursor, encoding) != db_info['encoding_id']):
            raise NotSupportedError(
                'Changing database encoding is not supported. '
                'Current encoding: %s' % db_info['encoding']
            )
        elif lc_collate and lc_collate != db_info['lc_collate']:
            raise NotSupportedError(
                'Changing LC_COLLATE is not supported. '
                'Current LC_COLLATE: %s' % db_info['lc_collate']
            )
        elif lc_ctype and lc_ctype != db_info['lc_ctype']:
            raise NotSupportedError(
                'Changing LC_CTYPE is not supported.'
                'Current LC_CTYPE: %s' % db_info['lc_ctype']
            )
        else:
            # Mutable attributes: apply each requested difference.
            changed = False
            if owner and owner != db_info['owner']:
                changed = set_owner(cursor, db, owner)
            if conn_limit and conn_limit != str(db_info['conn_limit']):
                changed = set_conn_limit(cursor, db, conn_limit)
            if tablespace and tablespace != db_info['tablespace']:
                changed = set_tablespace(cursor, db, tablespace)
            return changed
def db_matches(cursor, db, owner, template, encoding, lc_collate, lc_ctype, conn_limit, tablespace):
    """Return True when database *db* exists and every requested, non-empty
    attribute already matches its current value."""
    if not db_exists(cursor, db):
        return False
    db_info = get_db_info(cursor, db)
    if encoding and get_encoding_id(cursor, encoding) != db_info['encoding_id']:
        return False
    if lc_collate and lc_collate != db_info['lc_collate']:
        return False
    if lc_ctype and lc_ctype != db_info['lc_ctype']:
        return False
    if owner and owner != db_info['owner']:
        return False
    if conn_limit and conn_limit != str(db_info['conn_limit']):
        return False
    if tablespace and tablespace != db_info['tablespace']:
        return False
    return True
def db_dump(module, target, target_opts="",
            db=None,
            dump_extra_args=None,
            user=None,
            password=None,
            host=None,
            port=None,
            **kw):
    """Dump database *db* to file *target* with pg_dump.

    The archive format and optional compression tool are chosen from the
    target's file extension.  Returns the (rc, stdout, stderr, cmd) tuple
    produced by do_with_password().
    """
    flags = login_flags(db, host, port, user, db_prefix=False)
    cmd = module.get_bin_path('pg_dump', True)
    comp_prog_path = None

    # Pick the pg_dump archive format from the target extension.
    if os.path.splitext(target)[-1] == '.tar':
        flags.append(' --format=t')
    elif os.path.splitext(target)[-1] == '.pgc':
        flags.append(' --format=c')

    # Compressed targets pipe pg_dump's output through an external tool;
    # pigz is preferred over gzip when available (parallel compression).
    if os.path.splitext(target)[-1] == '.gz':
        if module.get_bin_path('pigz'):
            comp_prog_path = module.get_bin_path('pigz', True)
        else:
            comp_prog_path = module.get_bin_path('gzip', True)
    elif os.path.splitext(target)[-1] == '.bz2':
        comp_prog_path = module.get_bin_path('bzip2', True)
    elif os.path.splitext(target)[-1] == '.xz':
        comp_prog_path = module.get_bin_path('xz', True)

    cmd += "".join(flags)

    if dump_extra_args:
        cmd += " {0} ".format(dump_extra_args)

    if target_opts:
        cmd += " {0} ".format(target_opts)

    if comp_prog_path:
        # Use a fifo to be notified of an error in pg_dump
        # Using shell pipe has no way to return the code of the first command
        # in a portable way.
        fifo = os.path.join(module.tmpdir, 'pg_fifo')
        os.mkfifo(fifo)
        cmd = '{1} <{3} > {2} & {0} >{3}'.format(cmd, comp_prog_path, shlex_quote(target), fifo)
    else:
        cmd = '{0} > {1}'.format(cmd, shlex_quote(target))

    return do_with_password(module, cmd, password)
def db_restore(module, target, target_opts="",
               db=None,
               user=None,
               password=None,
               host=None,
               port=None,
               **kw):
    """Restore file *target* into database *db* with psql or pg_restore.

    The restore tool and optional decompression filter are chosen from the
    target's file extension.  Returns an (rc, stdout, stderr, cmd) tuple;
    the command string is masked when subprocesses are used directly.
    """
    flags = login_flags(db, host, port, user)
    comp_prog_path = None
    cmd = module.get_bin_path('psql', True)

    # Choose the restore tool from the target extension.
    if os.path.splitext(target)[-1] == '.sql':
        flags.append(' --file={0}'.format(target))
    elif os.path.splitext(target)[-1] == '.tar':
        flags.append(' --format=Tar')
        cmd = module.get_bin_path('pg_restore', True)
    elif os.path.splitext(target)[-1] == '.pgc':
        flags.append(' --format=Custom')
        cmd = module.get_bin_path('pg_restore', True)
    elif os.path.splitext(target)[-1] == '.gz':
        comp_prog_path = module.get_bin_path('zcat', True)
    elif os.path.splitext(target)[-1] == '.bz2':
        comp_prog_path = module.get_bin_path('bzcat', True)
    elif os.path.splitext(target)[-1] == '.xz':
        comp_prog_path = module.get_bin_path('xzcat', True)

    cmd += "".join(flags)
    if target_opts:
        cmd += " {0} ".format(target_opts)

    if comp_prog_path:
        # BUG FIX: the environment copy was previously discarded by rebinding
        # env to a dict containing only PGPASSWORD, which stripped PATH and
        # every other variable from the restore process.  Update the copy
        # in place instead.
        env = os.environ.copy()
        if password:
            env["PGPASSWORD"] = password
        # Decompress with the external tool and feed the restore command's
        # stdin; shell=True is needed since cmd already embeds its flags.
        p1 = subprocess.Popen([comp_prog_path, target], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        p2 = subprocess.Popen(cmd, stdin=p1.stdout, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True, env=env)
        (stdout2, stderr2) = p2.communicate()
        p1.stdout.close()
        p1.wait()
        # Report the first failing stage: decompression errors win.
        if p1.returncode != 0:
            stderr1 = p1.stderr.read()
            return p1.returncode, '', stderr1, 'cmd: ****'
        else:
            return p2.returncode, '', stderr2, 'cmd: ****'
    else:
        cmd = '{0} < {1}'.format(cmd, shlex_quote(target))
        return do_with_password(module, cmd, password)
def login_flags(db, host, port, user, db_prefix=True):
    """
    Return a list of connection argument strings, each prefixed with a
    space and shell-quoted where necessary, to later be combined into a
    single shell string with `"".join(rv)`.

    *db_prefix* determines whether "--dbname" is prefixed to the db
    argument, since that flag was introduced in PostgreSQL 9.3; otherwise
    the database is passed as a bare positional argument.
    """
    flags = []
    if db:
        template = ' --dbname={0}' if db_prefix else ' {0}'
        flags.append(template.format(shlex_quote(db)))
    if host:
        flags.append(' --host={0}'.format(host))
    if port:
        flags.append(' --port={0}'.format(port))
    if user:
        flags.append(' --username={0}'.format(user))
    return flags
def do_with_password(module, cmd, password):
    """Run shell command *cmd*, exporting PGPASSWORD when *password* is set.

    Returns (rc, stdout, stderr, cmd).  The command is also recorded in the
    module-level ``executed_commands`` list for reporting.
    """
    env = {}
    if password:
        env = {"PGPASSWORD": password}
    executed_commands.append(cmd)
    # FIX: AnsibleModule.run_command returns (rc, stdout, stderr); the
    # previous locals were named in the opposite order, which made the
    # returned tuple look wrong even though its positions were correct.
    rc, stdout, stderr = module.run_command(cmd, use_unsafe_shell=True, environ_update=env)
    return rc, stdout, stderr, cmd
def set_tablespace(cursor, db, tablespace):
    """Move database *db* to *tablespace*; always returns True.

    The statement is recorded in the module-level ``executed_commands``
    list for reporting.
    """
    statement = "ALTER DATABASE %s SET TABLESPACE %s" % (
        pg_quote_identifier(db, 'database'),
        pg_quote_identifier(tablespace, 'tablespace'))
    executed_commands.append(statement)
    cursor.execute(statement)
    return True
# ===========================================
# Module execution.
#
def main():
    """Entry point for the postgresql_db Ansible module.

    Parses module parameters, connects (except for dump/restore, which shell
    out to the PostgreSQL client tools instead), and dispatches on *state*.
    """
    argument_spec = pgutils.postgres_common_argument_spec()
    argument_spec.update(
        db=dict(type='str', required=True, aliases=['name']),
        owner=dict(type='str', default=''),
        template=dict(type='str', default=''),
        encoding=dict(type='str', default=''),
        lc_collate=dict(type='str', default=''),
        lc_ctype=dict(type='str', default=''),
        state=dict(type='str', default='present', choices=['absent', 'dump', 'present', 'restore']),
        target=dict(type='path', default=''),
        target_opts=dict(type='str', default=''),
        maintenance_db=dict(type='str', default="postgres"),
        session_role=dict(type='str'),
        conn_limit=dict(type='str', default=''),
        tablespace=dict(type='path', default=''),
        dump_extra_args=dict(type='str', default=None),
    )
    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True
    )
    db = module.params["db"]
    owner = module.params["owner"]
    template = module.params["template"]
    encoding = module.params["encoding"]
    lc_collate = module.params["lc_collate"]
    lc_ctype = module.params["lc_ctype"]
    target = module.params["target"]
    target_opts = module.params["target_opts"]
    state = module.params["state"]
    changed = False
    maintenance_db = module.params['maintenance_db']
    session_role = module.params["session_role"]
    conn_limit = module.params['conn_limit']
    tablespace = module.params['tablespace']
    dump_extra_args = module.params['dump_extra_args']
    # dump/restore use pg_dump/psql subprocesses, not a psycopg2 connection.
    raw_connection = state in ("dump", "restore")
    if not raw_connection:
        pgutils.ensure_required_libs(module)
    # To use defaults values, keyword arguments must be absent, so
    # check which values are empty and don't include in the **kw
    # dictionary
    params_map = {
        "login_host": "host",
        "login_user": "user",
        "login_password": "password",
        "port": "port",
        "ssl_mode": "sslmode",
        "ca_cert": "sslrootcert"
    }
    kw = dict((params_map[k], v) for (k, v) in iteritems(module.params)
              if k in params_map and v != '' and v is not None)
    # If a login_unix_socket is specified, incorporate it here.
    is_localhost = "host" not in kw or kw["host"] == "" or kw["host"] == "localhost"
    if is_localhost and module.params["login_unix_socket"] != "":
        kw["host"] = module.params["login_unix_socket"]
    # Default dump/restore target: <cwd>/<db>.sql.
    if target == "":
        target = "{0}/{1}.sql".format(os.getcwd(), db)
    target = os.path.expanduser(target)
    if not raw_connection:
        try:
            db_connection = psycopg2.connect(database=maintenance_db, **kw)
            # Enable autocommit so we can create databases
            if psycopg2.__version__ >= '2.4.2':
                db_connection.autocommit = True
            else:
                db_connection.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
            cursor = db_connection.cursor(cursor_factory=psycopg2.extras.DictCursor)
        except TypeError as e:
            # Older servers/drivers reject the sslrootcert keyword entirely.
            if 'sslrootcert' in e.args[0]:
                module.fail_json(msg='Postgresql server must be at least version 8.4 to support sslrootcert. Exception: {0}'.format(to_native(e)),
                                 exception=traceback.format_exc())
            module.fail_json(msg="unable to connect to database: %s" % to_native(e), exception=traceback.format_exc())
        except Exception as e:
            module.fail_json(msg="unable to connect to database: %s" % to_native(e), exception=traceback.format_exc())
        if session_role:
            try:
                cursor.execute('SET ROLE "%s"' % session_role)
            except Exception as e:
                module.fail_json(msg="Could not switch role: %s" % to_native(e), exception=traceback.format_exc())
    try:
        # Check mode: report what would change without touching anything.
        if module.check_mode:
            if state == "absent":
                changed = db_exists(cursor, db)
            elif state == "present":
                changed = not db_matches(cursor, db, owner, template, encoding, lc_collate, lc_ctype, conn_limit, tablespace)
            module.exit_json(changed=changed, db=db, executed_commands=executed_commands)
        if state == "absent":
            try:
                changed = db_delete(cursor, db)
            except SQLParseError as e:
                module.fail_json(msg=to_native(e), exception=traceback.format_exc())
        elif state == "present":
            try:
                changed = db_create(cursor, db, owner, template, encoding, lc_collate, lc_ctype, conn_limit, tablespace)
            except SQLParseError as e:
                module.fail_json(msg=to_native(e), exception=traceback.format_exc())
        elif state in ("dump", "restore"):
            # Old-style conditional expression: picks db_dump or db_restore.
            method = state == "dump" and db_dump or db_restore
            try:
                if state == 'dump':
                    rc, stdout, stderr, cmd = method(module, target, target_opts, db, dump_extra_args, **kw)
                else:
                    rc, stdout, stderr, cmd = method(module, target, target_opts, db, **kw)
                if rc != 0:
                    module.fail_json(msg=stderr, stdout=stdout, rc=rc, cmd=cmd)
                else:
                    module.exit_json(changed=True, msg=stdout, stderr=stderr, rc=rc, cmd=cmd,
                                     executed_commands=executed_commands)
            except SQLParseError as e:
                module.fail_json(msg=to_native(e), exception=traceback.format_exc())
    except NotSupportedError as e:
        module.fail_json(msg=to_native(e), exception=traceback.format_exc())
    except SystemExit:
        # Avoid catching this on Python 2.4
        raise
    except Exception as e:
        module.fail_json(msg="Database query failed: %s" % to_native(e), exception=traceback.format_exc())
    module.exit_json(changed=changed, db=db, executed_commands=executed_commands)
if __name__ == '__main__':
    # Standard Ansible module entry point.
    main()
| gpl-3.0 |
firmlyjin/brython | cgi-bin/upload_results.py | 17 | 1186 | #!/usr/bin/env python3
# CGI endpoint that stores Brython speed-test results, posted as JSON in the
# 'data' form field, into a local SQLite database.
import cgi
import sqlite3
import json

# Emit the CGI response header before any other output.
print('Content-type: text/html')
print()

_form = cgi.FieldStorage()
_data = _form.getvalue('data')
_r = json.loads(_data)

_conn = sqlite3.connect("brython_speed_results.db")
_db = _conn.cursor()

# Create the tables on first use.  The previous code probed each table with
# a SELECT wrapped in a bare except, which also swallowed unrelated database
# errors; CREATE TABLE IF NOT EXISTS is the supported idiom.
_db.execute("""create table if not exists info (userAgent text, brython_version text,
    timestamp datetime)""")
_db.execute("""create table if not exists results (id int, test_name text,
    brython_ms int,
    cpython_ms int)""")

# One info row per submission; its rowid links the per-test results.
_db.execute("""insert into info values (?,?,datetime('now'))""",
            (_r['userAgent'], _r['brython_version']))
_id = _db.lastrowid
for _test in _r['timings']:
    _t = _r['timings'][_test]
    _db.execute(""" insert into results values (?,?,?,?)""",
                (_id, _test, _t['brython'], _t['cpython']))
_conn.commit()
_db.close()
_conn.close()  # also release the connection, not just the cursor
print("OK")
| bsd-3-clause |
lihui7115/ChromiumGStreamerBackend | build/win/importlibs/create_importlib_win.py | 185 | 6790 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
"""Creates an import library from an import description file."""
import ast
import logging
import optparse
import os
import os.path
import shutil
import subprocess
import sys
import tempfile
_USAGE = """\
Usage: %prog [options] [imports-file]
Creates an import library from imports-file.
Note: this script uses the microsoft assembler (ml.exe) and the library tool
(lib.exe), both of which must be in path.
"""
_ASM_STUB_HEADER = """\
; This file is autogenerated by create_importlib_win.py, do not edit.
.386
.MODEL FLAT, C
.CODE
; Stubs to provide mangled names to lib.exe for the
; correct generation of import libs.
"""
_DEF_STUB_HEADER = """\
; This file is autogenerated by create_importlib_win.py, do not edit.
; Export declarations for generating import libs.
"""
_LOGGER = logging.getLogger()
class _Error(Exception):
  """Raised when an external tool (ml.exe / lib.exe) exits non-zero."""
  pass
class _ImportLibraryGenerator(object):
  """Builds a Windows import library from an import description, driving
  the Microsoft assembler (ml.exe) and library tool (lib.exe) inside a
  scratch directory."""

  def __init__(self, temp_dir):
    # Directory receiving all intermediate artifacts (.asm/.obj/.def/.lib).
    self._temp_dir = temp_dir

  def _Shell(self, cmd, **kw):
    """Run *cmd* via subprocess, raising _Error on a non-zero exit code."""
    ret = subprocess.call(cmd, **kw)
    _LOGGER.info('Running "%s" returned %d.', cmd, ret)
    if ret != 0:
      raise _Error('Command "%s" returned %d.' % (cmd, ret))

  def _ReadImportsFile(self, imports_file):
    # Slurp the imports file; literal_eval restricts it to Python literals.
    return ast.literal_eval(open(imports_file).read())

  def _WriteStubsFile(self, import_names, output_file):
    """Write an .asm file declaring an empty PROC/ENDP stub per name."""
    output_file.write(_ASM_STUB_HEADER)
    for name in import_names:
      output_file.write('%s PROC\n' % name)
      output_file.write('%s ENDP\n' % name)
    output_file.write('END\n')

  def _WriteDefFile(self, dll_name, import_names, output_file):
    """Write a .def file exporting the unadorned form of each name."""
    output_file.write(_DEF_STUB_HEADER)
    output_file.write('NAME %s\n' % dll_name)
    output_file.write('EXPORTS\n')
    for name in import_names:
      # Strip any stdcall "@N" suffix to get the DLL's exported name.
      name = name.split('@')[0]
      output_file.write(' %s\n' % name)

  def _CreateObj(self, dll_name, imports):
    """Writes an assembly file containing empty declarations.

    For each imported function of the form:

      AddClipboardFormatListener@4 PROC
      AddClipboardFormatListener@4 ENDP

    The resulting object file is then supplied to lib.exe with a .def file
    declaring the corresponding non-adorned exports as they appear on the
    exporting DLL, e.g.

      EXPORTS
        AddClipboardFormatListener

    In combination, the .def file and the .obj file cause lib.exe to generate
    an x86 import lib with public symbols named like
    "__imp__AddClipboardFormatListener@4", binding to exports named like
    "AddClipboardFormatListener".

    All of this is perpetrated in a temporary directory, as the intermediate
    artifacts are quick and easy to produce, and of no interest to anyone
    after the fact."""
    # Create an .asm file to provide stdcall-like stub names to lib.exe.
    asm_name = dll_name + '.asm'
    _LOGGER.info('Writing asm file "%s".', asm_name)
    with open(os.path.join(self._temp_dir, asm_name), 'wb') as stubs_file:
      self._WriteStubsFile(imports, stubs_file)
    # Invoke on the assembler to compile it to .obj.
    obj_name = dll_name + '.obj'
    cmdline = ['ml.exe', '/nologo', '/c', asm_name, '/Fo', obj_name]
    self._Shell(cmdline, cwd=self._temp_dir, stdout=open(os.devnull))
    return obj_name

  def _CreateImportLib(self, dll_name, imports, architecture, output_file):
    """Creates an import lib binding imports to dll_name for architecture.

    On success, writes the import library to output file.
    """
    obj_file = None
    # For x86 architecture we have to provide an object file for correct
    # name mangling between the import stubs and the exported functions.
    if architecture == 'x86':
      obj_file = self._CreateObj(dll_name, imports)
    # Create the corresponding .def file. This file has the non stdcall-adorned
    # names, as exported by the destination DLL.
    def_name = dll_name + '.def'
    _LOGGER.info('Writing def file "%s".', def_name)
    with open(os.path.join(self._temp_dir, def_name), 'wb') as def_file:
      self._WriteDefFile(dll_name, imports, def_file)
    # Invoke on lib.exe to create the import library.
    # We generate everything into the temporary directory, as the .exp export
    # files will be generated at the same path as the import library, and we
    # don't want those files potentially gunking the works.
    dll_base_name, ext = os.path.splitext(dll_name)
    lib_name = dll_base_name + '.lib'
    cmdline = ['lib.exe',
               '/machine:%s' % architecture,
               '/def:%s' % def_name,
               '/out:%s' % lib_name]
    if obj_file:
      cmdline.append(obj_file)
    self._Shell(cmdline, cwd=self._temp_dir, stdout=open(os.devnull))
    # Copy the .lib file to the output directory.
    shutil.copyfile(os.path.join(self._temp_dir, lib_name), output_file)
    _LOGGER.info('Created "%s".', output_file)

  def CreateImportLib(self, imports_file, output_file):
    """Read *imports_file* and write the resulting import lib to *output_file*."""
    # Read the imports file.
    imports = self._ReadImportsFile(imports_file)
    # Creates the requested import library in the output directory.
    self._CreateImportLib(imports['dll_name'],
                          imports['imports'],
                          imports.get('architecture', 'x86'),
                          output_file)
def main():
  """Command-line entry point; returns the process exit code."""
  parser = optparse.OptionParser(usage=_USAGE)
  parser.add_option('-o', '--output-file',
                    help='Specifies the output file path.')
  parser.add_option('-k', '--keep-temp-dir',
                    action='store_true',
                    help='Keep the temporary directory.')
  parser.add_option('-v', '--verbose',
                    action='store_true',
                    help='Verbose logging.')
  options, args = parser.parse_args()
  if len(args) != 1:
    parser.error('You must provide an imports file.')
  if not options.output_file:
    parser.error('You must provide an output file.')
  options.output_file = os.path.abspath(options.output_file)
  if options.verbose:
    logging.basicConfig(level=logging.INFO)
  else:
    logging.basicConfig(level=logging.WARN)
  # Work in a throwaway directory, deleted unless -k was passed.
  temp_dir = tempfile.mkdtemp()
  _LOGGER.info('Created temporary directory "%s."', temp_dir)
  try:
    # Create a generator and create the import lib.
    generator = _ImportLibraryGenerator(temp_dir)
    ret = generator.CreateImportLib(args[0], options.output_file)
  except Exception, e:
    # Python 2 syntax; any failure maps to exit code 1.
    _LOGGER.exception('Failed to create import lib.')
    ret = 1
  finally:
    if not options.keep_temp_dir:
      shutil.rmtree(temp_dir)
      _LOGGER.info('Deleted temporary directory "%s."', temp_dir)
  return ret
if __name__ == '__main__':
  # Propagate main()'s return value as the process exit status.
  sys.exit(main())
| bsd-3-clause |
seckcoder/lang-learn | python/sklearn/sklearn/decomposition/tests/test_kernel_pca.py | 1 | 7069 | import numpy as np
import scipy.sparse as sp
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_raises
from sklearn.decomposition import PCA, KernelPCA
from sklearn.datasets import make_circles
from sklearn.linear_model import Perceptron
from sklearn.utils.testing import assert_less
from sklearn.pipeline import Pipeline
from sklearn.grid_search import GridSearchCV
from sklearn.metrics.pairwise import rbf_kernel
def test_kernel_pca():
    """Fit/transform/inverse-transform round-trips on dense data for every
    solver/kernel combination."""
    rng = np.random.RandomState(0)
    X_fit = rng.random_sample((5, 4))
    X_pred = rng.random_sample((2, 4))
    for eigen_solver in ("auto", "dense", "arpack"):
        for kernel in ("linear", "rbf", "poly"):
            # transform fit data
            kpca = KernelPCA(4, kernel=kernel, eigen_solver=eigen_solver,
                             fit_inverse_transform=True)
            X_fit_transformed = kpca.fit_transform(X_fit)
            X_fit_transformed2 = kpca.fit(X_fit).transform(X_fit)
            # fit_transform and fit().transform() agree up to component sign
            assert_array_almost_equal(np.abs(X_fit_transformed),
                                      np.abs(X_fit_transformed2))
            # transform new data
            X_pred_transformed = kpca.transform(X_pred)
            assert_equal(X_pred_transformed.shape[1],
                         X_fit_transformed.shape[1])
            # inverse transform
            X_pred2 = kpca.inverse_transform(X_pred_transformed)
            assert_equal(X_pred2.shape, X_pred.shape)
def test_invalid_parameters():
    """fit_inverse_transform is rejected with a precomputed kernel."""
    assert_raises(ValueError, KernelPCA, 10, fit_inverse_transform=True,
                  kernel='precomputed')
def test_kernel_pca_sparse():
    """Kernel PCA accepts sparse (CSR) input for the solvers supporting it."""
    rng = np.random.RandomState(0)
    X_fit = sp.csr_matrix(rng.random_sample((5, 4)))
    X_pred = sp.csr_matrix(rng.random_sample((2, 4)))
    for eigen_solver in ("auto", "arpack"):
        for kernel in ("linear", "rbf", "poly"):
            # transform fit data
            kpca = KernelPCA(4, kernel=kernel, eigen_solver=eigen_solver,
                             fit_inverse_transform=False)
            X_fit_transformed = kpca.fit_transform(X_fit)
            X_fit_transformed2 = kpca.fit(X_fit).transform(X_fit)
            assert_array_almost_equal(np.abs(X_fit_transformed),
                                      np.abs(X_fit_transformed2))
            # transform new data
            X_pred_transformed = kpca.transform(X_pred)
            assert_equal(X_pred_transformed.shape[1],
                         X_fit_transformed.shape[1])
            # inverse transform: intentionally disabled for sparse input
            #X_pred2 = kpca.inverse_transform(X_pred_transformed)
            #assert_equal(X_pred2.shape, X_pred.shape)
def test_kernel_pca_linear_kernel():
    """With a linear kernel, Kernel PCA matches plain PCA up to sign."""
    rng = np.random.RandomState(0)
    X_fit = rng.random_sample((5, 4))
    X_pred = rng.random_sample((2, 4))
    # for a linear kernel, kernel PCA should find the same projection as PCA
    # modulo the sign (direction)
    # fit only the first four components: fifth is near zero eigenvalue, so
    # can be trimmed due to roundoff error
    assert_array_almost_equal(
        np.abs(KernelPCA(4).fit(X_fit).transform(X_pred)),
        np.abs(PCA(4).fit(X_fit).transform(X_pred)))
def test_kernel_pca_n_components():
    """The transformed output has exactly n_components columns."""
    rng = np.random.RandomState(0)
    X_fit = rng.random_sample((5, 4))
    X_pred = rng.random_sample((2, 4))
    for eigen_solver in ("dense", "arpack"):
        for c in [1, 2, 4]:
            kpca = KernelPCA(n_components=c, eigen_solver=eigen_solver)
            shape = kpca.fit(X_fit).transform(X_pred).shape
            assert_equal(shape, (2, c))
def test_kernel_pca_precomputed():
    """A precomputed Gram matrix reproduces the internally-computed linear
    kernel's projections, up to component sign."""
    rng = np.random.RandomState(0)
    X_fit = rng.random_sample((5, 4))
    X_pred = rng.random_sample((2, 4))
    for eigen_solver in ("dense", "arpack"):
        X_kpca = KernelPCA(4, eigen_solver=eigen_solver).\
            fit(X_fit).transform(X_pred)
        X_kpca2 = KernelPCA(4, eigen_solver=eigen_solver,
                            kernel='precomputed').fit(np.dot(X_fit,
                            X_fit.T)).transform(np.dot(X_pred, X_fit.T))
        X_kpca_train = KernelPCA(4, eigen_solver=eigen_solver,
                                 kernel='precomputed').fit_transform(np.dot(X_fit, X_fit.T))
        X_kpca_train2 = KernelPCA(4, eigen_solver=eigen_solver,
                                  kernel='precomputed').fit(np.dot(X_fit,
                                  X_fit.T)).transform(np.dot(X_fit, X_fit.T))
        assert_array_almost_equal(np.abs(X_kpca),
                                  np.abs(X_kpca2))
        assert_array_almost_equal(np.abs(X_kpca_train),
                                  np.abs(X_kpca_train2))
def test_kernel_pca_invalid_kernel():
    """An unknown kernel name raises ValueError at fit time."""
    rng = np.random.RandomState(0)
    X_fit = rng.random_sample((2, 4))
    kpca = KernelPCA(kernel="tototiti")
    assert_raises(ValueError, kpca.fit, X_fit)
def test_gridsearch_pipeline():
    # Test if we can do a grid-search to find parameters to separate
    # circles with a perceptron model.
    X, y = make_circles(n_samples=400, factor=.3, noise=.05,
                        random_state=0)
    kpca = KernelPCA(kernel="rbf", n_components=2)
    pipeline = Pipeline([("kernel_pca", kpca), ("Perceptron", Perceptron())])
    param_grid = dict(kernel_pca__gamma=2. ** np.arange(-2, 2))
    grid_search = GridSearchCV(pipeline, cv=3, param_grid=param_grid)
    grid_search.fit(X, y)
    # The best gamma should make the classes perfectly separable.
    assert_equal(grid_search.best_score_, 1)
def test_gridsearch_pipeline_precomputed():
    # Test if we can do a grid-search to find parameters to separate
    # circles with a perceptron model using a precomputed kernel.
    X, y = make_circles(n_samples=400, factor=.3, noise=.05,
                        random_state=0)
    kpca = KernelPCA(kernel="precomputed", n_components=2)
    pipeline = Pipeline([("kernel_pca", kpca), ("Perceptron", Perceptron())])
    param_grid = dict(Perceptron__n_iter=np.arange(1, 5))
    grid_search = GridSearchCV(pipeline, cv=3, param_grid=param_grid)
    X_kernel = rbf_kernel(X, gamma=2.)
    grid_search.fit(X_kernel, y)
    # Perfect separation is expected on the precomputed RBF kernel too.
    assert_equal(grid_search.best_score_, 1)
def test_nested_circles():
    """Test the linear separability of the first 2D KPCA transform"""
    X, y = make_circles(n_samples=400, factor=.3, noise=.05,
                        random_state=0)
    # 2D nested circles are not linearly separable
    train_score = Perceptron().fit(X, y).score(X, y)
    assert_less(train_score, 0.8)
    # Project the circles data into the first 2 components of a RBF Kernel
    # PCA model.
    # Note that the gamma value is data dependent. If this test breaks
    # and the gamma value has to be updated, the Kernel PCA example will
    # have to be updated too.
    kpca = KernelPCA(kernel="rbf", n_components=2,
                     fit_inverse_transform=True, gamma=2.)
    X_kpca = kpca.fit_transform(X)
    # The data is perfectly linearly separable in that space
    train_score = Perceptron().fit(X_kpca, y).score(X_kpca, y)
    assert_equal(train_score, 1.0)
if __name__ == '__main__':
    # Allow running this test module directly with nose.
    import nose
    nose.run(argv=['', __file__])
| unlicense |
saurabh6790/test-med-app | support/doctype/customer_issue/customer_issue.py | 30 | 2037 | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import webnotes
from webnotes import session, msgprint
from webnotes.utils import today
from utilities.transaction_base import TransactionBase
class DocType(TransactionBase):
    """Controller for the Customer Issue doctype (warranty/service complaints)."""

    def __init__(self, doc, doclist=[]):
        # NOTE(review): mutable default argument; kept as-is since framework
        # callers may depend on this signature.
        self.doc = doc
        self.doclist = doclist

    def validate(self):
        """Require a customer for logged-in users, and stamp resolution
        date/user when the issue transitions to Closed."""
        if session['user'] != 'Guest' and not self.doc.customer:
            msgprint("Please select Customer from whom issue is raised",
                     raise_exception=True)
        if self.doc.status=="Closed" and \
                webnotes.conn.get_value("Customer Issue", self.doc.name, "status")!="Closed":
            self.doc.resolution_date = today()
            self.doc.resolved_by = webnotes.session.user

    def on_cancel(self):
        """Block cancellation while non-cancelled Maintenance Visits still
        reference this issue; otherwise mark it Cancelled."""
        # NOTE(review): the document name is interpolated directly into the
        # SQL string -- presumably names are framework-controlled; confirm
        # before reusing this pattern with user-supplied values.
        lst = webnotes.conn.sql("select t1.name from `tabMaintenance Visit` t1, `tabMaintenance Visit Purpose` t2 where t2.parent = t1.name and t2.prevdoc_docname = '%s' and t1.docstatus!=2"%(self.doc.name))
        if lst:
            lst1 = ','.join([x[0] for x in lst])
            msgprint("Maintenance Visit No. "+lst1+" already created against this customer issue. So can not be Cancelled")
            raise Exception
        else:
            webnotes.conn.set(self.doc, 'status', 'Cancelled')

    def on_update(self):
        # No additional processing needed after save.
        pass
@webnotes.whitelist()
def make_maintenance_visit(source_name, target_doclist=None):
    """Map this Customer Issue to a new Maintenance Visit, unless a
    submitted, fully-completed visit already references it."""
    from webnotes.model.mapper import get_mapped_doclist

    # Look for an existing submitted visit that fully completed the work.
    visit = webnotes.conn.sql("""select t1.name
        from `tabMaintenance Visit` t1, `tabMaintenance Visit Purpose` t2
        where t2.parent=t1.name and t2.prevdoc_docname=%s
        and t1.docstatus=1 and t1.completion_status='Fully Completed'""", source_name)

    if not visit:
        # Map the issue's fields onto a new (unsaved) Maintenance Visit.
        doclist = get_mapped_doclist("Customer Issue", source_name, {
            "Customer Issue": {
                "doctype": "Maintenance Visit",
                "field_map": {
                    "complaint": "description",
                    "doctype": "prevdoc_doctype",
                    "name": "prevdoc_docname"
                }
            }
        }, target_doclist)
return [d.fields for d in doclist] | agpl-3.0 |
40223119/-2015cd_midterm | static/Brython3.1.1-20150328-091302/Lib/site-packages/docs.py | 624 | 1503 | from browser import document as doc
from browser import window, html, markdown
import highlight
import time
def run(ev):
    """Click handler: execute the editable Python snippet that immediately
    follows the clicked run button, then refocus the snippet."""
    # run the code in the elt after the button
    ix = ev.target.parent.children.index(ev.target)
    elt = ev.target.parent.children[ix+1]
    exec(elt.text)
    elt.focus()
def load(url,target):
    """Fetch the markdown page at *url*, render it into the element with id
    *target*, and wire up the interactive Python examples.  Returns False so
    it can be used directly as a link handler."""
    # fake query string to bypass browser cache
    qs = '?foo=%s' %time.time()
    try:
        mk,scripts = markdown.mark(open(url+qs).read())
    except IOError:
        doc[target].html = "Page %s not found" %url
        return False
    doc[target].html = mk
    # Scripts embedded in the markdown run immediately.
    for script in scripts:
        exec(script)
    for elt in doc[target].get(selector='.exec'):
        # Python code executed when user clicks on a button
        elt.contentEditable = True
        src = elt.text.strip()
        h = highlight.highlight(src)
        h.className = "pycode"
        elt.clear()
        elt <= h
        elt.focus()
        btn = html.BUTTON('▶')
        btn.bind('click', run)
        elt.parent.insertBefore(btn, elt)
    for elt in doc[target].get(selector='.exec_on_load'):
        # Python code executed on page load
        src = elt.text.strip()
        h = highlight.highlight(src)
        h.className = "pycode"
        elt.clear()
        elt <= h
        exec(src)
    # Static samples are syntax-highlighted but never executed.
    for elt in doc[target].get(selector='.python'):
        src = elt.text.strip()
        h = highlight.highlight(src)
        h.className = "pycode"
        elt.clear()
        elt <= h
    return False
| gpl-3.0 |
Hazelsuko07/17WarmingUp | py3.6/lib/python3.6/site-packages/pip/_vendor/cachecontrol/heuristics.py | 490 | 4141 | import calendar
import time
from email.utils import formatdate, parsedate, parsedate_tz
from datetime import datetime, timedelta
TIME_FMT = "%a, %d %b %Y %H:%M:%S GMT"
def expire_after(delta, date=None):
    """Return *date* advanced by the timedelta *delta*, defaulting *date*
    to the current time when omitted."""
    start = date or datetime.now()
    return start + delta
def datetime_to_header(dt):
    """Format datetime *dt* (treated as UTC) as an email-style date string."""
    epoch_seconds = calendar.timegm(dt.timetuple())
    return formatdate(epoch_seconds)
class BaseHeuristic(object):

    def warning(self, response):
        """
        Return a valid 1xx warning header value describing the cache
        adjustments, or None to omit the header.

        The response is provided too allow warnings like 113
        http://tools.ietf.org/html/rfc7234#section-5.5.4 where we need
        to explicitly say response is over 24 hours old.
        """
        return '110 - "Response is Stale"'

    def update_headers(self, response):
        """Return a dict of header updates to apply to the response.

        NOTE: This SHOULD always include some Warning header to
        signify that the response was cached by the client, not
        by way of the provided headers.
        """
        return {}

    def apply(self, response):
        """Apply this heuristic's header updates (and warning) to *response*."""
        extra_headers = self.update_headers(response)
        if extra_headers:
            response.headers.update(extra_headers)
            warning_value = self.warning(response)
            if warning_value is not None:
                response.headers.update({'Warning': warning_value})
        return response
class OneDayCache(BaseHeuristic):
    """
    Cache the response by providing an expires 1 day in the
    future.
    """

    def update_headers(self, response):
        new_headers = {}
        # Only add a policy when the server supplied none of its own.
        if 'expires' not in response.headers:
            served = parsedate(response.headers['date'])
            expiry = expire_after(timedelta(days=1),
                                  date=datetime(*served[:6]))
            new_headers['expires'] = datetime_to_header(expiry)
            new_headers['cache-control'] = 'public'
        return new_headers
class ExpiresAfter(BaseHeuristic):
    """
    Cache **all** requests for a defined time period.
    """

    def __init__(self, **kw):
        # Keyword arguments are passed straight through to timedelta().
        self.delta = timedelta(**kw)

    def update_headers(self, response):
        return {
            'expires': datetime_to_header(expire_after(self.delta)),
            'cache-control': 'public',
        }

    def warning(self, response):
        return '110 - Automatically cached for %s. Response might be stale' % self.delta
class LastModified(BaseHeuristic):
    """
    If there is no Expires header already, fall back on Last-Modified
    using the heuristic from
    http://tools.ietf.org/html/rfc7234#section-4.2.2
    to calculate a reasonable value.

    Firefox also does something like this per
    https://developer.mozilla.org/en-US/docs/Web/HTTP/Caching_FAQ
    http://lxr.mozilla.org/mozilla-release/source/netwerk/protocol/http/nsHttpResponseHead.cpp#397
    Unlike mozilla we limit this to 24-hr.
    """

    cacheable_by_default_statuses = set([
        200, 203, 204, 206, 300, 301, 404, 405, 410, 414, 501
    ])

    def update_headers(self, resp):
        hdrs = resp.headers
        # An explicit freshness policy from the server always wins.
        if 'expires' in hdrs:
            return {}
        if 'cache-control' in hdrs and hdrs['cache-control'] != 'public':
            return {}
        if resp.status not in self.cacheable_by_default_statuses:
            return {}
        if 'date' not in hdrs or 'last-modified' not in hdrs:
            return {}
        served_at = calendar.timegm(parsedate_tz(hdrs['date']))
        modified_at = parsedate(hdrs['last-modified'])
        if served_at is None or modified_at is None:
            return {}
        now = time.time()
        current_age = max(0, now - served_at)
        # Freshness is 10% of the date/last-modified gap, capped at 24h.
        delta = served_at - calendar.timegm(modified_at)
        freshness_lifetime = max(0, min(delta / 10, 24 * 3600))
        if freshness_lifetime <= current_age:
            return {}
        expires = served_at + freshness_lifetime
        return {'expires': time.strftime(TIME_FMT, time.gmtime(expires))}

    def warning(self, resp):
        # This heuristic is conservative enough that no warning is emitted.
        return None
| mit |
jonfoster/pyxb2 | tests/drivers/test-po1.py | 3 | 5914 | # -*- coding: utf-8 -*-
import logging
if __name__ == '__main__':
logging.basicConfig()
_log = logging.getLogger(__name__)
import pyxb.binding.generate
import pyxb.utils.domutils
from xml.dom import Node
import os.path
# Locate the test schema relative to this file, generate Python bindings for
# it with PyXB, and exec the generated code into this module's namespace
# (this defines USAddress, purchaseOrder, etc. used by the tests below).
schema_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '../schemas/po1.xsd'))
code = pyxb.binding.generate.GeneratePython(schema_location=schema_path)
#open('code.py', 'w').write(code)
rv = compile(code, 'test', 'exec')
eval(rv)
from pyxb.exceptions_ import *
from pyxb.utils import domutils
def ToDOM (instance, dom_support=None):
return instance.toDOM(dom_support).documentElement
import unittest
class TestPO1 (unittest.TestCase):
    """Exercise the bindings generated from po1.xsd (see module setup)."""

    # Fixture: a multi-line street value used by the simple-content tests.
    # The continuation line is deliberately unindented so the string content
    # is exactly two lines with no leading spaces.
    street_content = '''95 Main St.
Anytown, AS 12345-6789'''
    street_xmlt = u'<street>%s</street>' % (street_content,)
    street_xmld = street_xmlt.encode('utf-8')
    street_dom = pyxb.utils.domutils.StringToDOM(street_xmlt).documentElement

    address1_xmlt = u'<name>Customer</name><street>95 Main St</street>'
    address2_xmlt = u'<name>Sugar Mama</name><street>24 E. Dearling Ave.</street>'

    def tearDown (self):
        # Restore the global validation flags that testGenerationValidation
        # toggles, so test order cannot leak state.
        pyxb.RequireValidWhenGenerating(True)
        pyxb.RequireValidWhenParsing(True)

    def testPythonElementSimpleContent (self):
        # Build a simple-content element from a Python value and round-trip it.
        elt = USAddress._ElementMap['street'].elementBinding()(self.street_content)
        self.assertEqual(self.street_content, elt)
        self.assertEqual(ToDOM(elt).toxml("utf-8"), self.street_xmld)

    def testDOMElementSimpleContent (self):
        # Build the same element from a DOM node instead of a Python value.
        elt = USAddress._ElementMap['street'].elementBinding().createFromDOM(self.street_dom)
        self.assertEqual(ToDOM(elt).toxml("utf-8"), self.street_xmld)

    def testPythonElementComplexContent_Element (self):
        # Keyword, positional, and post-construction assignment all work.
        addr = USAddress(name='Customer', street='95 Main St')
        self.assertEqual('95 Main St', addr.street)
        addr = USAddress('Customer', '95 Main St')
        self.assertEqual('95 Main St', addr.street)
        addr.street = '43 West Oak'
        self.assertEqual('43 West Oak', addr.street)

    def testDOM_CTD_element (self):
        # NB: USAddress is a CTD, not an element.
        # Smoke test: constructing from a DOM node must not raise.
        xmlt = u'<shipTo>%s</shipTo>' % (self.address1_xmlt,)
        xmld = xmlt.encode('utf-8')
        dom = pyxb.utils.domutils.StringToDOM(xmlt)
        addr2 = USAddress.Factory(_dom_node=dom.documentElement)

    def testPurchaseOrder (self):
        """Round-trip a full purchase order and verify element locations."""
        po = purchaseOrder(shipTo=USAddress(name='Customer', street='95 Main St'),
                           billTo=USAddress(name='Sugar Mama', street='24 E. Dearling Ave'),
                           comment='Thanks!')
        xmld = ToDOM(po).toxml("utf-8")
        xml1t = '<ns1:purchaseOrder xmlns:ns1="http://www.example.com/PO1"><shipTo><name>Customer</name><street>95 Main St</street></shipTo><billTo><name>Sugar Mama</name><street>24 E. Dearling Ave</street></billTo><ns1:comment>Thanks!</ns1:comment></ns1:purchaseOrder>'
        xml1d = xml1t.encode('utf-8')
        self.assertEqual(xmld, xml1d)

        dom = pyxb.utils.domutils.StringToDOM(xmld)
        po2 = purchaseOrder.createFromDOM(dom.documentElement)
        self.assertEqual(xml1d, ToDOM(po2).toxml("utf-8"))
        # Column numbers (58, 131) are only checked when the binding carries
        # location information (Locatable_mixin).
        loc = po2.shipTo._location()
        self.assertTrue((not isinstance(loc, pyxb.utils.utility.Locatable_mixin)) or (58 == loc.columnNumber))
        loc = po2.billTo.name._location()
        self.assertTrue((not isinstance(loc, pyxb.utils.utility.Locatable_mixin)) or (131 == loc.columnNumber))

        po2 = CreateFromDocument(xmld)
        self.assertEqual(xml1d, ToDOM(po2).toxml("utf-8"))
        loc = po2.shipTo._location()
        self.assertTrue((not isinstance(loc, pyxb.utils.utility.Locatable_mixin)) or (58 == loc.columnNumber))
        loc = po2.billTo.name._location()
        self.assertTrue((not isinstance(loc, pyxb.utils.utility.Locatable_mixin)) or (131 == loc.columnNumber))

        # Re-serialize with a default namespace instead of the ns1 prefix.
        xml2t = '<purchaseOrder xmlns="http://www.example.com/PO1"><shipTo><name>Customer</name><street>95 Main St</street></shipTo><billTo><name>Sugar Mama</name><street>24 E. Dearling Ave</street></billTo><comment>Thanks!</comment></purchaseOrder>'
        xml2d = xml2t.encode('utf-8')
        bds = pyxb.utils.domutils.BindingDOMSupport()
        bds.setDefaultNamespace(Namespace)
        self.assertEqual(xml2d, ToDOM(po2, dom_support=bds).toxml("utf-8"))

    def testGenerationValidation (self):
        """Toggle the global validate-on-generate/parse flags and verify both paths."""
        ship_to = USAddress('Robert Smith', 'General Delivery')
        po = purchaseOrder(ship_to)
        self.assertEqual('General Delivery', po.shipTo.street)
        self.assertTrue(po.billTo is None)

        # With generation-time validation on, serializing an incomplete
        # document (missing billTo) must fail.
        self.assertTrue(pyxb.RequireValidWhenGenerating())
        self.assertRaises(pyxb.IncompleteElementContentError, po.toxml)
        try:
            pyxb.RequireValidWhenGenerating(False)
            self.assertFalse(pyxb.RequireValidWhenGenerating())
            # Two orderings are accepted because element order is not
            # deterministic when validation is off.
            xmlt = u'<ns1:purchaseOrder xmlns:ns1="http://www.example.com/PO1"><shipTo><street>General Delivery</street><name>Robert Smith</name></shipTo></ns1:purchaseOrder>'
            xmlta = u'<ns1:purchaseOrder xmlns:ns1="http://www.example.com/PO1"><shipTo><name>Robert Smith</name><street>General Delivery</street></shipTo></ns1:purchaseOrder>'
            xmlds = [ _xmlt.encode('utf-8') for _xmlt in (xmlt, xmlta) ]
            self.assertTrue(po.toxml("utf-8", root_only=True) in xmlds)
        finally:
            pyxb.RequireValidWhenGenerating(True)

        # Parsing the out-of-order document must fail while parse-time
        # validation is on, and succeed once it is turned off.
        self.assertRaises(pyxb.UnrecognizedContentError, CreateFromDocument, xmlt)
        self.assertTrue(pyxb.RequireValidWhenParsing())
        try:
            pyxb.RequireValidWhenParsing(False)
            self.assertFalse(pyxb.RequireValidWhenParsing())
            po2 = CreateFromDocument(xmlt)
        finally:
            pyxb.RequireValidWhenParsing(True)
        self.assertEqual('General Delivery', po2.shipTo.street)
        self.assertTrue(po2.billTo is None)


if __name__ == '__main__':
    unittest.main()
| apache-2.0 |
SUSE/azure-sdk-for-python | azure-mgmt-recoveryservicesbackup/azure/mgmt/recoveryservicesbackup/operations/job_cancellations_operations.py | 2 | 4214 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.pipeline import ClientRawResponse
from msrestazure.azure_exceptions import CloudError
import uuid
from .. import models
class JobCancellationsOperations(object):
    """JobCancellationsOperations operations.

    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    :ivar api_version: Client Api Version. Constant value: "2016-12-01".
    """

    def __init__(self, client, config, serializer, deserializer):
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        # Service API version pinned for every request from this group.
        self.api_version = "2016-12-01"

        self.config = config

    def trigger(
            self, vault_name, resource_group_name, job_name, custom_headers=None, raw=False, **operation_config):
        """Cancels a job. This is an asynchronous operation. To know the status of
        the cancellation, call GetCancelOperationResult API.

        :param vault_name: The name of the recovery services vault.
        :type vault_name: str
        :param resource_group_name: The name of the resource group where the
         recovery services vault is present.
        :type resource_group_name: str
        :param job_name: Name of the job to cancel.
        :type job_name: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :rtype: None
        :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
         if raw=true
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        # Construct URL
        url = '/Subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.RecoveryServices/vaults/{vaultName}/backupJobs/{jobName}/cancel'
        path_format_arguments = {
            'vaultName': self._serialize.url("vault_name", vault_name, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
            'jobName': self._serialize.url("job_name", job_name, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            # Correlation id so the service can trace this specific request.
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct and send request
        request = self._client.post(url, query_parameters)
        response = self._client.send(request, header_parameters, **operation_config)

        # The service acknowledges an accepted cancellation with 202 only.
        if response.status_code not in [202]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        if raw:
            client_raw_response = ClientRawResponse(None, response)
            return client_raw_response
| mit |
aznrice/l-preview | tools/perf/scripts/python/sched-migration.py | 11215 | 11670 | #!/usr/bin/python
#
# Cpu task migration overview toy
#
# Copyright (C) 2010 Frederic Weisbecker <fweisbec@gmail.com>
#
# perf script event handlers have been generated by perf script -g python
#
# This software is distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
import os
import sys
from collections import defaultdict
from UserList import UserList
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
sys.path.append('scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from SchedGui import *
# Map of pid -> command name, filled in as sched_switch events are observed.
threads = {0: "idle"}


def thread_name(pid):
    """Render a pid as "comm:pid" using the global thread-name map."""
    comm = threads[pid]
    return "{0}:{1:d}".format(comm, pid)
class RunqueueEventUnknown:
    """Fallback event used when the cause of a runqueue change is not known."""

    @staticmethod
    def color():
        return None

    def __repr__(self):
        return "unknown"


class RunqueueEventSleep:
    """A task left the runqueue because it went to sleep."""

    def __init__(self, pid):
        self.sleeper = pid

    @staticmethod
    def color():
        return (0, 0, 0xff)

    def __repr__(self):
        return "%s gone to sleep" % thread_name(self.sleeper)


class RunqueueEventWakeup:
    """A sleeping task was woken and placed back on the runqueue."""

    def __init__(self, pid):
        self.wakee = pid

    @staticmethod
    def color():
        return (0xff, 0xff, 0)

    def __repr__(self):
        return "%s woke up" % thread_name(self.wakee)


class RunqueueEventFork:
    """A freshly forked task entered the runqueue."""

    def __init__(self, pid):
        self.child = pid

    @staticmethod
    def color():
        return (0, 0xff, 0)

    def __repr__(self):
        return "new forked task %s" % thread_name(self.child)


class RunqueueMigrateIn:
    """A task was migrated onto this CPU's runqueue."""

    def __init__(self, pid):
        self.new = pid

    @staticmethod
    def color():
        return (0, 0xf0, 0xff)

    def __repr__(self):
        return "task migrated in %s" % thread_name(self.new)


class RunqueueMigrateOut:
    """A task was migrated away from this CPU's runqueue."""

    def __init__(self, pid):
        self.old = pid

    @staticmethod
    def color():
        return (0xff, 0, 0xff)

    def __repr__(self):
        return "task migrated out %s" % thread_name(self.old)
class RunqueueSnapshot:
    """Snapshot of one CPU runqueue, plus the event that produced it."""

    def __init__(self, tasks=(0,), event=None):
        # Bug fixes: the original default was a mutable list ([0]) and a
        # single RunqueueEventUnknown instance shared by every snapshot
        # (default evaluated once at def time).  Use an immutable tuple and
        # create a fresh event per snapshot instead.
        self.tasks = tuple(tasks)
        self.event = event if event is not None else RunqueueEventUnknown()

    def sched_switch(self, prev, prev_state, next_pid):
        """Return the snapshot after switching from prev to next_pid."""
        event = RunqueueEventUnknown()

        # Nothing changed: both tasks stay runnable and already tracked.
        if taskState(prev_state) == "R" and next_pid in self.tasks \
                and prev in self.tasks:
            return self

        if taskState(prev_state) != "R":
            event = RunqueueEventSleep(prev)

        next_tasks = list(self.tasks)
        if prev in self.tasks:
            if taskState(prev_state) != "R":
                next_tasks.remove(prev)
        elif taskState(prev_state) == "R":
            next_tasks.append(prev)

        if next_pid not in next_tasks:
            next_tasks.append(next_pid)

        return RunqueueSnapshot(next_tasks, event)

    def migrate_out(self, old):
        """Return the snapshot after task `old` migrated away (self if absent)."""
        if old not in self.tasks:
            return self
        next_tasks = [task for task in self.tasks if task != old]

        return RunqueueSnapshot(next_tasks, RunqueueMigrateOut(old))

    def __migrate_in(self, new, event):
        # If the task is already present, only record the triggering event.
        if new in self.tasks:
            self.event = event
            return self
        next_tasks = self.tasks + (new,)

        return RunqueueSnapshot(next_tasks, event)

    def migrate_in(self, new):
        return self.__migrate_in(new, RunqueueMigrateIn(new))

    def wake_up(self, new):
        return self.__migrate_in(new, RunqueueEventWakeup(new))

    def wake_up_new(self, new):
        return self.__migrate_in(new, RunqueueEventFork(new))

    def load(self):
        """ Provide the number of tasks on the runqueue.
            Don't count idle"""
        return len(self.tasks) - 1

    def __repr__(self):
        # Bug fix: the original called self.origin_tostring(), a method that
        # does not exist anywhere, so printing a snapshot always raised
        # AttributeError.  Report the tasks and the originating event instead.
        return "%s %s" % (self.tasks.__repr__(), self.event.__repr__())
class TimeSlice:
    """State of every CPU runqueue between two consecutive scheduler events."""
    def __init__(self, start, prev):
        self.start = start
        self.prev = prev
        self.end = start
        # cpus that triggered the event
        self.event_cpus = []
        if prev is not None:
            # Inherit the previous slice's state; copy() so updates here do
            # not mutate the predecessor's per-cpu map.
            self.total_load = prev.total_load
            self.rqs = prev.rqs.copy()
        else:
            self.rqs = defaultdict(RunqueueSnapshot)
            self.total_load = 0

    def __update_total_load(self, old_rq, new_rq):
        # Keep the aggregate load in sync when one cpu's snapshot is replaced.
        diff = new_rq.load() - old_rq.load()
        self.total_load += diff

    def sched_switch(self, ts_list, prev, prev_state, next, cpu):
        """Apply a sched_switch on `cpu`; append self to ts_list only if it changed anything."""
        old_rq = self.prev.rqs[cpu]
        new_rq = old_rq.sched_switch(prev, prev_state, next)

        if old_rq is new_rq:
            return

        self.rqs[cpu] = new_rq
        self.__update_total_load(old_rq, new_rq)
        ts_list.append(self)
        self.event_cpus = [cpu]

    def migrate(self, ts_list, new, old_cpu, new_cpu):
        """Move task `new` from old_cpu's runqueue to new_cpu's runqueue."""
        if old_cpu == new_cpu:
            return
        old_rq = self.prev.rqs[old_cpu]
        out_rq = old_rq.migrate_out(new)
        self.rqs[old_cpu] = out_rq
        self.__update_total_load(old_rq, out_rq)

        new_rq = self.prev.rqs[new_cpu]
        in_rq = new_rq.migrate_in(new)
        self.rqs[new_cpu] = in_rq
        self.__update_total_load(new_rq, in_rq)

        ts_list.append(self)

        # The source cpu is only an event cpu if the task was really there.
        if old_rq is not out_rq:
            self.event_cpus.append(old_cpu)
        self.event_cpus.append(new_cpu)

    def wake_up(self, ts_list, pid, cpu, fork):
        """Wake pid onto cpu's runqueue; `fork` truthy means a wakeup-new event."""
        old_rq = self.prev.rqs[cpu]
        if fork:
            new_rq = old_rq.wake_up_new(pid)
        else:
            new_rq = old_rq.wake_up(pid)

        if new_rq is old_rq:
            return

        self.rqs[cpu] = new_rq
        self.__update_total_load(old_rq, new_rq)
        ts_list.append(self)
        self.event_cpus = [cpu]

    def next(self, t):
        # Close this slice at time t and open the following one.
        self.end = t
        return TimeSlice(t, self)
class TimeSliceList(UserList):
    """Ordered list of TimeSlices plus lookup/drawing helpers for the GUI."""
    def __init__(self, arg = []):
        # NOTE(review): the mutable default list is shared across calls; it
        # looks harmless because the script creates exactly one instance and
        # never passes an argument -- confirm before reusing this class.
        self.data = arg

    def get_time_slice(self, ts):
        """Return the slice that starts at timestamp ts, creating it if needed."""
        if len(self.data) == 0:
            # Very first event: bootstrap with a synthetic predecessor slice.
            slice = TimeSlice(ts, TimeSlice(-1, None))
        else:
            slice = self.data[-1].next(ts)
        return slice

    def find_time_slice(self, ts):
        """Binary-search the index of the slice containing ts; -1 when not found."""
        start = 0
        end = len(self.data)
        found = -1
        searching = True
        while searching:
            # Last iteration once the window collapses to <= 1 candidate.
            if start == end or start == end - 1:
                searching = False

            # Python 2 integer division keeps i a valid int index.
            i = (end + start) / 2
            if self.data[i].start <= ts and self.data[i].end >= ts:
                found = i
                end = i
                continue

            if self.data[i].end < ts:
                start = i

            elif self.data[i].start > ts:
                end = i

        return found

    def set_root_win(self, win):
        # GUI back-reference used by the paint/summary callbacks below.
        self.root_win = win

    def mouse_down(self, cpu, t):
        """Show a textual summary of cpu's runqueue at time t in the GUI."""
        idx = self.find_time_slice(t)
        if idx == -1:
            return

        ts = self[idx]
        rq = ts.rqs[cpu]
        raw = "CPU: %d\n" % cpu
        raw += "Last event : %s\n" % rq.event.__repr__()
        raw += "Timestamp : %d.%06d\n" % (ts.start / (10 ** 9), (ts.start % (10 ** 9)) / 1000)
        raw += "Duration : %6d us\n" % ((ts.end - ts.start) / (10 ** 6))
        raw += "Load = %d\n" % rq.load()
        for t in rq.tasks:
            raw += "%s \n" % thread_name(t)

        self.root_win.update_summary(raw)

    def update_rectangle_cpu(self, slice, cpu):
        """Paint one cpu/slice cell; redder means a larger share of total load."""
        rq = slice.rqs[cpu]

        if slice.total_load != 0:
            load_rate = rq.load() / float(slice.total_load)
        else:
            load_rate = 0

        red_power = int(0xff - (0xff * load_rate))
        color = (0xff, red_power, red_power)

        top_color = None

        if cpu in slice.event_cpus:
            # Highlight the cpu(s) that triggered this slice's event.
            top_color = rq.event.color()

        self.root_win.paint_rectangle_zone(cpu, color, top_color, slice.start, slice.end)

    def fill_zone(self, start, end):
        """Repaint every slice intersecting the [start, end] time window."""
        i = self.find_time_slice(start)
        if i == -1:
            return

        for i in xrange(i, len(self.data)):
            timeslice = self.data[i]
            if timeslice.start > end:
                return

            for cpu in timeslice.rqs:
                self.update_rectangle_cpu(timeslice, cpu)

    def interval(self):
        # (first start, last end) of the recorded trace, or (0, 0) when empty.
        if len(self.data) == 0:
            return (0, 0)

        return (self.data[0].start, self.data[-1].end)

    def nr_rectangles(self):
        """Highest cpu number seen in the final slice (rows needed by the GUI)."""
        last_ts = self.data[-1]
        max_cpu = 0
        for cpu in last_ts.rqs:
            if cpu > max_cpu:
                max_cpu = cpu
        return max_cpu
class SchedEventProxy:
    """Dispatches decoded perf sched events into the global TimeSliceList."""
    def __init__(self):
        # Last task seen running on each cpu; -1 means not known yet.
        self.current_tsk = defaultdict(lambda : -1)
        self.timeslices = TimeSliceList()

    def sched_switch(self, headers, prev_comm, prev_pid, prev_prio, prev_state,
            next_comm, next_pid, next_prio):
        """ Ensure the task we sched out this cpu is really the one
            we logged. Otherwise we may have missed traces """
        on_cpu_task = self.current_tsk[headers.cpu]

        if on_cpu_task != -1 and on_cpu_task != prev_pid:
            # Python 2 print statement: this script runs under perf's
            # embedded Python 2 interpreter.
            print "Sched switch event rejected ts: %s cpu: %d prev: %s(%d) next: %s(%d)" % \
                (headers.ts_format(), headers.cpu, prev_comm, prev_pid, next_comm, next_pid)

        # Remember the command names so thread_name() can label both pids.
        threads[prev_pid] = prev_comm
        threads[next_pid] = next_comm
        self.current_tsk[headers.cpu] = next_pid

        ts = self.timeslices.get_time_slice(headers.ts())
        ts.sched_switch(self.timeslices, prev_pid, prev_state, next_pid, headers.cpu)

    def migrate(self, headers, pid, prio, orig_cpu, dest_cpu):
        # Record a cross-cpu task migration at the event's timestamp.
        ts = self.timeslices.get_time_slice(headers.ts())
        ts.migrate(self.timeslices, pid, orig_cpu, dest_cpu)

    def wake_up(self, headers, comm, pid, success, target_cpu, fork):
        """Record a (successful) wakeup; fork distinguishes wakeup-new events."""
        if success == 0:
            return
        ts = self.timeslices.get_time_slice(headers.ts())
        ts.wake_up(self.timeslices, pid, target_cpu, fork)
def trace_begin():
    # perf hook: runs once before any event; create the global event parser.
    global parser
    parser = SchedEventProxy()

def trace_end():
    # perf hook: runs after the last event; launch the wx visualization.
    # NOTE(review): `wx` and RootFrame appear to be provided by the
    # `from SchedGui import *` at the top of the file -- confirm.
    app = wx.App(False)
    timeslices = parser.timeslices
    frame = RootFrame(timeslices, "Migration")
    app.MainLoop()
# The handlers below are generated by `perf script -g python` for every event
# in the sched tracepoint family.  Only the migrate/switch/wakeup events feed
# the visualization, so these intentionally do nothing; their signatures must
# match the arguments perf passes and therefore must not be changed.
def sched__sched_stat_runtime(event_name, context, common_cpu,
        common_secs, common_nsecs, common_pid, common_comm,
        comm, pid, runtime, vruntime):
    pass

def sched__sched_stat_iowait(event_name, context, common_cpu,
        common_secs, common_nsecs, common_pid, common_comm,
        comm, pid, delay):
    pass

def sched__sched_stat_sleep(event_name, context, common_cpu,
        common_secs, common_nsecs, common_pid, common_comm,
        comm, pid, delay):
    pass

def sched__sched_stat_wait(event_name, context, common_cpu,
        common_secs, common_nsecs, common_pid, common_comm,
        comm, pid, delay):
    pass

def sched__sched_process_fork(event_name, context, common_cpu,
        common_secs, common_nsecs, common_pid, common_comm,
        parent_comm, parent_pid, child_comm, child_pid):
    pass

def sched__sched_process_wait(event_name, context, common_cpu,
        common_secs, common_nsecs, common_pid, common_comm,
        comm, pid, prio):
    pass

def sched__sched_process_exit(event_name, context, common_cpu,
        common_secs, common_nsecs, common_pid, common_comm,
        comm, pid, prio):
    pass

def sched__sched_process_free(event_name, context, common_cpu,
        common_secs, common_nsecs, common_pid, common_comm,
        comm, pid, prio):
    pass
# Active handlers: these four events drive the migration visualization.
# Each one wraps the common fields in an EventHeaders and forwards to the
# global `parser` created in trace_begin().
def sched__sched_migrate_task(event_name, context, common_cpu,
        common_secs, common_nsecs, common_pid, common_comm,
        comm, pid, prio, orig_cpu,
        dest_cpu):
    headers = EventHeaders(common_cpu, common_secs, common_nsecs,
        common_pid, common_comm)
    parser.migrate(headers, pid, prio, orig_cpu, dest_cpu)

def sched__sched_switch(event_name, context, common_cpu,
        common_secs, common_nsecs, common_pid, common_comm,
        prev_comm, prev_pid, prev_prio, prev_state,
        next_comm, next_pid, next_prio):
    headers = EventHeaders(common_cpu, common_secs, common_nsecs,
        common_pid, common_comm)
    parser.sched_switch(headers, prev_comm, prev_pid, prev_prio, prev_state,
        next_comm, next_pid, next_prio)

def sched__sched_wakeup_new(event_name, context, common_cpu,
        common_secs, common_nsecs, common_pid, common_comm,
        comm, pid, prio, success,
        target_cpu):
    headers = EventHeaders(common_cpu, common_secs, common_nsecs,
        common_pid, common_comm)
    # Final argument 1 marks this as a fork (wakeup of a brand-new task).
    parser.wake_up(headers, comm, pid, success, target_cpu, 1)

def sched__sched_wakeup(event_name, context, common_cpu,
        common_secs, common_nsecs, common_pid, common_comm,
        comm, pid, prio, success,
        target_cpu):
    headers = EventHeaders(common_cpu, common_secs, common_nsecs,
        common_pid, common_comm)
    # Final argument 0 marks this as a plain wakeup of an existing task.
    parser.wake_up(headers, comm, pid, success, target_cpu, 0)
# Remaining generated callbacks; unused by this tool, kept so perf finds a
# handler for every event it decodes.
def sched__sched_wait_task(event_name, context, common_cpu,
        common_secs, common_nsecs, common_pid, common_comm,
        comm, pid, prio):
    pass

def sched__sched_kthread_stop_ret(event_name, context, common_cpu,
        common_secs, common_nsecs, common_pid, common_comm,
        ret):
    pass

def sched__sched_kthread_stop(event_name, context, common_cpu,
        common_secs, common_nsecs, common_pid, common_comm,
        comm, pid):
    pass

def trace_unhandled(event_name, context, common_cpu, common_secs, common_nsecs,
        common_pid, common_comm):
    # Catch-all for events without a dedicated handler; ignored.
    pass
| gpl-2.0 |
domino-team/openwrt-cc | package/gli-pub/openwrt-node-packages-master/node/node-v6.9.1/deps/v8_inspector/third_party/jinja2/jinja2/__init__.py | 27 | 2330 | # -*- coding: utf-8 -*-
"""
jinja2
~~~~~~
Jinja2 is a template engine written in pure Python. It provides a
Django inspired non-XML syntax but supports inline expressions and
an optional sandboxed environment.
Nutshell
--------
    Here is a small example of a Jinja2 template::
{% extends 'base.html' %}
{% block title %}Memberlist{% endblock %}
{% block content %}
<ul>
{% for user in users %}
<li><a href="{{ user.url }}">{{ user.username }}</a></li>
{% endfor %}
</ul>
{% endblock %}
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
__docformat__ = 'restructuredtext en'
__version__ = '2.9.dev'
# high level interface
from jinja2.environment import Environment, Template
# loaders
from jinja2.loaders import BaseLoader, FileSystemLoader, PackageLoader, \
DictLoader, FunctionLoader, PrefixLoader, ChoiceLoader, \
ModuleLoader
# bytecode caches
from jinja2.bccache import BytecodeCache, FileSystemBytecodeCache, \
MemcachedBytecodeCache
# undefined types
from jinja2.runtime import Undefined, DebugUndefined, StrictUndefined, \
make_logging_undefined
# exceptions
from jinja2.exceptions import TemplateError, UndefinedError, \
TemplateNotFound, TemplatesNotFound, TemplateSyntaxError, \
TemplateAssertionError
# decorators and public utilities
from jinja2.filters import environmentfilter, contextfilter, \
evalcontextfilter
from jinja2.utils import Markup, escape, clear_caches, \
environmentfunction, evalcontextfunction, contextfunction, \
is_undefined
__all__ = [
'Environment', 'Template', 'BaseLoader', 'FileSystemLoader',
'PackageLoader', 'DictLoader', 'FunctionLoader', 'PrefixLoader',
'ChoiceLoader', 'BytecodeCache', 'FileSystemBytecodeCache',
'MemcachedBytecodeCache', 'Undefined', 'DebugUndefined',
'StrictUndefined', 'TemplateError', 'UndefinedError', 'TemplateNotFound',
'TemplatesNotFound', 'TemplateSyntaxError', 'TemplateAssertionError',
'ModuleLoader', 'environmentfilter', 'contextfilter', 'Markup', 'escape',
'environmentfunction', 'contextfunction', 'clear_caches', 'is_undefined',
'evalcontextfilter', 'evalcontextfunction', 'make_logging_undefined',
]
| gpl-2.0 |
Innovahn/cybex | addons/hr_recruitment/wizard/__init__.py | 381 | 1095 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-Today OpenERP (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import hr_recruitment_create_partner_job
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
Clever/Diamond | src/collectors/snmpraw/test/testsnmpraw.py | 29 | 1797 | #!/usr/bin/python
# coding=utf-8
###############################################################################
import time
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import Mock
from mock import patch
from snmpraw import SNMPRawCollector
from diamond.collector import Collector
###############################################################################
class TestSNMPRawCollector(CollectorTestCase):
    """Unit tests for SNMPRawCollector."""
    def setUp(self):
        # Build the collector from the default config with no overrides and
        # no metric handlers.
        config = get_collector_config('SNMPRawCollector', {
        })

        self.collector = SNMPRawCollector(config, None)

    def test_import(self):
        # Smoke test: the collector class imported successfully.
        self.assertTrue(SNMPRawCollector)

    # Mock the publish path, freeze time at 1000, and force every SNMP read
    # to return 5 so the published metric is fully predictable.
    @patch.object(Collector, 'publish_metric')
    @patch.object(time, 'time', Mock(return_value=1000))
    @patch.object(SNMPRawCollector, '_get_value', Mock(return_value=5))
    def test_metric(self, collect_mock):
        """Collect one OID and verify every field of the published metric."""
        test_config = {'devices': {'test': {'oids': {'1.1.1.1': 'test'}}}}
        self.collector.config.update(test_config)
        # Expected metric path: <path_prefix>.test.<path_suffix>.test
        path = '.'.join([self.collector.config['path_prefix'], 'test',
                         self.collector.config['path_suffix'], 'test'])

        self.collector.collect_snmp('test', None, None, None)

        metric = collect_mock.call_args[0][0]
        self.assertEqual(metric.metric_type, 'GAUGE')
        self.assertEqual(metric.ttl, None)
        self.assertEqual(metric.value, self.collector._get_value())
        self.assertEqual(metric.precision, self.collector._precision(5))
        self.assertEqual(metric.host, None)
        self.assertEqual(metric.path, path)
        self.assertEqual(metric.timestamp, 1000)

###############################################################################
if __name__ == "__main__":
    unittest.main()
| mit |
Freestila/dosage | scripts/order-symlinks.py | 3 | 2039 | #!/usr/bin/env python
# Copyright (C) 2013 Tobias Gruetzmacher
"""
This script takes the JSON file created by 'dosage -o json' and uses the
metadata to build a symlink farm in the deduced order of the comic. It created
those in a subdirectory called 'inorder'.
"""
from __future__ import print_function
import sys
import os
import codecs
import json
def jsonFn(d):
    """Return the path of the 'dosage.json' metadata file inside directory d."""
    return os.path.join(d, 'dosage.json')


def loadJson(d):
    """Parse and return the JSON metadata stored in directory d."""
    with codecs.open(jsonFn(d), 'r', 'utf-8') as handle:
        parsed = json.load(handle)
    return parsed
def prepare_output(d):
    """Ensure d/inorder exists, remove any symlinks inside it, return its path."""
    out_dir = os.path.join(d, 'inorder')
    if not os.path.exists(out_dir):
        os.mkdir(out_dir)
    for entry in os.listdir(out_dir):
        full = os.path.join(out_dir, entry)
        if os.path.islink(full):
            os.remove(full)
    return out_dir
def create_symlinks(d):
    """Create new symbolic links in output directory.

    Reads d's dosage.json, walks each comic chain backwards via the 'prev'
    links to find its latest page, then emits numbered symlinks in reading
    order under d/inorder.
    """
    data = loadJson(d)
    outDir = prepare_output(d)
    # Bug fix: on Python 3, dict.keys() returns a view that supports neither
    # indexing ([0]) nor .remove(); the file imports print_function for
    # py2/py3 compatibility, so materialize a real list here.
    unseen = list(data["pages"].keys())
    while unseen:
        # Follow 'prev' links from an arbitrary unseen page to find the
        # newest page of this chain, consuming every page we pass.
        latest = work = unseen[0]
        while work in unseen:
            unseen.remove(work)
            if "prev" in data["pages"][work]:
                work = data["pages"][work]["prev"]
        print("Latest page: %s" % (latest))

        # Collect image names newest-first, then reverse into reading order.
        order = []
        work = latest
        while work in data["pages"]:
            order.extend(data["pages"][work]["images"].values())
            if "prev" in data["pages"][work]:
                work = data["pages"][work]["prev"]
            else:
                work = None
        order.reverse()
        for i, img in enumerate(order):
            os.symlink(os.path.join('..', img),
                       os.path.join(outDir, '%05i_%s' % (i, img)))
if __name__ == '__main__':
    # Each command-line argument is a comic directory; only process those
    # that actually contain a dosage.json metadata file.
    if len(sys.argv) > 1:
        for d in sys.argv[1:]:
            if os.path.exists(jsonFn(d)):
                create_symlinks(d)
            else:
                print("No JSON file found in '%s'." % (d))
    else:
        print("Usage: %s comic-dirs" % (os.path.basename(sys.argv[0])))
| mit |
ytjiang/django | tests/utils_tests/test_duration.py | 364 | 1677 | import datetime
import unittest
from django.utils.dateparse import parse_duration
from django.utils.duration import duration_string
class TestDurationString(unittest.TestCase):
    """duration_string() must render timedeltas as [-][D ]HH:MM:SS[.uuuuuu]."""

    def test_simple(self):
        self.assertEqual(
            duration_string(datetime.timedelta(hours=1, minutes=3, seconds=5)),
            '01:03:05')

    def test_days(self):
        self.assertEqual(
            duration_string(datetime.timedelta(days=1, hours=1, minutes=3, seconds=5)),
            '1 01:03:05')

    def test_microseconds(self):
        self.assertEqual(
            duration_string(datetime.timedelta(hours=1, minutes=3, seconds=5,
                                               microseconds=12345)),
            '01:03:05.012345')

    def test_negative(self):
        self.assertEqual(
            duration_string(datetime.timedelta(days=-1, hours=1, minutes=3, seconds=5)),
            '-1 01:03:05')
class TestParseDurationRoundtrip(unittest.TestCase):
    """parse_duration(duration_string(d)) must reproduce d exactly."""

    def _assert_roundtrip(self, duration):
        # Shared helper: format, re-parse, and compare with the original.
        self.assertEqual(parse_duration(duration_string(duration)), duration)

    def test_simple(self):
        self._assert_roundtrip(datetime.timedelta(hours=1, minutes=3, seconds=5))

    def test_days(self):
        self._assert_roundtrip(
            datetime.timedelta(days=1, hours=1, minutes=3, seconds=5))

    def test_microseconds(self):
        self._assert_roundtrip(
            datetime.timedelta(hours=1, minutes=3, seconds=5, microseconds=12345))

    def test_negative(self):
        self._assert_roundtrip(
            datetime.timedelta(days=-1, hours=1, minutes=3, seconds=5))
spacecaker/android_kernel_acer_swing_msm8960_3.4.6 | scripts/gcc-wrapper.py | 501 | 3410 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2011-2012, Code Aurora Forum. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Code Aurora nor
# the names of its contributors may be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NON-INFRINGEMENT ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Invoke gcc, looking for warnings, and causing a failure if there are
# non-whitelisted warnings.
import errno
import re
import os
import sys
import subprocess
# Note that gcc uses unicode, which may depend on the locale. TODO:
# force LANG to be set to en_US.UTF-8 to get consistent warnings.
# Warnings at these exact file:line locations are tolerated; any other gcc
# warning aborts the build.
allowed_warnings = set([
    "alignment.c:327",
    "mmu.c:602",
    "return_address.c:62",
 ])

# Capture the name of the object file so we can delete it when a forbidden
# warning is found (otherwise a stale .o would satisfy make's next run).
ofile = None

# Matches "path/file.ext:LINE: warning:" and captures "file.ext:LINE".
warning_re = re.compile(r'''(.*/|)([^/]+\.[a-z]+:\d+):(\d+:)? warning:''')
def interpret_warning(line):
    """Decode the message from gcc. The messages we care about have a filename, and a warning"""
    line = line.rstrip('\n')
    m = warning_re.match(line)
    if m and m.group(2) not in allowed_warnings:
        # Python 2 print statement: this wrapper targets py2 build hosts.
        print "error, forbidden warning:", m.group(2)

        # If there is a warning, remove any object if it exists.
        if ofile:
            try:
                os.remove(ofile)
            except OSError:
                pass
        sys.exit(1)
def run_gcc():
    """Invoke the real compiler, scanning its stderr for warnings.

    Returns the compiler's exit status, or the errno if the compiler
    could not be launched at all.
    """
    args = sys.argv[1:]
    # Look for -o so interpret_warning() knows which object file to
    # delete when a forbidden warning is seen.
    try:
        i = args.index('-o')
        global ofile
        ofile = args[i+1]
    except (ValueError, IndexError):
        pass
    # NOTE(review): 'compiler' is assigned but never used below.
    compiler = sys.argv[0]
    try:
        proc = subprocess.Popen(args, stderr=subprocess.PIPE)
        # Echo gcc's stderr while checking each line for forbidden warnings.
        for line in proc.stderr:
            print line,
            interpret_warning(line)

        result = proc.wait()
    except OSError as e:
        # The compiler itself could not be started; surface the errno
        # as the exit status.
        result = e.errno
        if result == errno.ENOENT:
            print args[0] + ':',e.strerror
            print 'Is your PATH set correctly?'
        else:
            print ' '.join(args), str(e)

    return result

if __name__ == '__main__':
    status = run_gcc()
    sys.exit(status)
| gpl-2.0 |
jlew/Web2Py-Inventory | languages/pl.py | 16 | 4832 | # coding: utf8
{
'"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '"Uaktualnij" jest dodatkowym wyra\xc5\xbceniem postaci "pole1=\'nowawarto\xc5\x9b\xc4\x87\'". Nie mo\xc5\xbcesz uaktualni\xc4\x87 lub usun\xc4\x85\xc4\x87 wynik\xc3\xb3w z JOIN:',
'%Y-%m-%d': '%Y-%m-%d',
'%Y-%m-%d %H:%M:%S': '%Y-%m-%d %H:%M:%S',
'%s rows deleted': 'Wierszy usuni\xc4\x99tych: %s',
'%s rows updated': 'Wierszy uaktualnionych: %s',
'Authentication': 'Uwierzytelnienie',
'Available databases and tables': 'Dost\xc4\x99pne bazy danych i tabele',
'Cannot be empty': 'Nie mo\xc5\xbce by\xc4\x87 puste',
'Change Password': 'Zmie\xc5\x84 has\xc5\x82o',
'Check to delete': 'Zaznacz aby usun\xc4\x85\xc4\x87',
'Check to delete:': 'Zaznacz aby usun\xc4\x85\xc4\x87:',
'Client IP': 'IP klienta',
'Controller': 'Kontroler',
'Copyright': 'Copyright',
'Current request': 'Aktualne \xc5\xbc\xc4\x85danie',
'Current response': 'Aktualna odpowied\xc5\xba',
'Current session': 'Aktualna sesja',
'DB Model': 'Model bazy danych',
'Database': 'Baza danych',
'Delete:': 'Usu\xc5\x84:',
'Description': 'Opis',
'E-mail': 'Adres e-mail',
'Edit': 'Edycja',
'Edit Profile': 'Edytuj profil',
'Edit This App': 'Edytuj t\xc4\x99 aplikacj\xc4\x99',
'Edit current record': 'Edytuj obecny rekord',
'First name': 'Imi\xc4\x99',
'Function disabled': 'Funkcja wy\xc5\x82\xc4\x85czona',
'Group ID': 'ID grupy',
'Hello World': 'Witaj \xc5\x9awiecie',
'Import/Export': 'Importuj/eksportuj',
'Index': 'Indeks',
'Internal State': 'Stan wewn\xc4\x99trzny',
'Invalid Query': 'B\xc5\x82\xc4\x99dne zapytanie',
'Invalid email': 'B\xc5\x82\xc4\x99dny adres email',
'Last name': 'Nazwisko',
'Layout': 'Uk\xc5\x82ad',
'Login': 'Zaloguj',
'Logout': 'Wyloguj',
'Lost Password': 'Przypomnij has\xc5\x82o',
'Main Menu': 'Menu g\xc5\x82\xc3\xb3wne',
'Menu Model': 'Model menu',
'Name': 'Nazwa',
'New Record': 'Nowy rekord',
'No databases in this application': 'Brak baz danych w tej aplikacji',
'Origin': '\xc5\xb9r\xc3\xb3d\xc5\x82o',
'Password': 'Has\xc5\x82o',
"Password fields don't match": 'Pola has\xc5\x82a nie s\xc4\x85 zgodne ze sob\xc4\x85',
'Powered by': 'Zasilane przez',
'Query:': 'Zapytanie:',
'Record ID': 'ID rekordu',
'Register': 'Zarejestruj',
'Registration key': 'Klucz rejestracji',
'Role': 'Rola',
'Rows in table': 'Wiersze w tabeli',
'Rows selected': 'Wybrane wiersze',
'Stylesheet': 'Arkusz styl\xc3\xb3w',
'Submit': 'Wy\xc5\x9blij',
'Sure you want to delete this object?': 'Czy na pewno chcesz usun\xc4\x85\xc4\x87 ten obiekt?',
'Table name': 'Nazwa tabeli',
'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': '"Zapytanie" jest warunkiem postaci "db.tabela1.pole1==\'warto\xc5\x9b\xc4\x87\'". Takie co\xc5\x9b jak "db.tabela1.pole1==db.tabela2.pole2" oznacza SQL JOIN.',
'Timestamp': 'Znacznik czasu',
'Update:': 'Uaktualnij:',
'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': 'U\xc5\xbcyj (...)&(...) jako AND, (...)|(...) jako OR oraz ~(...) jako NOT do tworzenia bardziej skomplikowanych zapyta\xc5\x84.',
'User %(id)s Registered': 'U\xc5\xbcytkownik %(id)s zosta\xc5\x82 zarejestrowany',
'User ID': 'ID u\xc5\xbcytkownika',
'Verify Password': 'Potwierd\xc5\xba has\xc5\x82o',
'View': 'Widok',
'Welcome %s': 'Welcome %s',
'Welcome to web2py': 'Witaj w web2py',
'appadmin is disabled because insecure channel': 'administracja aplikacji wy\xc5\x82\xc4\x85czona z powodu braku bezpiecznego po\xc5\x82\xc4\x85czenia',
'cache': 'cache',
'change password': 'change password',
'click here for online examples': 'Kliknij aby przej\xc5\x9b\xc4\x87 do interaktywnych przyk\xc5\x82ad\xc3\xb3w',
'click here for the administrative interface': 'Kliknij aby przej\xc5\x9b\xc4\x87 do panelu administracyjnego',
'customize me!': 'dostosuj mnie!',
'data uploaded': 'dane wys\xc5\x82ane',
'database': 'baza danych',
'database %s select': 'wyb\xc3\xb3r z bazy danych %s',
'db': 'baza danych',
'design': 'projektuj',
'done!': 'zrobione!',
'edit profile': 'edit profile',
'export as csv file': 'eksportuj jako plik csv',
'insert new': 'wstaw nowy rekord tabeli',
'insert new %s': 'wstaw nowy rekord do tabeli %s',
'invalid request': 'B\xc5\x82\xc4\x99dne \xc5\xbc\xc4\x85danie',
'login': 'login',
'logout': 'logout',
'new record inserted': 'nowy rekord zosta\xc5\x82 wstawiony',
'next 100 rows': 'nast\xc4\x99pne 100 wierszy',
'or import from csv file': 'lub zaimportuj z pliku csv',
'previous 100 rows': 'poprzednie 100 wierszy',
'record': 'rekord',
'record does not exist': 'rekord nie istnieje',
'record id': 'id rekordu',
'register': 'register',
'selected': 'wybranych',
'state': 'stan',
'table': 'tabela',
'unable to parse csv file': 'nie mo\xc5\xbcna sparsowa\xc4\x87 pliku csv',
}
| gpl-3.0 |
randyli/beauty-cpp | template/cgi-bin/menu.py | 1 | 3251 | #!/usr/bin/python
# -*- coding:utf-8 -*-
import cgi
print "Content-Type: text/html\n\n"
projects = [
('/memcached/', 'memcached')
]
form = cgi.FieldStorage()
directory = ''
if form.has_key('dir'):
directory = form['dir'].value
if directory == '/':
print '<ul class="jqueryFileTree" style="display: none;">',
print '<li class="directory collapsed"><a href="#" rel="/memcached/">memcached</a></li>'
print '</ul>'
elif directory == '/memcached/':
print '<ul class="jqueryFileTree" style="display: none;">',
print '<li class="file ext_h"><a href="/data/__assoc_h.html" rel="/memcached/__assoc_h.html" target="content">assoc.h</a></li>'
print '<li class="file ext_c"><a href="/data/assoc_c.html" rel="/memcached/assoc_c.html" target="content">assoc.c</a></li>'
print '<li class="file ext_h"><a href="/data/__cache_h.html" rel="/memcached/__cache_h.html" target="content">cache.h</a></li>'
print '<li class="file ext_c"><a href="/data/cache_c.html" rel="/memcached/cache_c.html" target="content">cache.c</a></li>'
print '<li class="file ext_c"><a href="/data/daemon_c.html" rel="/memcached/daemon_c.html" target="content">daemon.c</a></li>'
print '<li class="file ext_h"><a href="/data/__hash_h.html" rel="/memcached/__hash_h.html" target="content">hash.h</a></li>'
print '<li class="file ext_c"><a href="/data/hash_c.html" rel="/memcached/hash_c.html" target="content">hash.c</a></li>'
print '<li class="file ext_h"><a href="/data/__items_h.html" rel="/memcached/__items_h.html" target="content">items.h</a></li>'
print '<li class="file ext_c"><a href="/data/items_c.html" rel="/memcached/items_c.html" target="content">items.c</a></li>'
print '<li class="file ext_h"><a href="/data/__memcached_h.html" rel="/memcached/__memcached_h.html" target="content">memcached.h</a></li>'
print '<li class="file ext_c"><a href="/data/memcached_c.html" rel="/memcached/memcached_c.html" target="content">memcached.c</a></li>'
print '<li class="file ext_c"><a href="/data/sasl_defs_c.html" rel="/memcached/sasl_defs_c.html" target="content">sasl_defs.c</a></li>'
print '<li class="file ext_c"><a href="/data/sizes_c.html" rel="/memcached/sizes_c.html" target="content">sizes.c</a></li>'
print '<li class="file ext_h"><a href="/data/__slabs_h.html" rel="/memcached/__slabs_h.html" target="content">slabs.h</a></li>'
print '<li class="file ext_c"><a href="/data/slabs_c.html" rel="/memcached/slabs_c.html" target="content">slabs.c</a></li>'
print '<li class="file ext_h"><a href="/data/__stats_c.html" rel="/memcached/__stats_h.html" target="content">stats.h</a></li>'
print '<li class="file ext_c"><a href="/data/stats_c.html" rel="/memcached/stats_c.html" target="content">stats.c</a></li>'
print '<li class="file ext_c"><a href="/data/thread_c.html" rel="/memcached/thread_c.html" target="content">thread.c</a></li>'
print '<li class="file ext_c"><a href="/data/timed_c.html" rel="/memcached/timedrun_c.html" target="content">timedrun.c</a></li>'
print '<li class="file ext_h"><a href="/data/__util_c.html" rel="/memcached/__util_h.html" target="content">util.h</a></li>'
print '<li class="file ext_c"><a href="/data/utile_c.html" rel="/memcached/util_c.html" target="content">util.c</a></li>'
print '</ul>'
| mit |
chrisdunelm/grpc | src/python/grpcio_tests/tests/unit/beta/_utilities_test.py | 22 | 3382 | # Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests of grpc.beta.utilities."""
import threading
import time
import unittest
from grpc.beta import implementations
from grpc.beta import utilities
from grpc.framework.foundation import future
from tests.unit.framework.common import test_constants
class _Callback(object):
    """Thread-safe holder for a single value delivered via a callback.

    One thread hands in a value through accept_value(); another thread
    blocks in block_until_called() until that value has arrived.
    """

    def __init__(self):
        self._guard = threading.Condition()
        self._value = None

    def accept_value(self, value):
        """Record *value* and wake every thread waiting for it."""
        with self._guard:
            self._value = value
            self._guard.notify_all()

    def block_until_called(self):
        """Block until a non-None value has been accepted, then return it."""
        with self._guard:
            while self._value is None:
                self._guard.wait()
            return self._value
class ChannelConnectivityTest(unittest.TestCase):
    """Tests for utilities.channel_ready_future over beta-API channels."""

    def test_lonely_channel_connectivity(self):
        # Channel to a port with no server: the ready future must time
        # out, still be "running", and then be cancellable.
        channel = implementations.insecure_channel('localhost', 12345)
        callback = _Callback()

        ready_future = utilities.channel_ready_future(channel)
        ready_future.add_done_callback(callback.accept_value)
        with self.assertRaises(future.TimeoutError):
            ready_future.result(timeout=test_constants.SHORT_TIMEOUT)
        self.assertFalse(ready_future.cancelled())
        self.assertFalse(ready_future.done())
        self.assertTrue(ready_future.running())
        ready_future.cancel()
        # The done-callback must fire with the future itself as argument.
        value_passed_to_callback = callback.block_until_called()
        self.assertIs(ready_future, value_passed_to_callback)
        self.assertTrue(ready_future.cancelled())
        self.assertTrue(ready_future.done())
        self.assertFalse(ready_future.running())

    def test_immediately_connectable_channel_connectivity(self):
        # With a live in-process server the future must mature normally.
        server = implementations.server({})
        port = server.add_insecure_port('[::]:0')
        server.start()
        channel = implementations.insecure_channel('localhost', port)
        callback = _Callback()
        # NOTE(review): if channel_ready_future() itself raised,
        # 'ready_future' would be unbound in the finally clause below --
        # confirm that is acceptable here.
        try:
            ready_future = utilities.channel_ready_future(channel)
            ready_future.add_done_callback(callback.accept_value)
            # A matured ready-future resolves to None.
            self.assertIsNone(
                ready_future.result(timeout=test_constants.LONG_TIMEOUT))
            value_passed_to_callback = callback.block_until_called()
            self.assertIs(ready_future, value_passed_to_callback)
            self.assertFalse(ready_future.cancelled())
            self.assertTrue(ready_future.done())
            self.assertFalse(ready_future.running())
            # Cancellation after maturity has no effect.
            ready_future.cancel()
            self.assertFalse(ready_future.cancelled())
            self.assertTrue(ready_future.done())
            self.assertFalse(ready_future.running())
        finally:
            ready_future.cancel()
            server.stop(0)

if __name__ == '__main__':
    unittest.main(verbosity=2)
| apache-2.0 |
erikriver/eduIntelligent-cynin | src/eduintelligent.courses/eduintelligent/courses/content/coursecontent.py | 1 | 1190 | """Definition of the Lessons content type.
"""
from zope.interface import implements
from Products.Archetypes import atapi
from Products.ATContentTypes.content import folder
from Products.ATContentTypes.content.schemata import finalizeATCTSchema
from eduintelligent.courses.interfaces import ICourseContent
from eduintelligent.courses.config import PROJECTNAME
from eduintelligent.courses.utility import hideMetadataSchema
from eduintelligent.courses import coursesMessageFactory as _
# Build the CourseContent schema from the stock ATFolder schema.
CourseContentSchema = folder.ATFolderSchema.copy()

# Store title/description via annotations instead of plain attributes.
CourseContentSchema['title'].storage = atapi.AnnotationStorage()
CourseContentSchema['description'].storage = atapi.AnnotationStorage()

finalizeATCTSchema(CourseContentSchema, folderish=True, moveDiscussion=False)
# Hide the metadata schemata and keep instances out of site navigation.
hideMetadataSchema(CourseContentSchema, excludeFromNav=True)
class CourseContent(folder.ATFolder):
    """Folderish content type that contains multiple lessons."""
    implements(ICourseContent)

    portal_type = "CourseContent"
    # Rename the object after creation so its id is derived from the title.
    _at_rename_after_creation = True
    schema = CourseContentSchema

    title = atapi.ATFieldProperty('title')
    description = atapi.ATFieldProperty('description')

# Register the content type with Archetypes under this product's name.
atapi.registerType(CourseContent, PROJECTNAME)
| gpl-3.0 |
Antiun/project | project_functional_block/__openerp__.py | 22 | 1475 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2010 Akretion LDTA (<http://www.akretion.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# OpenERP/Odoo addon manifest for the "project functional block" module.
{'name': 'Project requiring functional blocks',
 'version': '1.2',
 'author': "Akretion,Odoo Community Association (OCA)",
 'website': 'www.akretion.com',
 'license': 'AGPL-3',
 'category': 'Generic Modules',
 'description': """
Adds functional blocks to organize the projects tasks.
""",
 'depends': [
     'project',
 ],
 'data': [
     'security/ir.model.access.csv',
     'project_view.xml',
 ],
 'demo': [
     'project_demo.xml',
 ],
 # NOTE(review): 'installable' is False while 'application' is True --
 # confirm the module is intentionally marked non-installable.
 'installable': False,
 'application': True,
 }
| agpl-3.0 |
tkelman/utf8rewind | tools/gyp/test/subdirectory/gyptest-SYMROOT-all.py | 102 | 1236 | #!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies building a target and a subsidiary dependent target from a
.gyp file in a subdirectory, without specifying an explicit output build
directory, and using the generated solution or project file at the top
of the tree as the entry point.
The configuration sets the Xcode SYMROOT variable and uses --depth=
to make Xcode behave like the other build tools--that is, put all
built targets in a single output build directory at the top of the tree.
"""
import TestGyp
test = TestGyp.TestGyp()

# Generate from the .gyp file in the subdirectory; --depth=. makes all
# built targets land in one output directory at the top of the tree.
test.run_gyp('prog1.gyp', '-Dset_symroot=1', '--depth=.', chdir='src')

test.relocate('src', 'relocate/src')

# Suppress the test infrastructure's setting SYMROOT on the command line.
test.build('prog1.gyp', test.ALL, SYMROOT=None, chdir='relocate/src')

# Both the main target and its dependent must have been built.
test.run_built_executable('prog1',
                          stdout="Hello from prog1.c\n",
                          chdir='relocate/src')

test.run_built_executable('prog2',
                          stdout="Hello from prog2.c\n",
                          chdir='relocate/src')

test.pass_test()
| mit |
frankvdp/django | tests/i18n/test_compilation.py | 15 | 7840 | import gettext as gettext_module
import os
import stat
import unittest
from io import StringIO
from subprocess import Popen
from unittest import mock
from django.core.management import (
CommandError, call_command, execute_from_command_line,
)
from django.core.management.commands.makemessages import (
Command as MakeMessagesCommand,
)
from django.core.management.utils import find_command
from django.test import SimpleTestCase, override_settings
from django.test.utils import captured_stderr, captured_stdout
from django.utils import translation
from django.utils.translation import gettext
from .utils import RunInTmpDirMixin, copytree
# compilemessages shells out to GNU msgfmt; skip everything if missing.
has_msgfmt = find_command('msgfmt')


@unittest.skipUnless(has_msgfmt, 'msgfmt is mandatory for compilation tests')
class MessageCompilationTests(RunInTmpDirMixin, SimpleTestCase):
    """Base class: each test runs inside a temp copy of work_subdir."""

    work_subdir = 'commands'
class PoFileTests(MessageCompilationTests):

    LOCALE = 'es_AR'
    MO_FILE = 'locale/%s/LC_MESSAGES/django.mo' % LOCALE

    def test_bom_rejection(self):
        # A .po file starting with a BOM must be rejected and no .mo written.
        stderr = StringIO()
        with self.assertRaisesMessage(CommandError, 'compilemessages generated one or more errors.'):
            call_command('compilemessages', locale=[self.LOCALE], stdout=StringIO(), stderr=stderr)
        self.assertIn('file has a BOM (Byte Order Mark)', stderr.getvalue())
        self.assertFalse(os.path.exists(self.MO_FILE))

    def test_no_write_access(self):
        # A read-only .mo destination must produce a clear error.
        mo_file_en = 'locale/en/LC_MESSAGES/django.mo'
        err_buffer = StringIO()
        # put file in read-only mode
        old_mode = os.stat(mo_file_en).st_mode
        os.chmod(mo_file_en, stat.S_IREAD)
        try:
            with self.assertRaisesMessage(CommandError, 'compilemessages generated one or more errors.'):
                call_command('compilemessages', locale=['en'], stderr=err_buffer, verbosity=0)
            self.assertIn('not writable location', err_buffer.getvalue())
        finally:
            # Restore the mode so temp-dir cleanup succeeds.
            os.chmod(mo_file_en, old_mode)
class PoFileContentsTests(MessageCompilationTests):
    # Ticket #11240

    LOCALE = 'fr'
    MO_FILE = 'locale/%s/LC_MESSAGES/django.mo' % LOCALE

    def test_percent_symbol_in_po_file(self):
        # A literal '%' in the catalog must not break compilation (#11240).
        call_command('compilemessages', locale=[self.LOCALE], stdout=StringIO())
        self.assertTrue(os.path.exists(self.MO_FILE))
class MultipleLocaleCompilationTests(MessageCompilationTests):
    """Compiling one or several locales selected via --locale."""

    MO_FILE_HR = None
    MO_FILE_FR = None

    def setUp(self):
        super().setUp()
        # Absolute .mo paths inside the per-test temp directory.
        localedir = os.path.join(self.test_dir, 'locale')
        self.MO_FILE_HR = os.path.join(localedir, 'hr/LC_MESSAGES/django.mo')
        self.MO_FILE_FR = os.path.join(localedir, 'fr/LC_MESSAGES/django.mo')

    def test_one_locale(self):
        with override_settings(LOCALE_PATHS=[os.path.join(self.test_dir, 'locale')]):
            call_command('compilemessages', locale=['hr'], stdout=StringIO())

            self.assertTrue(os.path.exists(self.MO_FILE_HR))

    def test_multiple_locales(self):
        with override_settings(LOCALE_PATHS=[os.path.join(self.test_dir, 'locale')]):
            call_command('compilemessages', locale=['hr', 'fr'], stdout=StringIO())

            self.assertTrue(os.path.exists(self.MO_FILE_HR))
            self.assertTrue(os.path.exists(self.MO_FILE_FR))
class ExcludedLocaleCompilationTests(MessageCompilationTests):
    """Behaviour of the --exclude option, alone and combined with --locale."""

    work_subdir = 'exclude'

    MO_FILE = 'locale/%s/LC_MESSAGES/django.mo'

    def setUp(self):
        super().setUp()
        copytree('canned_locale', 'locale')

    def test_command_help(self):
        with captured_stdout(), captured_stderr():
            # `call_command` bypasses the parser; by calling
            # `execute_from_command_line` with the help subcommand we
            # ensure that there are no issues with the parser itself.
            execute_from_command_line(['django-admin', 'help', 'compilemessages'])

    def test_one_locale_excluded(self):
        call_command('compilemessages', exclude=['it'], stdout=StringIO())
        self.assertTrue(os.path.exists(self.MO_FILE % 'en'))
        self.assertTrue(os.path.exists(self.MO_FILE % 'fr'))
        self.assertFalse(os.path.exists(self.MO_FILE % 'it'))

    def test_multiple_locales_excluded(self):
        call_command('compilemessages', exclude=['it', 'fr'], stdout=StringIO())
        self.assertTrue(os.path.exists(self.MO_FILE % 'en'))
        self.assertFalse(os.path.exists(self.MO_FILE % 'fr'))
        self.assertFalse(os.path.exists(self.MO_FILE % 'it'))

    def test_one_locale_excluded_with_locale(self):
        # --exclude wins over --locale for the overlapping locale.
        call_command('compilemessages', locale=['en', 'fr'], exclude=['fr'], stdout=StringIO())
        self.assertTrue(os.path.exists(self.MO_FILE % 'en'))
        self.assertFalse(os.path.exists(self.MO_FILE % 'fr'))
        self.assertFalse(os.path.exists(self.MO_FILE % 'it'))

    def test_multiple_locales_excluded_with_locale(self):
        call_command('compilemessages', locale=['en', 'fr', 'it'], exclude=['fr', 'it'],
                     stdout=StringIO())
        self.assertTrue(os.path.exists(self.MO_FILE % 'en'))
        self.assertFalse(os.path.exists(self.MO_FILE % 'fr'))
        self.assertFalse(os.path.exists(self.MO_FILE % 'it'))
class CompilationErrorHandling(MessageCompilationTests):
    """msgfmt failures must surface as CommandError, locale-independent."""

    def test_error_reported_by_msgfmt(self):
        # po file contains wrong po formatting.
        with self.assertRaises(CommandError):
            call_command('compilemessages', locale=['ja'], verbosity=0, stderr=StringIO())

    def test_msgfmt_error_including_non_ascii(self):
        # po file contains invalid msgstr content (triggers non-ascii error content).
        # Make sure the output of msgfmt is unaffected by the current locale.
        env = os.environ.copy()
        env.update({'LANG': 'C'})
        # Force LANG=C for the msgfmt subprocess by wrapping Popen.
        with mock.patch('django.core.management.utils.Popen', lambda *args, **kwargs: Popen(*args, env=env, **kwargs)):
            cmd = MakeMessagesCommand()
            if cmd.gettext_version < (0, 18, 3):
                self.skipTest("python-brace-format is a recent gettext addition.")
            stderr = StringIO()
            with self.assertRaisesMessage(CommandError, 'compilemessages generated one or more errors'):
                call_command('compilemessages', locale=['ko'], stdout=StringIO(), stderr=stderr)
            self.assertIn("' cannot start a field name", stderr.getvalue())
class ProjectAndAppTests(MessageCompilationTests):
    """Shared constants for tests covering both project- and app-level catalogs."""
    LOCALE = 'ru'
    PROJECT_MO_FILE = 'locale/%s/LC_MESSAGES/django.mo' % LOCALE
    APP_MO_FILE = 'app_with_locale/locale/%s/LC_MESSAGES/django.mo' % LOCALE
class FuzzyTranslationTest(ProjectAndAppTests):
    """Fuzzy entries are dropped by default and kept with --use-fuzzy."""

    def setUp(self):
        super().setUp()
        gettext_module._translations = {}  # flush cache or test will be useless

    def test_nofuzzy_compiling(self):
        # Without fuzzy=True the fuzzy 'Vodka' entry stays untranslated.
        with override_settings(LOCALE_PATHS=[os.path.join(self.test_dir, 'locale')]):
            call_command('compilemessages', locale=[self.LOCALE], stdout=StringIO())
            with translation.override(self.LOCALE):
                self.assertEqual(gettext('Lenin'), 'Ленин')
                self.assertEqual(gettext('Vodka'), 'Vodka')

    def test_fuzzy_compiling(self):
        # With fuzzy=True the fuzzy entry is compiled as well.
        with override_settings(LOCALE_PATHS=[os.path.join(self.test_dir, 'locale')]):
            call_command('compilemessages', locale=[self.LOCALE], fuzzy=True, stdout=StringIO())
            with translation.override(self.LOCALE):
                self.assertEqual(gettext('Lenin'), 'Ленин')
                self.assertEqual(gettext('Vodka'), 'Водка')
class AppCompilationTest(ProjectAndAppTests):

    def test_app_locale_compiled(self):
        # One invocation must compile both the project and the app catalog.
        call_command('compilemessages', locale=[self.LOCALE], stdout=StringIO())
        self.assertTrue(os.path.exists(self.PROJECT_MO_FILE))
        self.assertTrue(os.path.exists(self.APP_MO_FILE))
| bsd-3-clause |
pwarren/AGDeviceControl | agdevicecontrol/thirdparty/site-packages/darwin/twisted/python/logfile.py | 3 | 7578 | # -*- test-case-name: twisted.test.test_logfile -*-
# Copyright (c) 2001-2004 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
A rotating, browsable log file.
"""
# System Imports
import os, stat, glob, string, time
# sibling imports
import threadable
class BaseLogFile:
    """The base class for a log file that can be rotated.
    """

    # Method names serialized by threadable.synchronize() in subclasses.
    synchronized = ["write", "rotate"]

    def __init__(self, name, directory, defaultMode=None):
        self.directory = directory
        assert os.path.isdir(self.directory)
        self.name = name
        self.path = os.path.join(directory, name)
        if defaultMode is None and os.path.exists(self.path) and hasattr(os, "chmod"):
            # Inherit the mode of a pre-existing log file (st_mode is
            # element 0 of the os.stat() tuple).
            self.defaultMode = os.stat(self.path)[0]
        else:
            self.defaultMode = defaultMode
        self._openFile()

    def shouldRotate(self):
        """Override with a method to that returns true if the log
        should be rotated"""
        raise NotImplementedError

    def _openFile(self):
        """Open the log file."""
        self.closed = 0
        if os.path.exists(self.path):
            # Existing file: open read/write and seek to the end to append.
            self._file = open(self.path, "r+", 1)
            self._file.seek(0, 2)
        else:
            self._file = open(self.path, "w+", 1)
        # set umask to be same as original log file
        if self.defaultMode is not None:
            try:
                os.chmod(self.path, self.defaultMode)
            except OSError:
                # Probably /dev/null or something?
                pass

    def __getstate__(self):
        # Open file handles cannot be pickled; drop the handle here and
        # reopen it in __setstate__.
        state = self.__dict__.copy()
        del state["_file"]
        return state

    def __setstate__(self, state):
        self.__dict__ = state
        self._openFile()

    def write(self, data):
        """Write some data to the file."""
        # Rotate first (if the subclass says so) so the data lands in
        # the fresh file.
        if self.shouldRotate():
            self.flush()
            self.rotate()
        self._file.write(data)

    def flush(self):
        """Flush the file."""
        self._file.flush()

    def close(self):
        """Close the file.

        The file cannot be used once it has been closed.
        """
        self.closed = 1
        self._file.close()
        self._file = None

    def getCurrentLog(self):
        """Return a LogReader for the current log file."""
        return LogReader(self.path)
class LogFile(BaseLogFile):
    """A log file that can be rotated.

    A rotateLength of None disables automatic log rotation.
    """
    def __init__(self, name, directory, rotateLength=1000000, defaultMode=None):
        BaseLogFile.__init__(self, name, directory, defaultMode)
        self.rotateLength = rotateLength

    def _openFile(self):
        BaseLogFile._openFile(self)
        # Track the current size so write() can decide when to rotate
        # without stat()ing the file each time.
        self.size = self._file.tell()

    def shouldRotate(self):
        """Rotate when the log file size is larger than rotateLength"""
        return self.rotateLength and self.size >= self.rotateLength

    def getLog(self, identifier):
        """Given an integer, return a LogReader for an old log file."""
        filename = "%s.%d" % (self.path, identifier)
        if not os.path.exists(filename):
            raise ValueError, "no such logfile exists"
        return LogReader(filename)

    def write(self, data):
        """Write some data to the file"""
        BaseLogFile.write(self, data)
        self.size += len(data)

    def rotate(self):
        """Rotate the file and create a new one.

        If it's not possible to open new logfile, this will fail silently,
        and continue logging to old logfile.
        """
        if not (os.access(self.directory, os.W_OK) and os.access(self.path, os.W_OK)):
            return
        logs = self.listLogs()
        logs.reverse()
        # Shift every old log up by one (log.3 -> log.4, ...), highest first
        # so no rename clobbers another.
        for i in logs:
            os.rename("%s.%d" % (self.path, i), "%s.%d" % (self.path, i + 1))
        self._file.close()
        os.rename(self.path, "%s.1" % self.path)
        self._openFile()

    def listLogs(self):
        """Return sorted list of integers - the old logs' identifiers."""
        result = []
        for name in glob.glob("%s.*" % self.path):
            try:
                counter = int(string.split(name, '.')[-1])
                if counter:
                    result.append(counter)
            except ValueError:
                # Ignore files whose suffix is not an integer.
                pass
        result.sort()
        return result

    def __getstate__(self):
        state = BaseLogFile.__getstate__(self)
        # 'size' is recomputed by _openFile() on unpickling.
        del state["size"]
        return state

threadable.synchronize(LogFile)
class DailyLogFile(BaseLogFile):
    """A log file that is rotated daily (at or after midnight localtime)
    """
    def _openFile(self):
        BaseLogFile._openFile(self)
        # Remember the (year, month, day) of the file's last modification
        # (st_mtime is element 8 of the os.stat() tuple).
        self.lastDate = self.toDate(os.stat(self.path)[8])

    def shouldRotate(self):
        """Rotate when the date has changed since last write"""
        return self.toDate() > self.lastDate

    def toDate(self, *args):
        """Convert a unixtime to (year, month, day) localtime tuple,
        or return the current (year, month, day) localtime tuple.

        This function primarily exists so you may overload it with
        gmtime, or some cruft to make unit testing possible.
        """
        # primarily so this can be unit tested easily
        return time.localtime(*args)[:3]

    def suffix(self, tupledate):
        """Return the suffix given a (year, month, day) tuple or unixtime"""
        try:
            return '_'.join(map(str, tupledate))
        except:
            # try taking a float unixtime
            return '_'.join(map(str, self.toDate(tupledate)))

    def getLog(self, identifier):
        """Given a unix time, return a LogReader for an old log file."""
        if self.toDate(identifier) == self.lastDate:
            # Still today's file: hand back the live log.
            return self.getCurrentLog()
        filename = "%s.%s" % (self.path, self.suffix(identifier))
        if not os.path.exists(filename):
            raise ValueError, "no such logfile exists"
        return LogReader(filename)

    def write(self, data):
        """Write some data to the log file"""
        BaseLogFile.write(self, data)
        # Guard against a corner case where time.time()
        # could potentially run backwards to yesterday.
        # Primarily due to network time.
        self.lastDate = max(self.lastDate, self.toDate())

    def rotate(self):
        """Rotate the file and create a new one.

        If it's not possible to open new logfile, this will fail silently,
        and continue logging to old logfile.
        """
        if not (os.access(self.directory, os.W_OK) and os.access(self.path, os.W_OK)):
            return
        newpath = "%s.%s" % (self.path, self.suffix(self.lastDate))
        if os.path.exists(newpath):
            # A rotated log for that day already exists; skip rotation.
            return
        self._file.close()
        os.rename(self.path, newpath)
        self._openFile()

    def __getstate__(self):
        state = BaseLogFile.__getstate__(self)
        # 'lastDate' is recomputed by _openFile() on unpickling.
        del state["lastDate"]
        return state

threadable.synchronize(DailyLogFile)
class LogReader:
    """Read from a log file."""

    def __init__(self, name):
        self._file = open(name, "r")

    def readLines(self, lines=10):
        """Read a list of lines from the log file.

        This doesn't returns all of the files lines - call it multiple times.
        """
        result = []
        for i in range(lines):
            line = self._file.readline()
            if not line:
                # End of file reached before 'lines' lines were read.
                break
            result.append(line)
        return result

    def close(self):
        self._file.close()
| gpl-2.0 |
Gazzonyx/samba | python/examples/netbios.py | 66 | 1049 | #!/usr/bin/env python
# Unix SMB/CIFS implementation.
# Copyright (C) Jelmer Vernooij <jelmer@samba.org> 2008
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from samba.netbios import Node
n = Node()
# Broadcast a NetBIOS name query for "GANIEDA" on the given network,
# waiting up to 4 seconds for an answer.
(reply_from, names, addresses) = n.query_name("GANIEDA", "192.168.4.0",
                                              timeout=4)
print "Received reply from %s:" % (reply_from, )
print "Names: %r" % (names, )
print "Addresses: %r" % (addresses, )
| gpl-3.0 |
Distrotech/scons | test/CPPPATH/list-expansion.py | 5 | 3385 | #!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
"""
Verify that expansion of construction variables whose values are
lists works as expected within a $CPPPATH list definition.
Previously, the stringification of the expansion of the individual
variables would turn a list like ['sub1', 'sub2'] below into "-Isub1 sub2"
on the command line.
Test case courtesy Daniel Svensson.
"""
import TestSCons
test = TestSCons.TestSCons()
test.subdir('sub1', 'sub2', 'sub3', 'sub4')
test.write('SConstruct', """\
class _inc_test(object):
def __init__(self, name):
self.name = name
def __call__(self, target, source, env, for_signature):
return env.something[self.name]
env = Environment()
env.something = {}
env.something['test'] = ['sub1', 'sub2']
env['INC_PATHS1'] = _inc_test
env['INC_PATHS2'] = ['sub3', 'sub4']
env.Append(CPPPATH = ['${INC_PATHS1("test")}', '$INC_PATHS2'])
env.Program('test', 'test.c')
""")
test.write('test.c', """\
#include <stdio.h>
#include <stdlib.h>
#include "string1.h"
#include "string2.h"
#include "string3.h"
#include "string4.h"
int
main(int argc, char *argv[])
{
argv[argc++] = "--";
printf("test.c\\n");
printf("%s\\n", STRING1);
printf("%s\\n", STRING2);
printf("%s\\n", STRING3);
printf("%s\\n", STRING4);
exit (0);
}
""")
test.write(['sub1', 'string1.h'], """\
#define STRING1 "sub1/string1.h"
""")
test.write(['sub2', 'string2.h'], """\
#define STRING2 "sub2/string2.h"
""")
test.write(['sub3', 'string3.h'], """\
#define STRING3 "sub3/string3.h"
""")
test.write(['sub4', 'string4.h'], """\
#define STRING4 "sub4/string4.h"
""")
test.run()
test.up_to_date(arguments = '.')
expect = """\
test.c
sub1/string1.h
sub2/string2.h
sub3/string3.h
sub4/string4.h
"""
test.run(program = test.workpath('test' + TestSCons._exe), stdout=expect)
test.write(['sub2', 'string2.h'], """\
#define STRING2 "sub2/string2.h 2"
""")
test.not_up_to_date(arguments = '.')
expect = """\
test.c
sub1/string1.h
sub2/string2.h 2
sub3/string3.h
sub4/string4.h
"""
test.run(program = test.workpath('test' + TestSCons._exe), stdout=expect)
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| mit |
HELIO-HFC/SDOSS | lib/python/net/pycurl.py | 1 | 1034 | #! /usr/bin/env python
import sys
import subprocess
import argparse
def pycurl(url, *args, **kwargs):
    """Fetch *url* by shelling out to the ``curl`` command-line tool.

    Positional *args* are appended to the command verbatim (e.g. ``"-s"``).
    Keyword arguments become curl options: a single-letter key ``k`` is
    emitted as ``-k value`` and a longer key as ``--key value``; options
    whose value is falsy are silently skipped (so value-less boolean flags
    cannot be passed this way -- use *args* for those).

    Returns a ``(stdout, stderr)`` tuple with curl's captured output.
    """
    cmd = "curl"
    for arg in args:
        cmd += " " + arg
    # BUG FIX: dict.iteritems() only exists on Python 2; items() behaves
    # identically there and keeps the function working on Python 3.
    for key, value in kwargs.items():
        if not value:
            # a falsy value cannot be rendered as "option value"; skip it
            continue
        if len(key) == 1:
            cmd += " -" + key + " " + value
        else:
            cmd += " --" + key + " " + value
    cmd += " " + url
    proc = subprocess.Popen(cmd, shell=True,
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    # communicate() reads both pipes to EOF and waits for the process to
    # terminate, so the former explicit proc.wait() was redundant.
    output, error = proc.communicate()
    return output, error
if __name__ == "__main__":
    # Command-line entry point: the sole positional argument is the URL;
    # any other parsed attributes are forwarded to curl as keyword options.
    parser = argparse.ArgumentParser()
    parser.add_argument('url', nargs=1, help="url to fetch")
    namespace = parser.parse_args()
    url = namespace.url[0]
    del namespace.url
    keys = dict(namespace.__dict__)
    output, error = pycurl(url, **keys)
    # BUG FIX: the original Python-2 print statements break on Python 3;
    # single-argument print() calls behave identically on both versions.
    print("output:")
    print(output)
    print("error:")
    print(error)
| mit |
CloudWareChile/OpenChile | openerp/addons/product/partner.py | 9 | 1587 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from osv import fields, osv
class res_partner(osv.osv):
    """Extend ``res.partner`` with a per-partner sale pricelist.

    Old-API OpenERP model: using ``_inherit`` with the same ``_name``
    adds the column to the existing ``res.partner`` model in place.
    """
    _name = 'res.partner'
    _inherit = 'res.partner'
    _columns = {
        # Company-dependent "property" field: its value is stored via
        # ir.property, so each company can point the same partner at a
        # different sale pricelist.
        'property_product_pricelist': fields.property(
            'product.pricelist',
            type='many2one',
            relation='product.pricelist',
            domain=[('type','=','sale')],
            string="Sale Pricelist",
            view_load=True,
            help="This pricelist will be used, instead of the default one, for sales to the current partner"),
    }

res_partner()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
dmacvicar/spacewalk | backend/server/test/test_leak2.py | 2 | 1127 | #
# Copyright (c) 2008 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
#
import os
import sys
import time
from DCOracle import Connect
# Memory-leak smoke test (Python 2): repeatedly execute a trivial Oracle
# statement and periodically sample this process's memory usage from
# /proc/self/status, writing "<iteration> <elapsed> vsz: ... rss: ..." to
# stderr so growth over time is visible.
print os.getpid()
dbh = Connect('rhnuser/rhnuser@webdev')
h = dbh.prepare("select 1 from dual")
start = time.time()
i = 0
write = sys.stderr.write
while i < 10000:
    h.execute()
    # sample memory every 100 executions
    if i % 100 == 0:
        f = open("/proc/self/status")
        l = f.readlines()
        # NOTE(review): assumes VmSize/VmRSS are lines 10 and 12 of
        # /proc/self/status -- this is kernel-version dependent; verify.
        vmsize = l[10][10:-1]
        vmrss = l[12][10:-1]
        f.close()
        write("%d %.3f vsz: %s rss: %s \n" % (i, time.time() - start, vmsize,
                                              vmrss))
    i = i + 1
| gpl-2.0 |
ruiliLaMeilleure/11ad-backhaul | bindings/python/ns3modulegen_core_customizations.py | 41 | 18881 | from __future__ import print_function
import sys
import re
from pybindgen.typehandlers import base as typehandlers
from pybindgen import ReturnValue, Parameter
from pybindgen.cppmethod import CustomCppMethodWrapper, CustomCppConstructorWrapper
from pybindgen.typehandlers.codesink import MemoryCodeSink
from pybindgen.typehandlers import ctypeparser
from pybindgen.typehandlers.base import ForwardWrapperBase
from pybindgen import cppclass
import warnings
from pybindgen.typehandlers.base import CodeGenerationError
import sys
class SmartPointerTransformation(typehandlers.TypeTransformation):
    """
    This class provides a "type transformation" that tends to support
    NS-3 smart pointers.  Parameters such as "Ptr<Foo> foo" are
    transformed into something like Parameter.new("Foo*", "foo",
    transfer_ownership=False).  Return values such as Ptr<Foo> are
    transformed into ReturnValue.new("Foo*",
    caller_owns_return=False).  Since the underlying objects have
    reference counting, PyBindGen does the right thing.
    """
    def __init__(self):
        super(SmartPointerTransformation, self).__init__()
        # matches an optionally ns3-qualified "Ptr<...>" type spelling and
        # captures the pointee type in group 2
        self.rx = re.compile(r'(ns3::|::ns3::|)Ptr<([^>]+)>\s*$')
        print("{0!r}".format(self), file=sys.stderr)

    def _get_untransformed_type_traits(self, name):
        # Return (raw pointer ctype, is_const) for a Ptr<T> spelling, or
        # (None, False) when *name* is not a smart-pointer type at all.
        m = self.rx.match(name)
        is_const = False
        if m is None:
            print("{0!r} did not match".format(name), file=sys.stderr)
            return None, False
        else:
            name1 = m.group(2).strip()
            # strip a leading or trailing const qualifier, remembering it
            if name1.startswith('const '):
                name1 = name1[len('const '):]
                is_const = True
            if name1.endswith(' const'):
                name1 = name1[:-len(' const')]
                is_const = True
            new_name = name1+' *'
            if new_name.startswith('::'):
                new_name = new_name[2:]
            return new_name, is_const

    def get_untransformed_name(self, name):
        """Map "Ptr<T>" to the plain pointer ctype "T *" (or None)."""
        new_name, dummy_is_const = self._get_untransformed_type_traits(name)
        return new_name

    def create_type_handler(self, type_handler, *args, **kwargs):
        """Build a Parameter/ReturnValue handler for a Ptr<T> ctype.

        Ownership flags are forced off because ns-3 smart pointers do
        their own reference counting.
        """
        if issubclass(type_handler, Parameter):
            kwargs['transfer_ownership'] = False
        elif issubclass(type_handler, ReturnValue):
            kwargs['caller_owns_return'] = False
        else:
            raise AssertionError

        ## fix the ctype, add ns3:: namespace
        orig_ctype, is_const = self._get_untransformed_type_traits(args[0])
        if is_const:
            correct_ctype = 'ns3::Ptr< {0} const >'.format(orig_ctype[:-2])
        else:
            correct_ctype = 'ns3::Ptr< {0} >'.format(orig_ctype[:-2])
        args = tuple([correct_ctype] + list(args[1:]))

        handler = type_handler(*args, **kwargs)
        # (sic) "set_tranformation" is the actual pybindgen API spelling
        handler.set_tranformation(self, orig_ctype)
        return handler

    def untransform(self, type_handler, declarations, code_block, expression):
        # smart pointer -> raw pointer (PeekPointer does not add a reference)
        return 'const_cast<%s> (ns3::PeekPointer (%s))' % (type_handler.untransformed_ctype, expression)

    def transform(self, type_handler, declarations, code_block, expression):
        # raw pointer -> smart pointer
        assert type_handler.untransformed_ctype[-1] == '*'
        return 'ns3::Ptr< %s > (%s)' % (type_handler.untransformed_ctype[:-1], expression)
## register the type transformation
# a single shared instance serves both the parameter and the return-value
# type matchers; the local name is dropped once registered
transf = SmartPointerTransformation()
typehandlers.return_type_matcher.register_transformation(transf)
typehandlers.param_type_matcher.register_transformation(transf)
del transf
class CallbackImplProxyMethod(typehandlers.ReverseWrapperBase):
    """
    Class that generates a proxy virtual method that calls a similarly named python method.
    """

    def __init__(self, return_value, parameters):
        super(CallbackImplProxyMethod, self).__init__(return_value, parameters)

    def generate_python_call(self):
        """code to call the python method"""
        build_params = self.build_params.get_parameters(force_tuple_creation=True)
        # Py_BuildValue's format string must not be const-qualified; cast
        # away constness when the first build parameter is a string literal.
        if build_params[0][0] == '"':
            build_params[0] = '(char *) ' + build_params[0]
        args = self.before_call.declare_variable('PyObject*', 'args')
        self.before_call.write_code('%s = Py_BuildValue(%s);'
                                    % (args, ', '.join(build_params)))
        self.before_call.add_cleanup_code('Py_DECREF(%s);' % args)
        # invoke the stored Python callable (m_callback) with the built args
        self.before_call.write_code('py_retval = PyObject_CallObject(m_callback, %s);' % args)
        self.before_call.write_error_check('py_retval == NULL')
        self.before_call.add_cleanup_code('Py_DECREF(py_retval);')
def register_callback_classes(out, callbacks):
    """Register a Parameter type handler for each ns3::Callback<...> signature.

    For every template-parameter tuple in *callbacks*, a dedicated
    PythonCallbackParameter subclass is declared whose CTYPES entry makes
    pybindgen use it for that exact Callback type.  NOTE(review): *out* is
    not used in the visible body -- presumably kept for API symmetry with
    generate_callback_classes; verify.
    """
    for callback_impl_num, template_parameters in enumerate(callbacks):
        cls_name = "ns3::Callback< %s >" % ', '.join(template_parameters)
        #print >> sys.stderr, "***** trying to register callback: %r" % cls_name
        class_name = "PythonCallbackImpl%i" % callback_impl_num

        class PythonCallbackParameter(Parameter):
            "Class handlers"
            CTYPES = [cls_name]
            print("***** registering callback handler: %r (%r)" % (ctypeparser.normalize_type_string(cls_name), cls_name), file=sys.stderr)
            DIRECTIONS = [Parameter.DIRECTION_IN]
            PYTHON_CALLBACK_IMPL_NAME = class_name
            TEMPLATE_ARGS = template_parameters
            DISABLED = False

            def convert_python_to_c(self, wrapper):
                "parses python args to get C++ value"
                assert isinstance(wrapper, typehandlers.ForwardWrapperBase)

                if self.DISABLED:
                    raise CodeGenerationError("wrapper could not be generated")

                if self.default_value is None:
                    # mandatory callback argument: must be a Python callable
                    py_callback = wrapper.declarations.declare_variable('PyObject*', self.name)
                    wrapper.parse_params.add_parameter('O', ['&'+py_callback], self.name)
                    wrapper.before_call.write_error_check(
                        '!PyCallable_Check(%s)' % py_callback,
                        'PyErr_SetString(PyExc_TypeError, "parameter \'%s\' must be callbale");' % self.name)

                    callback_impl = wrapper.declarations.declare_variable(
                        'ns3::Ptr<%s>' % self.PYTHON_CALLBACK_IMPL_NAME,
                        '%s_cb_impl' % self.name)
                    wrapper.before_call.write_code("%s = ns3::Create<%s> (%s);"
                                                   % (callback_impl, self.PYTHON_CALLBACK_IMPL_NAME, py_callback))
                    wrapper.call_params.append(
                        'ns3::Callback<%s> (%s)' % (', '.join(self.TEMPLATE_ARGS), callback_impl))
                else:
                    # optional callback argument: only wrap when supplied
                    py_callback = wrapper.declarations.declare_variable('PyObject*', self.name, 'NULL')
                    wrapper.parse_params.add_parameter('O', ['&'+py_callback], self.name, optional=True)
                    value = wrapper.declarations.declare_variable(
                        'ns3::Callback<%s>' % ', '.join(self.TEMPLATE_ARGS),
                        self.name+'_value',
                        self.default_value)

                    wrapper.before_call.write_code("if (%s) {" % (py_callback,))
                    wrapper.before_call.indent()

                    wrapper.before_call.write_error_check(
                        '!PyCallable_Check(%s)' % py_callback,
                        'PyErr_SetString(PyExc_TypeError, "parameter \'%s\' must be callbale");' % self.name)

                    wrapper.before_call.write_code("%s = ns3::Callback<%s> (ns3::Create<%s> (%s));"
                                                   % (value, ', '.join(self.TEMPLATE_ARGS),
                                                      self.PYTHON_CALLBACK_IMPL_NAME, py_callback))

                    wrapper.before_call.unindent()
                    wrapper.before_call.write_code("}") # closes: if (py_callback) {

                    wrapper.call_params.append(value)

            def convert_c_to_python(self, wrapper):
                raise typehandlers.NotSupportedError("Reverse wrappers for ns3::Callback<...> types "
                                                     "(python using callbacks defined in C++) not implemented.")
def generate_callback_classes(module, callbacks):
    """Emit one C++ PythonCallbackImplN class per ns3::Callback signature.

    Each generated class derives from ns3::CallbackImpl and forwards
    operator() to a stored Python callable.  If a signature's return or
    parameter types cannot be handled, the previously registered type
    handler for that Callback is disabled instead.
    """
    out = module.after_forward_declarations
    for callback_impl_num, template_parameters in enumerate(callbacks):
        sink = MemoryCodeSink()
        cls_name = "ns3::Callback< %s >" % ', '.join(template_parameters)
        #print >> sys.stderr, "***** trying to register callback: %r" % cls_name
        class_name = "PythonCallbackImpl%i" % callback_impl_num
        # C++ boilerplate: refcount the Python callable, acquire the GIL in
        # the destructor, and compare callbacks by callable identity.
        sink.writeln('''
class %s : public ns3::CallbackImpl<%s>
{
public:
    PyObject *m_callback;

    %s(PyObject *callback)
    {
        Py_INCREF(callback);
        m_callback = callback;
    }

    virtual ~%s()
    {
        PyGILState_STATE __py_gil_state;
        __py_gil_state = (PyEval_ThreadsInitialized() ? PyGILState_Ensure() : (PyGILState_STATE) 0);
        Py_DECREF(m_callback);
        m_callback = NULL;
        PyGILState_Release(__py_gil_state);
    }

    virtual bool IsEqual(ns3::Ptr<const ns3::CallbackImplBase> other_base) const
    {
        const %s *other = dynamic_cast<const %s*> (ns3::PeekPointer (other_base));
        if (other != NULL)
            return (other->m_callback == m_callback);
        else
            return false;
    }

''' % (class_name, ', '.join(template_parameters), class_name, class_name, class_name, class_name))
        sink.indent()
        # first template parameter is the callback's return type
        callback_return = template_parameters[0]
        return_ctype = ctypeparser.parse_type(callback_return)
        if ('const' in return_ctype.remove_modifiers()):
            kwargs = {'is_const': True}
        else:
            kwargs = {}
        try:
            return_type = ReturnValue.new(str(return_ctype), **kwargs)
        except (typehandlers.TypeLookupError, typehandlers.TypeConfigurationError) as ex:
            warnings.warn("***** Unable to register callback; Return value '%s' error (used in %s): %r"
                          % (callback_return, cls_name, ex),
                          Warning)
            continue
        arguments = []
        ok = True
        # remaining parameters, ignoring the ns3::empty placeholder slots
        callback_parameters = [arg for arg in template_parameters[1:] if arg != 'ns3::empty']
        for arg_num, arg_type in enumerate(callback_parameters):
            arg_name = 'arg%i' % (arg_num+1)
            param_ctype = ctypeparser.parse_type(arg_type)
            if ('const' in param_ctype.remove_modifiers()):
                kwargs = {'is_const': True}
            else:
                kwargs = {}
            try:
                arguments.append(Parameter.new(str(param_ctype), arg_name, **kwargs))
            except (typehandlers.TypeLookupError, typehandlers.TypeConfigurationError) as ex:
                warnings.warn("***** Unable to register callback; parameter '%s %s' error (used in %s): %r"
                              % (arg_type, arg_name, cls_name, ex),
                              Warning)
                ok = False
        if not ok:
            # disable the matching handlers registered earlier so wrappers
            # referencing this signature are skipped rather than miscompiled
            try:
                typehandlers.return_type_matcher.lookup(cls_name)[0].DISABLED = True
            except typehandlers.TypeLookupError:
                pass
            try:
                typehandlers.param_type_matcher.lookup(cls_name)[0].DISABLED = True
            except typehandlers.TypeLookupError:
                pass
            continue
        wrapper = CallbackImplProxyMethod(return_type, arguments)
        wrapper.generate(sink, 'operator()', decl_modifiers=[])
        sink.unindent()
        sink.writeln('};\n')
        print("Flushing to ", out, file=sys.stderr)
        sink.flush_to(out)
# def write_preamble(out):
# pybindgen.write_preamble(out)
# out.writeln("#include \"ns3/everything.h\"")
def Simulator_customizations(module):
    """Attach hand-written wrappers to ns3::Simulator's static methods.

    Schedule/ScheduleNow/ScheduleDestroy take (callback, *user_args)
    signatures that pybindgen cannot express declaratively, so each one is
    backed by a custom C wrapper function named _wrap_Simulator_<Method>.
    """
    simulator = module['ns3::Simulator']
    for method_name in ("Schedule", "ScheduleNow", "ScheduleDestroy", "Run"):
        simulator.add_custom_method_wrapper(
            method_name,
            "_wrap_Simulator_%s" % method_name,
            flags=["METH_VARARGS", "METH_KEYWORDS", "METH_STATIC"])
def CommandLine_customizations(module):
    """Customize the ns3::CommandLine wrapper (argv parsing, AddValue).

    NOTE(review): ArgvParam is not defined or imported anywhere in the
    visible part of this file; presumably it is declared elsewhere in the
    module -- verify before refactoring.
    """
    CommandLine = module['ns3::CommandLine']
    CommandLine.add_method('Parse', None, [ArgvParam(None, 'argv')],
                           is_static=False)
    # AddValue binds a Python variable by name, so it needs a custom wrapper
    CommandLine.add_custom_method_wrapper("AddValue", "_wrap_CommandLine_AddValue",
                                          flags=["METH_VARARGS", "METH_KEYWORDS"])
def Object_customizations(module):
    """Customize code generation for ns3::Object and its helper classes.

    Installs (a) a hook that gives every generated helper (proxy) class a
    GetTypeId method plus TypeId registration, and (b) custom instance
    creation functions implementing ns-3's two-phase, reference-counted
    construction.
    """
    ## ---------------------------------------------------------------------
    ## Here we generate custom constructor code for all classes that
    ## derive from ns3::Object.  The custom constructors are needed in
    ## order to support kwargs only and to translate kwargs into ns3
    ## attributes, etc.
    ## ---------------------------------------------------------------------
    try:
        Object = module['ns3::Object']
    except KeyError:
        return

    ## add a GetTypeId method to all generatd helper classes
    def helper_class_hook(helper_class):
        # emit a TypeId for the helper class, parented on the wrapped class
        decl = """
static ns3::TypeId GetTypeId (void)
{
  static ns3::TypeId tid = ns3::TypeId ("%s")
    .SetParent< %s > ()
    ;
  return tid;
}""" % (helper_class.name, helper_class.class_.full_name)
        helper_class.add_custom_method(decl)
        helper_class.add_post_generation_code(
            "NS_OBJECT_ENSURE_REGISTERED (%s);" % helper_class.name)
    Object.add_helper_class_hook(helper_class_hook)

    def ns3_object_instance_creation_function(cpp_class, code_block, lvalue,
                                              parameters, construct_type_name):
        # Emit "lvalue = new T(args); lvalue->Ref();" -- ns-3 objects are
        # reference counted, so the wrapper takes a reference immediately.
        assert lvalue
        assert not lvalue.startswith('None')
        if cpp_class.cannot_be_constructed:
            # BUG FIX: the two-placeholder format string was applied to a
            # single bare string, raising TypeError ("not enough arguments
            # for format string") instead of the intended
            # CodeGenerationError.  cannot_be_constructed holds the reason
            # text (see set_cannot_be_constructed("abstract base class")).
            raise CodeGenerationError("%s cannot be constructed (%s)"
                                      % (cpp_class.full_name,
                                         cpp_class.cannot_be_constructed))
        if cpp_class.incomplete_type:
            raise CodeGenerationError("%s cannot be constructed (incomplete type)"
                                      % cpp_class.full_name)
        code_block.write_code("%s = new %s(%s);" % (lvalue, construct_type_name, parameters))
        code_block.write_code("%s->Ref ();" % (lvalue))

    def ns3_object_post_instance_creation_function(cpp_class, code_block, lvalue,
                                                   parameters, construct_type_name):
        # second phase of ns-3 object construction (attribute wiring)
        code_block.write_code("ns3::CompleteConstruct(%s);" % (lvalue, ))

    Object.set_instance_creation_function(ns3_object_instance_creation_function)
    Object.set_post_instance_creation_function(ns3_object_post_instance_creation_function)
def Attribute_customizations(module):
    """Fix default-value typing for AttributeValue reference parameters.

    pybindgen's scanner only sees default values as raw C expression
    strings (e.g. "EmptyAttribute()") and never learns their type, so for
    the ubiquitous "const AttributeValue &v = EmptyAttribute()" helper
    signatures we stamp the default-value type in after the fact.
    """
    for klass in module.classes:
        for method in klass.get_all_methods():
            for param in method.parameters:
                if not isinstance(param, cppclass.CppClassRefParameter):
                    continue
                if param.cpp_class.name != 'AttributeValue':
                    continue
                if param.default_value is not None \
                        and param.default_value_type is None:
                    param.default_value_type = 'ns3::EmptyAttributeValue'
def TypeId_customizations(module):
    """Register the hand-written LookupByNameFailSafe wrapper on ns3::TypeId."""
    wrapper_flags = ["METH_VARARGS", "METH_KEYWORDS", "METH_STATIC"]
    type_id = module['ns3::TypeId']
    type_id.add_custom_method_wrapper("LookupByNameFailSafe",
                                      "_wrap_TypeId_LookupByNameFailSafe",
                                      flags=wrapper_flags)
def add_std_ofstream(module):
    """Wrap std::ostream/std::ofstream so Python code can pass file sinks.

    ostream is registered as an abstract base; ofstream gets its openmode
    enum, a (filename, mode) constructor and close().  Also exposes the
    STD_IOS_* mode constants via add_std_ios_openmode.
    """
    module.add_include('<fstream>')
    ostream = module.add_class('ostream', foreign_cpp_namespace='::std')
    ostream.set_cannot_be_constructed("abstract base class")
    ofstream = module.add_class('ofstream', foreign_cpp_namespace='::std', parent=ostream)
    ofstream.add_enum('openmode', [
        ('app', 'std::ios_base::app'),
        ('ate', 'std::ios_base::ate'),
        ('binary', 'std::ios_base::binary'),
        ('in', 'std::ios_base::in'),
        ('out', 'std::ios_base::out'),
        ('trunc', 'std::ios_base::trunc'),
        ])
    # mode defaults to std::ios_base::out, mirroring the C++ default argument
    ofstream.add_constructor([Parameter.new("const char *", 'filename'),
                              Parameter.new("::std::ofstream::openmode", 'mode', default_value="std::ios_base::out")])
    ofstream.add_method('close', None, [])

    add_std_ios_openmode(module)
class IosOpenmodeParam(Parameter):
    """Type handler mapping std::ios::openmode parameters to Python ints."""

    DIRECTIONS = [Parameter.DIRECTION_IN]
    CTYPES = ['std::ios::openmode', 'std::_Ios_Openmode']

    def convert_c_to_python(self, wrapper):
        # BUG FIX: ReverseWrapperBase was referenced unqualified, but only
        # ForwardWrapperBase is imported from pybindgen.typehandlers.base at
        # the top of this file, so this path raised NameError.  Qualify it
        # through the already-imported `typehandlers` module (the same
        # spelling CallbackImplProxyMethod uses).
        assert isinstance(wrapper, typehandlers.ReverseWrapperBase)
        wrapper.build_params.add_parameter('i', [self.value])

    def convert_python_to_c(self, wrapper):
        assert isinstance(wrapper, ForwardWrapperBase)
        # parse the mode as a plain C int and pass it through to the call
        name = wrapper.declarations.declare_variable("std::ios::openmode", self.name, self.default_value)
        wrapper.parse_params.add_parameter('i', ['&'+name], self.name, optional=bool(self.default_value))
        wrapper.call_params.append(name)
def add_std_ios_openmode(module):
    """Expose the std::ios open-mode flags as STD_IOS_* module constants."""
    template = ('PyModule_AddIntConstant(m, (char *) "STD_IOS_%s", '
                'std::ios::%s);')
    for flag in ('in', 'out', 'ate', 'app', 'trunc', 'binary'):
        module.after_init.write_code(template % (flag.upper(), flag))
def add_ipv4_address_tp_hash(module):
    """Install a tp_hash slot for the wrapped ns3::Ipv4Address.

    The helper C function delegates to ns3::Ipv4AddressHash so that equal
    addresses hash equally on the Python side (making them usable as dict
    keys / set members consistently with __eq__).
    """
    module.body.writeln('''
long
_ns3_Ipv4Address_tp_hash (PyObject *obj)
{
    PyNs3Ipv4Address *addr = reinterpret_cast<PyNs3Ipv4Address *> (obj);
    return static_cast<long> (ns3::Ipv4AddressHash () (*addr->obj));
}
''')
    module.header.writeln('long _ns3_Ipv4Address_tp_hash (PyObject *obj);')
    module['Ipv4Address'].pytype.slots['tp_hash'] = "_ns3_Ipv4Address_tp_hash"
| gpl-2.0 |
jehiah/tornado | tornado/test/asyncio_test.py | 24 | 4766 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import, division, print_function
from tornado import gen
from tornado.testing import AsyncTestCase, gen_test
from tornado.test.util import unittest, skipBefore33, skipBefore35, exec_test
try:
from tornado.platform.asyncio import asyncio
except ImportError:
asyncio = None
else:
from tornado.platform.asyncio import AsyncIOLoop, to_asyncio_future
# This is used in dynamically-evaluated code, so silence pyflakes.
to_asyncio_future
@unittest.skipIf(asyncio is None, "asyncio module not present")
class AsyncIOLoopTest(AsyncTestCase):
    """Tests for Tornado's bridge between its IOLoop and asyncio."""

    def get_new_ioloop(self):
        # Run each test on an AsyncIOLoop and install its underlying asyncio
        # loop as the current one, so asyncio.get_event_loop() inside the
        # tests returns the loop the test framework is driving.
        io_loop = AsyncIOLoop()
        asyncio.set_event_loop(io_loop.asyncio_loop)
        return io_loop

    def test_asyncio_callback(self):
        # Basic test that the asyncio loop is set up correctly.
        asyncio.get_event_loop().call_soon(self.stop)
        self.wait()

    @gen_test
    def test_asyncio_future(self):
        # Test that we can yield an asyncio future from a tornado coroutine.
        # Without 'yield from', we must wrap coroutines in ensure_future,
        # which was introduced during Python 3.4, deprecating the prior "async".
        if hasattr(asyncio, 'ensure_future'):
            ensure_future = asyncio.ensure_future
        else:
            # BUG FIX: "async" became a reserved keyword in Python 3.7, so
            # the attribute access `asyncio.async` is a SyntaxError there
            # and made this whole module unimportable.  getattr keeps the
            # module importable while behaving identically on older Pythons
            # (this branch only runs when ensure_future is absent, i.e. on
            # Python < 3.4.4 where asyncio.async still exists).
            ensure_future = getattr(asyncio, 'async')
        x = yield ensure_future(
            asyncio.get_event_loop().run_in_executor(None, lambda: 42))
        self.assertEqual(x, 42)

    @skipBefore33
    @gen_test
    def test_asyncio_yield_from(self):
        # Test that we can use asyncio coroutines with 'yield from'
        # instead of asyncio.async(). This requires python 3.3 syntax.
        namespace = exec_test(globals(), locals(), """
        @gen.coroutine
        def f():
            event_loop = asyncio.get_event_loop()
            x = yield from event_loop.run_in_executor(None, lambda: 42)
            return x
        """)
        result = yield namespace['f']()
        self.assertEqual(result, 42)

    @skipBefore35
    def test_asyncio_adapter(self):
        # This test demonstrates that when using the asyncio coroutine
        # runner (i.e. run_until_complete), the to_asyncio_future
        # adapter is needed. No adapter is needed in the other direction,
        # as demonstrated by other tests in the package.
        @gen.coroutine
        def tornado_coroutine():
            yield gen.Task(self.io_loop.add_callback)
            raise gen.Return(42)

        native_coroutine_without_adapter = exec_test(globals(), locals(), """
        async def native_coroutine_without_adapter():
            return await tornado_coroutine()
        """)["native_coroutine_without_adapter"]

        native_coroutine_with_adapter = exec_test(globals(), locals(), """
        async def native_coroutine_with_adapter():
            return await to_asyncio_future(tornado_coroutine())
        """)["native_coroutine_with_adapter"]

        # Use the adapter, but two degrees from the tornado coroutine.
        native_coroutine_with_adapter2 = exec_test(globals(), locals(), """
        async def native_coroutine_with_adapter2():
            return await to_asyncio_future(native_coroutine_without_adapter())
        """)["native_coroutine_with_adapter2"]

        # Tornado supports native coroutines both with and without adapters
        self.assertEqual(
            self.io_loop.run_sync(native_coroutine_without_adapter),
            42)
        self.assertEqual(
            self.io_loop.run_sync(native_coroutine_with_adapter),
            42)
        self.assertEqual(
            self.io_loop.run_sync(native_coroutine_with_adapter2),
            42)

        # Asyncio only supports coroutines that yield asyncio-compatible
        # Futures.
        with self.assertRaises(RuntimeError):
            asyncio.get_event_loop().run_until_complete(
                native_coroutine_without_adapter())
        self.assertEqual(
            asyncio.get_event_loop().run_until_complete(
                native_coroutine_with_adapter()),
            42)
        self.assertEqual(
            asyncio.get_event_loop().run_until_complete(
                native_coroutine_with_adapter2()),
            42)
rfhk/awo-custom | sale_line_quant_extended/models/stock_move.py | 1 | 10584 | # -*- coding: utf-8 -*-
# Copyright 2015-2017 Quartile Limted
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import models, fields, api, _
class StockMove(models.Model):
    """Extend stock.move with quant/lot traceability fields, SO/PO links,
    a make-to-order flag, and a quant-aware override of action_assign."""
    _inherit = "stock.move"

    # partner of the enclosing picking, stored for search/grouping
    pick_partner_id = fields.Many2one(
        related='picking_id.partner_id',
        store=True,
        readonly=True,
        string='Pick Partner'
    )
    # incoming/outgoing/internal, copied from the picking type
    picking_type_code = fields.Selection(
        related='picking_type_id.code',
        store=True,
        readonly=True,
        string='Picking Type Code'
    )
    # lot ("Case No.") taken from the move's (reserved) quants
    quant_lot_id = fields.Many2one(
        'stock.production.lot',
        compute='_get_quant_info',
        store=True,
        readonly=True,
        string='Case No.'
    )
    # owner taken from the move's (reserved) quants
    quant_owner_id = fields.Many2one(
        'res.partner',
        compute='_get_quant_info',
        store=True,
        readonly=True,
        string='Owner'
    )
    so_id = fields.Many2one(
        'sale.order',
        compute='_get_vals',
        store=True,
        readonly=True,
        string='SO'
    )
    po_id = fields.Many2one(
        'purchase.order',
        compute='_get_vals',
        store=True,
        readonly=True,
        string='PO'
    )
    is_mto = fields.Boolean('Make to Order',
        compute='_compute_mto',
        store=True,
    )

    @api.multi
    def name_get(self):
        """Display moves as "[product code: ]src > dest", prefixed with the
        originating picking name (incoming) or the origin reference."""
        res = []
        for line in self:
            name = line.location_id.name + ' > ' + line.location_dest_id.name
            if line.product_id.code:
                name = line.product_id.code + ': ' + name
            if line.picking_id.origin:
                pick_rec = self.env['stock.picking'].search(
                    [('name','=',line.picking_id.origin)])
                if pick_rec.picking_type_id.code == 'incoming':
                    name = line.picking_id.name + '/ ' + name
                else:
                    name = line.picking_id.origin + '/ ' + name
            res.append((line.id, name))
        return res

    @api.multi
    @api.depends('quant_ids', 'reserved_quant_ids', 'lot_id')
    def _get_quant_info(self):
        """Derive lot/owner from the first moved quant, else the first
        reserved quant, else fall back to the move's own lot_id."""
        for m in self:
            if m.quant_ids:
                m.quant_lot_id = m.quant_ids[0].lot_id and \
                    m.quant_ids[0].lot_id.id
                m.quant_owner_id = m.quant_ids[0].owner_id and \
                    m.quant_ids[0].owner_id.id
            elif m.reserved_quant_ids:
                m.quant_lot_id = m.reserved_quant_ids[0].lot_id and \
                    m.reserved_quant_ids[0].lot_id.id
                m.quant_owner_id = m.reserved_quant_ids[0].owner_id and \
                    m.reserved_quant_ids[0].owner_id.id
            else:
                m.quant_lot_id = m.lot_id.id
                # below part does not work since quant is generated after
                # this step
                # if m.lot_id.quant_ids:
                #     m.quant_owner_id = m.lot_id.quant_ids[-1].owner_id and \
                #         m.lot_id.quant_ids[-1].owner_id.owner_id.id

    def _get_quant_info_init(self, cr, uid):
        # update quant info when installing/upgrading
        # (bulk SQL backfill of quant_lot_id/quant_owner_id from the quants
        # currently reserved against each move; only fills NULL values)
        cr.execute("""
            update stock_move m1
            set quant_lot_id = lot, quant_owner_id = owner
            from (select q.lot_id as lot, q.owner_id as owner, m2.id as id
                  from stock_quant q
                  join stock_move m2 on q.reservation_id = m2.id) as subq
            where m1.id = subq.id
            and quant_lot_id is null
            """)

    @api.multi
    @api.depends('origin')
    def _get_vals(self):
        """Link the move to its purchase order (via purchase_line_id) or
        sale order (via the procurement's sale line)."""
        SO = self.env['sale.order']
        PO = self.env['purchase.order']
        for m in self:
            # falsy 0 clears both relations before recomputing
            m.so_id, m.po_id = 0, 0
            if m.purchase_line_id:
                m.po_id = m.purchase_line_id.order_id.id
            elif m.procurement_id and m.procurement_id.sale_line_id:
                m.so_id = m.procurement_id.sale_line_id.order_id.id

    @api.one
    @api.depends('procurement_id', 'purchase_line_id')
    def _compute_mto(self):
        """Propagate the make-to-order flag from the SO line (outgoing) or
        the PO line (incoming).

        NOTE(review): stock.move does not define a plain 'code' field in
        this file (only picking_type_code is added above); presumably
        'code' resolves elsewhere -- verify it is not an AttributeError.
        """
        if self.code == 'outgoing' and self.procurement_id and \
                self.procurement_id.sale_line_id:
            self.is_mto = self.procurement_id.sale_line_id.mto
        elif self.code == 'incoming' and self.purchase_line_id:
            self.is_mto = self.purchase_line_id.mto

    # def init(self, cr):
    #     move_ids = self.search(cr, SUPERUSER_ID, [])
    #     for m in self.browse(cr, SUPERUSER_ID, move_ids):
    #         m.pick_partner_id = m.picking_id.partner_id and m.picking_id.partner_id.id
    #         if m.quant_ids:
    #             m.quant_lot_id = m.quant_ids[0].lot_id and m.quant_ids[0].lot_id.id
    #             m.quant_owner_id = m.quant_ids[0].owner_id and m.quant_ids[0].owner_id.id

    @api.model
    def _prepare_picking_assign(self, move):
        """Carry the move's MTO flag onto the picking created for it."""
        res = super(StockMove, self)._prepare_picking_assign(move)
        res['is_mto'] = move.is_mto
        return res

    def action_assign(self, cr, uid, ids, context=None):
        # NEED TO OVERRIDE COMPLETE METHOD SINCE LOGIC WAS IN BETWEEN THE
        # LINES. SEE #oscg TAG FOR CHANGES DONE ON THIS METHOD.
        """ Checks the product type and accordingly writes the state.
        """
        context = context or {}
        quant_obj = self.pool.get("stock.quant")
        to_assign_moves = []
        main_domain = {}
        todo_moves = []
        operations = set()
        for move in self.browse(cr, uid, ids, context=context):
            if move.state not in ('confirmed', 'waiting', 'assigned'):
                continue
            if move.location_id.usage in ('supplier', 'inventory', 'production'):
                to_assign_moves.append(move.id)
                #in case the move is returned, we want to try to find quants before forcing the assignment
                if not move.origin_returned_move_id:
                    continue
            if move.product_id.type == 'consu':
                to_assign_moves.append(move.id)
                continue
            else:
                todo_moves.append(move)

                #we always keep the quants already assigned and try to find the remaining quantity on quants not assigned only
                main_domain[move.id] = [('reservation_id', '=', False), ('qty', '>', 0)]

                # oscg add
                # this is to prevent reserving quants that are taken by
                # quotations for supplier return outgoing move
                if move.location_dest_id.usage == 'supplier':
                    main_domain[move.id] += [('sale_id', '=', False)]

                #if the move is preceeded, restrict the choice of quants in the ones moved previously in original move
                ancestors = self.find_move_ancestors(cr, uid, move, context=context)
                if move.state == 'waiting' and not ancestors:
                    #if the waiting move hasn't yet any ancestor (PO/MO not confirmed yet), don't find any quant available in stock
                    main_domain[move.id] += [('id', '=', False)]
                elif ancestors:
                    main_domain[move.id] += [('history_ids', 'in', ancestors)]

                #if the move is returned from another, restrict the choice of quants to the ones that follow the returned move
                if move.origin_returned_move_id:
                    main_domain[move.id] += [('history_ids', 'in', move.origin_returned_move_id.id)]
                for link in move.linked_move_operation_ids:
                    operations.add(link.operation_id)
        # Check all ops and sort them: we want to process first the packages, then operations with lot then the rest
        operations = list(operations)
        operations.sort(key=lambda x: ((x.package_id and not x.product_id) and -4 or 0) + (x.package_id and -2 or 0) + (x.lot_id and -1 or 0))
        for ops in operations:
            #first try to find quants based on specific domains given by linked operations
            for record in ops.linked_move_operation_ids:
                move = record.move_id
                if move.id in main_domain:
                    domain = main_domain[move.id] + self.pool.get('stock.move.operation.link').get_specific_domain(cr, uid, record, context=context)
                    qty = record.qty
                    if qty:
                        # add a serial number field in SO line, which should be passed to delivery order
                        # to reserve a quant of the selected serial number
                        if record.move_id.quant_id: #oscg
                            quants = [(record.move_id.quant_id, record.move_id.quant_id.qty)] #oscg
                        else: #oscg
                            quants = quant_obj.quants_get_prefered_domain(cr,
                                uid, ops.location_id, move.product_id, qty,
                                domain=domain, prefered_domain_list=[],
                                restrict_lot_id=move.restrict_lot_id.id,
                                restrict_partner_id=move.restrict_partner_id.\
                                    id, context=context) #oscg
                        quant_obj.quants_reserve(cr, uid, quants, move, record, context=context)
        for move in todo_moves:
            if move.linked_move_operation_ids:
                continue
            # then if the move isn't totally assigned, try to find quants without any specific domain
            if move.state != 'assigned':
                qty_already_assigned = move.reserved_availability
                qty = move.product_qty - qty_already_assigned
                # add a serial number field in SO line, which should be passed to delivery order
                # to reserve a quant of the selected serial number
                if move.quant_id: #oscg
                    quants = [(move.quant_id, qty)] #oscg
                else: #oscg
                    quants = quant_obj.quants_get_prefered_domain(cr, uid,
                        move.location_id, move.product_id, qty,
                        domain=main_domain[move.id], prefered_domain_list=[],
                        restrict_lot_id=move.restrict_lot_id.id,
                        restrict_partner_id=move.restrict_partner_id.id,
                        context=context) #oscg
                quant_obj.quants_reserve(cr, uid, quants, move, context=context)

        #force assignation of consumable products and incoming from supplier/inventory/production
        if to_assign_moves:
            self.force_assign(cr, uid, to_assign_moves, context=context)
| lgpl-3.0 |
fzalkow/scikit-learn | sklearn/linear_model/tests/test_omp.py | 272 | 7752 | # Author: Vlad Niculae
# Licence: BSD 3 clause
import numpy as np
from sklearn.utils.testing import assert_raises
from sklearn.utils.testing import assert_true
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_array_equal
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_warns
from sklearn.utils.testing import ignore_warnings
from sklearn.linear_model import (orthogonal_mp, orthogonal_mp_gram,
OrthogonalMatchingPursuit,
OrthogonalMatchingPursuitCV,
LinearRegression)
from sklearn.utils import check_random_state
from sklearn.datasets import make_sparse_coded_signal
# Shared fixtures for all tests below: a sparse coding problem with a
# known ground-truth code `gamma` (n_nonzero_coefs nonzeros per target).
n_samples, n_features, n_nonzero_coefs, n_targets = 20, 30, 5, 3
y, X, gamma = make_sparse_coded_signal(n_targets, n_features, n_samples,
                                       n_nonzero_coefs, random_state=0)
# Precomputed Gram matrix and X^T y products for the *_gram solvers.
G, Xy = np.dot(X.T, X), np.dot(X.T, y)
# this makes X (n_samples, n_features)
# and y (n_samples, 3)
def test_correct_shapes():
    # A single target yields a 1-D coefficient vector ...
    single = orthogonal_mp(X, y[:, 0], n_nonzero_coefs=5)
    assert_equal(single.shape, (n_features,))
    # ... while a multi-target y yields one column per target.
    multi = orthogonal_mp(X, y, n_nonzero_coefs=5)
    assert_equal(multi.shape, (n_features, 3))
def test_correct_shapes_gram():
    # Same shape contract as test_correct_shapes, via the Gram solver.
    single = orthogonal_mp_gram(G, Xy[:, 0], n_nonzero_coefs=5)
    assert_equal(single.shape, (n_features,))
    multi = orthogonal_mp_gram(G, Xy, n_nonzero_coefs=5)
    assert_equal(multi.shape, (n_features, 3))
def test_n_nonzero_coefs():
    # The solver must never select more atoms than requested, with or
    # without the precomputed-Gram code path.
    for precompute in (False, True):
        coef = orthogonal_mp(X, y[:, 0], n_nonzero_coefs=5,
                             precompute=precompute)
        assert_true(np.count_nonzero(coef) <= 5)
def test_tol():
    # With a residual tolerance, the squared reconstruction error must
    # drop below ``tol`` on both code paths.
    tol = 0.5
    target = y[:, 0]
    for precompute in (False, True):
        coef = orthogonal_mp(X, target, tol=tol, precompute=precompute)
        assert_true(np.sum((target - np.dot(X, coef)) ** 2) <= tol)
def test_with_without_gram():
    # Precomputing the Gram matrix must not change the solution.
    plain = orthogonal_mp(X, y, n_nonzero_coefs=5)
    with_gram = orthogonal_mp(X, y, n_nonzero_coefs=5, precompute=True)
    assert_array_almost_equal(plain, with_gram)
def test_with_without_gram_tol():
    # Tolerance-driven stopping must also be Gram-agnostic.
    plain = orthogonal_mp(X, y, tol=1.)
    with_gram = orthogonal_mp(X, y, tol=1., precompute=True)
    assert_array_almost_equal(plain, with_gram)
def test_unreachable_accuracy():
    # Asking for a zero residual forces the solver to use every atom;
    # the Gram path additionally warns that the tolerance is unreachable.
    full = orthogonal_mp(X, y, n_nonzero_coefs=n_features)
    assert_array_almost_equal(orthogonal_mp(X, y, tol=0), full)
    warned = assert_warns(RuntimeWarning, orthogonal_mp, X, y, tol=0,
                          precompute=True)
    full_gram = orthogonal_mp(X, y, precompute=True,
                              n_nonzero_coefs=n_features)
    assert_array_almost_equal(warned, full_gram)
def test_bad_input():
    # A negative tolerance or an invalid sparsity level must be rejected
    # by both the plain and the Gram-based entry points.
    for solver, lhs, rhs in ((orthogonal_mp, X, y),
                             (orthogonal_mp_gram, G, Xy)):
        assert_raises(ValueError, solver, lhs, rhs, tol=-1)
        assert_raises(ValueError, solver, lhs, rhs, n_nonzero_coefs=-1)
        assert_raises(ValueError, solver, lhs, rhs,
                      n_nonzero_coefs=n_features + 1)
def test_perfect_signal_recovery():
    # OMP should recover exactly the support that generated the signal,
    # and coefficient values close to the ground truth.
    support, = gamma[:, 0].nonzero()
    rec = orthogonal_mp(X, y[:, 0], 5)
    rec_gram = orthogonal_mp_gram(G, Xy[:, 0], 5)
    assert_array_equal(support, np.flatnonzero(rec))
    assert_array_equal(support, np.flatnonzero(rec_gram))
    assert_array_almost_equal(gamma[:, 0], rec, decimal=2)
    assert_array_almost_equal(gamma[:, 0], rec_gram, decimal=2)
def test_estimator():
    est = OrthogonalMatchingPursuit(n_nonzero_coefs=n_nonzero_coefs)
    # Single-target fit: 1-D coef_, scalar intercept_.
    est.fit(X, y[:, 0])
    assert_equal(est.coef_.shape, (n_features,))
    assert_equal(est.intercept_.shape, ())
    assert_true(np.count_nonzero(est.coef_) <= n_nonzero_coefs)
    # Multi-target fit: one coefficient row and one intercept per target.
    est.fit(X, y)
    assert_equal(est.coef_.shape, (n_targets, n_features))
    assert_equal(est.intercept_.shape, (n_targets,))
    assert_true(np.count_nonzero(est.coef_) <= n_targets * n_nonzero_coefs)
    # Without intercept fitting or normalization the intercept is 0.
    est.set_params(fit_intercept=False, normalize=False)
    est.fit(X, y[:, 0])
    assert_equal(est.coef_.shape, (n_features,))
    assert_equal(est.intercept_, 0)
    assert_true(np.count_nonzero(est.coef_) <= n_nonzero_coefs)
    est.fit(X, y)
    assert_equal(est.coef_.shape, (n_targets, n_features))
    assert_equal(est.intercept_, 0)
    assert_true(np.count_nonzero(est.coef_) <= n_targets * n_nonzero_coefs)
def test_identical_regressors():
    # Duplicate columns make the true support ambiguous: OMP should warn
    # instead of silently picking one copy.
    dup_X = X.copy()
    dup_X[:, 1] = dup_X[:, 0]
    true_coef = np.zeros(n_features)
    true_coef[0] = true_coef[1] = 1.
    target = np.dot(dup_X, true_coef)
    assert_warns(RuntimeWarning, orthogonal_mp, dup_X, target, 2)
def test_swapped_regressors():
    # X[:, 21] is selected first and X[:, 0] second, so X[:, 0] lands in
    # X[:, 21]'s slot if the implementation swaps columns internally
    # (which it does at the moment); recovery must still be correct.
    true_coef = np.zeros(n_features)
    true_coef[21] = 1.0
    true_coef[0] = 0.5
    new_y = np.dot(X, true_coef)
    new_Xy = np.dot(X.T, new_y)
    assert_array_equal(np.flatnonzero(orthogonal_mp(X, new_y, 2)),
                       [0, 21])
    assert_array_equal(np.flatnonzero(orthogonal_mp_gram(G, new_Xy, 2)),
                       [0, 21])
def test_no_atoms():
    """A zero signal selects no atoms: the returned code must be all-zero.

    Both solvers warn about the empty solution, hence ignore_warnings.
    """
    y_empty = np.zeros_like(y)
    Xy_empty = np.dot(X.T, y_empty)
    gamma_empty = ignore_warnings(orthogonal_mp)(X, y_empty, 1)
    # Bug fix: the Gram variant must go through orthogonal_mp_gram.
    # Calling orthogonal_mp on (G, Xy_empty) treated G as a design matrix
    # and never exercised the Gram code path this line claims to test.
    gamma_empty_gram = ignore_warnings(orthogonal_mp_gram)(G, Xy_empty, 1)
    assert_equal(np.all(gamma_empty == 0), True)
    assert_equal(np.all(gamma_empty_gram == 0), True)
def test_omp_path():
    # return_path=True stacks every intermediate solution; its final
    # entry must match the return_path=False solution, on both solvers.
    for solver, lhs, rhs in ((orthogonal_mp, X, y),
                             (orthogonal_mp_gram, G, Xy)):
        path = solver(lhs, rhs, n_nonzero_coefs=5, return_path=True)
        last = solver(lhs, rhs, n_nonzero_coefs=5, return_path=False)
        assert_equal(path.shape, (n_features, n_targets, 5))
        assert_array_almost_equal(path[:, :, -1], last)
def test_omp_return_path_prop_with_gram():
    # return_path must also work when the Gram matrix is precomputed
    # internally via precompute=True.
    path = orthogonal_mp(X, y, n_nonzero_coefs=5, return_path=True,
                         precompute=True)
    final = orthogonal_mp(X, y, n_nonzero_coefs=5, return_path=False,
                          precompute=True)
    assert_equal(path.shape, (n_features, n_targets, 5))
    assert_array_almost_equal(path[:, :, -1], final)
def test_omp_cv():
    # Cross-validation should recover the true sparsity level, and the
    # resulting model should agree with a plain OMP fit at that level.
    target = y[:, 0]
    true_coef = gamma[:, 0]
    ompcv = OrthogonalMatchingPursuitCV(normalize=True, fit_intercept=False,
                                        max_iter=10, cv=5)
    ompcv.fit(X, target)
    assert_equal(ompcv.n_nonzero_coefs_, n_nonzero_coefs)
    assert_array_almost_equal(ompcv.coef_, true_coef)
    reference = OrthogonalMatchingPursuit(
        normalize=True, fit_intercept=False,
        n_nonzero_coefs=ompcv.n_nonzero_coefs_)
    reference.fit(X, target)
    assert_array_almost_equal(ompcv.coef_, reference.coef_)
def test_omp_reaches_least_squares():
    # Sanity check on small, simple data (OMP can stop early otherwise):
    # with every feature allowed, OMP must coincide with least squares.
    rng = check_random_state(0)
    n_samples, n_features = (10, 8)
    n_targets = 3
    X = rng.randn(n_samples, n_features)
    Y = rng.randn(n_samples, n_targets)
    sparse_model = OrthogonalMatchingPursuit(n_nonzero_coefs=n_features)
    sparse_model.fit(X, Y)
    dense_model = LinearRegression()
    dense_model.fit(X, Y)
    assert_array_almost_equal(sparse_model.coef_, dense_model.coef_)
| bsd-3-clause |
mikebenfield/scipy | tools/gh_lists.py | 99 | 4195 | #!/usr/bin/env python
# -*- encoding:utf-8 -*-
"""
gh_lists.py MILESTONE
Functions for Github API requests.
"""
from __future__ import print_function, division, absolute_import
import os
import re
import sys
import json
import collections
import argparse
from urllib2 import urlopen
Issue = collections.namedtuple('Issue', ('id', 'title', 'url'))
def main():
    """Print reST-formatted lists of closed issues and PRs for a milestone."""
    p = argparse.ArgumentParser(usage=__doc__.lstrip())
    p.add_argument('--project', default='scipy/scipy')
    p.add_argument('milestone')
    args = p.parse_args()

    getter = CachedGet('gh_cache.json')
    try:
        milestones = get_milestones(getter, args.project)
        if args.milestone not in milestones:
            msg = "Milestone {0} not available. Available milestones: {1}"
            msg = msg.format(args.milestone, u", ".join(sorted(milestones)))
            p.error(msg)
        issues = get_issues(getter, args.project, args.milestone)
        issues.sort()
    finally:
        # Persist whatever was fetched, even if an error aborts the run.
        getter.save()

    prs = [x for x in issues if u'/pull/' in x.url]
    issues = [x for x in issues if x not in prs]

    def print_list(title, items):
        # Emit one reST section with a bullet per issue/PR.
        print()
        print(title)
        print("-"*len(title))
        print()

        for issue in items:
            msg = u"- `#{0} <{1}>`__: {2}"
            title = re.sub(u"\s+", u" ", issue.title.strip())
            if len(title) > 60:
                remainder = re.sub(u"\s.*$", u"...", title[60:])
                if len(remainder) > 20:
                    # Bug fix: the truncated string was previously assigned
                    # to `remainder` and thrown away, leaving `title`
                    # untruncated.  Rebind `title` instead.
                    title = title[:80] + u"..."
                else:
                    title = title[:60] + remainder
            msg = msg.format(issue.id, issue.url, title)
            print(msg)
        print()

    msg = u"Issues closed for {0}".format(args.milestone)
    print_list(msg, issues)

    msg = u"Pull requests for {0}".format(args.milestone)
    print_list(msg, prs)
    return 0
def get_milestones(getter, project):
    # Map milestone title -> milestone number for the given repository.
    url = "https://api.github.com/repos/{project}/milestones".format(project=project)
    body, _info = getter.get(url)
    return dict((entry[u'title'], entry[u'number'])
                for entry in json.loads(body))
def get_issues(getter, project, milestone):
    # Resolve the milestone title to its numeric id, then walk the
    # paginated list of closed issues attached to it.
    mid = get_milestones(getter, project)[milestone]
    url = "https://api.github.com/repos/{project}/issues?milestone={mid}&state=closed&sort=created&direction=asc"
    url = url.format(project=project, mid=mid)

    pages = []
    while True:
        body, info = getter.get(url)
        pages.append(body)
        # GitHub signals further pages through the HTTP Link header.
        if 'link' not in info:
            break
        nxt = re.search('<(.*?)>; rel="next"', info['link'])
        if not nxt:
            break
        url = nxt.group(1)

    issues = []
    for body in pages:
        for item in json.loads(body):
            issues.append(Issue(item[u'number'],
                                item[u'title'],
                                item[u'html_url']))
    return issues
class CachedGet(object):
    """HTTP GET helper that persists (body, headers) pairs in a JSON file."""

    def __init__(self, filename):
        self.filename = filename
        self.cache = {}
        if os.path.isfile(filename):
            print("[gh_lists] using {0} as cache (remove it if you want fresh data)".format(filename),
                  file=sys.stderr)
            with open(filename, 'rb') as fh:
                self.cache = json.load(fh)

    def get(self, url):
        # Cache keys are unicode URLs (the cache round-trips via JSON).
        url = unicode(url)
        if url in self.cache:
            print("[gh_lists] get (cached):", url, file=sys.stderr)
        else:
            print("[gh_lists] get:", url, file=sys.stderr)
            req = urlopen(url)
            if req.getcode() != 200:
                raise RuntimeError()
            body = req.read()
            headers = dict(req.info())
            self.cache[url] = (body, headers)
            req.close()
        return self.cache[url]

    def save(self):
        # Write-then-rename so an interrupted save cannot corrupt the cache.
        tmp = self.filename + ".new"
        with open(tmp, 'wb') as fh:
            json.dump(self.cache, fh)
        os.rename(tmp, self.filename)
if __name__ == "__main__":
sys.exit(main())
| bsd-3-clause |
run2/citytour | 4symantec/Lib/site-packages/numpy-1.9.2-py2.7-win-amd64.egg/numpy/lib/tests/test_polynomial.py | 32 | 4748 | from __future__ import division, absolute_import, print_function
'''
>>> p = np.poly1d([1.,2,3])
>>> p
poly1d([ 1., 2., 3.])
>>> print(p)
2
1 x + 2 x + 3
>>> q = np.poly1d([3.,2,1])
>>> q
poly1d([ 3., 2., 1.])
>>> print(q)
2
3 x + 2 x + 1
>>> print(np.poly1d([1.89999+2j, -3j, -5.12345678, 2+1j]))
3 2
(1.9 + 2j) x - 3j x - 5.123 x + (2 + 1j)
>>> print(np.poly1d([-3, -2, -1]))
2
-3 x - 2 x - 1
>>> p(0)
3.0
>>> p(5)
38.0
>>> q(0)
1.0
>>> q(5)
86.0
>>> p * q
poly1d([ 3., 8., 14., 8., 3.])
>>> p / q
(poly1d([ 0.33333333]), poly1d([ 1.33333333, 2.66666667]))
>>> p + q
poly1d([ 4., 4., 4.])
>>> p - q
poly1d([-2., 0., 2.])
>>> p ** 4
poly1d([ 1., 8., 36., 104., 214., 312., 324., 216., 81.])
>>> p(q)
poly1d([ 9., 12., 16., 8., 6.])
>>> q(p)
poly1d([ 3., 12., 32., 40., 34.])
>>> np.asarray(p)
array([ 1., 2., 3.])
>>> len(p)
2
>>> p[0], p[1], p[2], p[3]
(3.0, 2.0, 1.0, 0)
>>> p.integ()
poly1d([ 0.33333333, 1. , 3. , 0. ])
>>> p.integ(1)
poly1d([ 0.33333333, 1. , 3. , 0. ])
>>> p.integ(5)
poly1d([ 0.00039683, 0.00277778, 0.025 , 0. , 0. ,
0. , 0. , 0. ])
>>> p.deriv()
poly1d([ 2., 2.])
>>> p.deriv(2)
poly1d([ 2.])
>>> q = np.poly1d([1.,2,3], variable='y')
>>> print(q)
2
1 y + 2 y + 3
>>> q = np.poly1d([1.,2,3], variable='lambda')
>>> print(q)
2
1 lambda + 2 lambda + 3
>>> np.polydiv(np.poly1d([1,0,-1]), np.poly1d([1,1]))
(poly1d([ 1., -1.]), poly1d([ 0.]))
'''
import numpy as np
from numpy.testing import (
run_module_suite, TestCase, assert_, assert_equal, assert_array_equal,
assert_almost_equal, rundocs
)
class TestDocs(TestCase):
    # Tests for np.poly1d and the polynomial helpers in numpy.lib.
    def test_doctests(self):
        # Run the doctests embedded in this module's docstring.
        return rundocs()
    def test_roots(self):
        # x**2 has a double root at zero.
        assert_array_equal(np.roots([1, 0, 0]), [0, 0])
    def test_str_leading_zeros(self):
        # A coefficient zeroed via item assignment must disappear from
        # the pretty-printed representation.
        p = np.poly1d([4, 3, 2, 1])
        p[3] = 0
        assert_equal(str(p),
                     " 2\n"
                     "3 x + 2 x + 1")
        # All coefficients zero collapses to the constant polynomial 0.
        p = np.poly1d([1, 2])
        p[0] = 0
        p[1] = 0
        assert_equal(str(p), " \n0")
    def test_polyfit(self):
        # Fit y = 3x^2 + 2x + 1 with symmetric noise `err`, with and
        # without weights, and check coefficients and covariance against
        # precomputed reference values.
        c = np.array([3., 2., 1.])
        x = np.linspace(0, 2, 7)
        y = np.polyval(c, x)
        err = [1, -1, 1, -1, 1, -1, 1]
        weights = np.arange(8, 1, -1)**2/7.0
        # check 1D case
        m, cov = np.polyfit(x, y+err, 2, cov=True)
        est = [3.8571, 0.2857, 1.619]
        assert_almost_equal(est, m, decimal=4)
        val0 = [[2.9388, -5.8776, 1.6327],
                [-5.8776, 12.7347, -4.2449],
                [1.6327, -4.2449, 2.3220]]
        assert_almost_equal(val0, cov, decimal=4)
        m2, cov2 = np.polyfit(x, y+err, 2, w=weights, cov=True)
        assert_almost_equal([4.8927, -1.0177, 1.7768], m2, decimal=4)
        val = [[8.7929, -10.0103, 0.9756],
               [-10.0103, 13.6134, -1.8178],
               [0.9756, -1.8178, 0.6674]]
        assert_almost_equal(val, cov2, decimal=4)
        # check 2D (n,1) case
        y = y[:, np.newaxis]
        c = c[:, np.newaxis]
        assert_almost_equal(c, np.polyfit(x, y, 2))
        # check 2D (n,2) case
        yy = np.concatenate((y, y), axis=1)
        cc = np.concatenate((c, c), axis=1)
        assert_almost_equal(cc, np.polyfit(x, yy, 2))
        m, cov = np.polyfit(x, yy + np.array(err)[:, np.newaxis], 2, cov=True)
        assert_almost_equal(est, m[:, 0], decimal=4)
        assert_almost_equal(est, m[:, 1], decimal=4)
        assert_almost_equal(val0, cov[:, :, 0], decimal=4)
        assert_almost_equal(val0, cov[:, :, 1], decimal=4)
    def test_objects(self):
        # poly1d must propagate object (Decimal) coefficients through
        # multiplication, differentiation and integration.
        from decimal import Decimal
        p = np.poly1d([Decimal('4.0'), Decimal('3.0'), Decimal('2.0')])
        p2 = p * Decimal('1.333333333333333')
        assert_(p2[1] == Decimal("3.9999999999999990"))
        p2 = p.deriv()
        assert_(p2[1] == Decimal('8.0'))
        p2 = p.integ()
        assert_(p2[3] == Decimal("1.333333333333333333333333333"))
        assert_(p2[2] == Decimal('1.5'))
        assert_(np.issubdtype(p2.coeffs.dtype, np.object_))
    def test_complex(self):
        # Complex coefficients survive integration and differentiation.
        p = np.poly1d([3j, 2j, 1j])
        p2 = p.integ()
        assert_((p2.coeffs == [1j, 1j, 1j, 0]).all())
        p2 = p.deriv()
        assert_((p2.coeffs == [6j, 2j]).all())
    def test_integ_coeffs(self):
        # Triple integration with explicit integration constants k.
        p = np.poly1d([3, 2, 1])
        p2 = p.integ(3, k=[9, 7, 6])
        assert_(
            (p2.coeffs == [1/4./5., 1/3./4., 1/2./3., 9/1./2., 7, 6]).all())
    def test_zero_dims(self):
        # np.poly on an empty 2-D array may raise ValueError; both the
        # raising and non-raising outcomes are accepted here.
        try:
            np.poly(np.zeros((0, 0)))
        except ValueError:
            pass
if __name__ == "__main__":
run_module_suite()
| mit |
iobond/aib | share/qt/extract_strings_qt.py | 33 | 2717 | #!/usr/bin/python
# Copyright (c) 2012-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
Extract _("...") strings for translation and convert to Qt stringdefs so that
they can be picked up by Qt linguist.
'''
from __future__ import division,print_function,unicode_literals
from subprocess import Popen, PIPE
import glob
import operator
import os
import sys
OUT_CPP="qt/bitcoinstrings.cpp"
EMPTY=['""']
def parse_po(text):
    """
    Parse 'po' format produced by xgettext.
    Return a list of (msgid,msgstr) tuples.
    """
    entries = []
    cur_id = []
    cur_str = []
    state = None  # None, 'msgid' or 'msgstr'
    for raw in text.split('\n'):
        line = raw.rstrip('\r')
        if line.startswith('msgid '):
            # A new message begins; flush the previous one, if complete.
            if state == 'msgstr':
                entries.append((cur_id, cur_str))
            state = 'msgid'
            cur_id = [line[6:]]
        elif line.startswith('msgstr '):
            state = 'msgstr'
            cur_str = [line[7:]]
        elif line.startswith('"'):
            # Continuation line belongs to whichever part is open.
            if state == 'msgid':
                cur_id.append(line)
            if state == 'msgstr':
                cur_str.append(line)
    # Flush the trailing message, if any.
    if state == 'msgstr':
        entries.append((cur_id, cur_str))
    return entries
# Script body: run xgettext over the files given on the command line and
# write the extracted strings to OUT_CPP as QT_TRANSLATE_NOOP entries.
files = sys.argv[1:]
# xgettext -n --keyword=_ $FILES
XGETTEXT=os.getenv('XGETTEXT', 'xgettext')
if not XGETTEXT:
    print('Cannot extract strings: xgettext utility is not installed or not configured.',file=sys.stderr)
    print('Please install package "gettext" and re-run \'./configure\'.',file=sys.stderr)
    exit(1)
child = Popen([XGETTEXT,'--output=-','-n','--keyword=_'] + files, stdout=PIPE)
(out, err) = child.communicate()
messages = parse_po(out.decode('utf-8'))
# Emit the generated C++ file: a header guard for the UNUSED attribute,
# then one translatable string per message.
f = open(OUT_CPP, 'w')
f.write("""
#include <QtGlobal>
// Automatically generated by extract_strings_qt.py
#ifdef __GNUC__
#define UNUSED __attribute__((unused))
#else
#define UNUSED
#endif
""")
f.write('static const char UNUSED *bitcoin_strings[] = {\n')
# Package name and copyright holders come from the build environment.
f.write('QT_TRANSLATE_NOOP("bitcoin-core", "%s"),\n' % (os.getenv('PACKAGE_NAME'),))
f.write('QT_TRANSLATE_NOOP("bitcoin-core", "%s"),\n' % (os.getenv('COPYRIGHT_HOLDERS'),))
if os.getenv('COPYRIGHT_HOLDERS_SUBSTITUTION') != os.getenv('PACKAGE_NAME'):
    f.write('QT_TRANSLATE_NOOP("bitcoin-core", "%s"),\n' % (os.getenv('COPYRIGHT_HOLDERS_SUBSTITUTION'),))
# Sort by msgid for stable output; skip empty msgids (the PO header).
messages.sort(key=operator.itemgetter(0))
for (msgid, msgstr) in messages:
    if msgid != EMPTY:
        f.write('QT_TRANSLATE_NOOP("bitcoin-core", %s),\n' % ('\n'.join(msgid)))
f.write('};\n')
f.close()
| mit |
BT-ojossen/odoo | addons/account_budget/report/analytic_account_budget_report.py | 360 | 7589 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.osv import osv
from openerp.report import report_sxw
class analytic_account_budget_report(report_sxw.rml_parse):
    # RML report parser for the analytic-account budget report: exposes
    # funct/funct_total/time to the report template context.
    def __init__(self, cr, uid, name, context):
        super(analytic_account_budget_report, self).__init__(cr, uid, name, context=context)
        self.localcontext.update( {
            'funct': self.funct,
            'funct_total': self.funct_total,
            'time': time,
        })
        self.context = context
    def funct(self, object, form, ids=None, done=None, level=1):
        # Build report rows for the analytic account `object`: one header
        # row per budget (status 1) followed by one row per budget
        # position (status 2), restricted to the wizard date range in
        # `form`.  Returns [] when the account has no budget lines.
        # NOTE(review): totals accumulate in the module-level global
        # `tot`, read later by funct_total() — this relies on the
        # template rendering funct() before funct_total().
        if ids is None:
            ids = {}
        if not ids:
            ids = self.ids
        if not done:
            done = {}
        global tot
        tot = {
            'theo':0.00,
            'pln':0.00,
            'prac':0.00,
            'perc':0.00
        }
        result = []
        accounts = self.pool.get('account.analytic.account').browse(self.cr, self.uid, [object.id], self.context.copy())
        c_b_lines_obj = self.pool.get('crossovered.budget.lines')
        obj_c_budget = self.pool.get('crossovered.budget')
        for account_id in accounts:
            res = {}
            b_line_ids = []
            for line in account_id.crossovered_budget_line:
                b_line_ids.append(line.id)
            # Nothing to report without budget lines.
            if not b_line_ids:
                return []
            d_from = form['date_from']
            d_to = form['date_to']
            # All budgets referenced by this account's budget lines.
            self.cr.execute('SELECT DISTINCT(crossovered_budget_id) FROM crossovered_budget_lines WHERE id =ANY(%s)',(b_line_ids,))
            budget_ids = self.cr.fetchall()
            # _theo_amt/_prac_amt read the date window from the context.
            context = {'wizard_date_from':d_from,'wizard_date_to':d_to}
            for i in range(0, len(budget_ids)):
                budget_name = obj_c_budget.browse(self.cr, self.uid, [budget_ids[i][0]])
                # Budget header row; its totals are patched in below once
                # all of its detail rows have been accumulated.
                res= {
                    'b_id':'-1',
                    'a_id':'-1',
                    'name':budget_name[0].name,
                    'status':1,
                    'theo':0.00,
                    'pln':0.00,
                    'prac':0.00,
                    'perc':0.00
                }
                result.append(res)
                line_ids = c_b_lines_obj.search(self.cr, self.uid, [('id', 'in', b_line_ids), ('crossovered_budget_id','=',budget_ids[i][0])])
                line_id = c_b_lines_obj.browse(self.cr, self.uid, line_ids)
                tot_theo = tot_pln = tot_prac = tot_perc = 0
                done_budget = []
                for line in line_id:
                    if line.id in b_line_ids:
                        theo = pract = 0.00
                        theo = c_b_lines_obj._theo_amt(self.cr, self.uid, [line.id], context)[line.id]
                        pract = c_b_lines_obj._prac_amt(self.cr, self.uid, [line.id], context)[line.id]
                        if line.general_budget_id.id in done_budget:
                            # Budget position already has a row: merge the
                            # amounts into it.
                            for record in result:
                                if record['b_id'] == line.general_budget_id.id and record['a_id'] == line.analytic_account_id.id:
                                    record['theo'] += theo
                                    record['pln'] += line.planned_amount
                                    record['prac'] += pract
                                    record['perc'] += line.percentage
                                    tot_theo += theo
                                    tot_pln += line.planned_amount
                                    tot_prac += pract
                                    tot_perc += line.percentage
                        else:
                            # First line for this budget position.
                            res1 = {
                                'b_id': line.general_budget_id.id,
                                'a_id': line.analytic_account_id.id,
                                'name': line.general_budget_id.name,
                                'status': 2,
                                'theo': theo,
                                'pln': line.planned_amount,
                                'prac': pract,
                                'perc': line.percentage
                            }
                            tot_theo += theo
                            tot_pln += line.planned_amount
                            tot_prac += pract
                            tot_perc += line.percentage
                            result.append(res1)
                            done_budget.append(line.general_budget_id.id)
                    else:
                        # Line belongs to another account: emit an empty
                        # row once per budget position.
                        if line.general_budget_id.id in done_budget:
                            continue
                        else:
                            res1={
                                'b_id': line.general_budget_id.id,
                                'a_id': line.analytic_account_id.id,
                                'name': line.general_budget_id.name,
                                'status': 2,
                                'theo': 0.00,
                                'pln': 0.00,
                                'prac': 0.00,
                                'perc': 0.00
                            }
                            result.append(res1)
                            done_budget.append(line.general_budget_id.id)
                # Patch the header row (it sits len(done_budget) rows back)
                # with this budget's totals, and fold them into `tot`.
                if tot_theo == 0.00:
                    tot_perc = 0.00
                else:
                    tot_perc = float(tot_prac / tot_theo) * 100
                result[-(len(done_budget) +1)]['theo'] = tot_theo
                tot['theo'] +=tot_theo
                result[-(len(done_budget) +1)]['pln'] = tot_pln
                tot['pln'] +=tot_pln
                result[-(len(done_budget) +1)]['prac'] = tot_prac
                tot['prac'] +=tot_prac
                result[-(len(done_budget) +1)]['perc'] = tot_perc
                if tot['theo'] == 0.00:
                    tot['perc'] = 0.00
                else:
                    tot['perc'] = float(tot['prac'] / tot['theo']) * 100
        return result
    def funct_total(self, form):
        # Return the grand totals accumulated by funct() as a single row.
        result = []
        res = {}
        res = {
            'tot_theo': tot['theo'],
            'tot_pln': tot['pln'],
            'tot_prac': tot['prac'],
            'tot_perc': tot['perc']
        }
        result.append(res)
        return result
class report_analyticaccountbudget(osv.AbstractModel):
    # QWeb report wrapper binding the template to the rml_parse class above.
    _name = 'report.account_budget.report_analyticaccountbudget'
    _inherit = 'report.abstract_report'
    _template = 'account_budget.report_analyticaccountbudget'
    _wrapped_report_class = analytic_account_budget_report
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
plepers/three.js | utils/exporters/blender/2.65/scripts/addons/io_mesh_threejs/__init__.py | 6 | 18004 | # ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# ################################################################
# Init
# ################################################################
bl_info = {
"name": "three.js format",
"author": "mrdoob, kikko, alteredq, remoe, pxf, n3tfr34k, crobi",
"version": (1, 5, 0),
"blender": (2, 7, 0),
"location": "File > Import-Export",
"description": "Import-Export three.js meshes",
"warning": "",
"wiki_url": "https://github.com/mrdoob/three.js/tree/master/utils/exporters/blender",
"tracker_url": "https://github.com/mrdoob/three.js/issues",
"category": "Import-Export"}
# To support reload properly, try to access a package var,
# if it's there, reload everything
import bpy
if "bpy" in locals():
import imp
if "export_threejs" in locals():
imp.reload(export_threejs)
if "import_threejs" in locals():
imp.reload(import_threejs)
from bpy.props import *
from bpy_extras.io_utils import ExportHelper, ImportHelper
# ################################################################
# Custom properties
# ################################################################
# Custom per-object export flags, shown in the THREE object panel below.
bpy.types.Object.THREE_castShadow = bpy.props.BoolProperty()
bpy.types.Object.THREE_receiveShadow = bpy.props.BoolProperty()
bpy.types.Object.THREE_doubleSided = bpy.props.BoolProperty()
bpy.types.Object.THREE_exportGeometry = bpy.props.BoolProperty(default = True)
bpy.types.Object.THREE_visible = bpy.props.BoolProperty(default = True)
# Custom per-material settings, shown in the THREE material panel below.
bpy.types.Material.THREE_useVertexColors = bpy.props.BoolProperty()
bpy.types.Material.THREE_depthWrite = bpy.props.BoolProperty(default = True)
bpy.types.Material.THREE_depthTest = bpy.props.BoolProperty(default = True)
# Enum items are (identifier, name, description) triples.
THREE_material_types = [("Basic", "Basic", "Basic"), ("Phong", "Phong", "Phong"), ("Lambert", "Lambert", "Lambert")]
bpy.types.Material.THREE_materialType = EnumProperty(name = "Material type", description = "Material type", items = THREE_material_types, default = "Lambert")
THREE_blending_types = [("NoBlending", "NoBlending", "NoBlending"), ("NormalBlending", "NormalBlending", "NormalBlending"),
                        ("AdditiveBlending", "AdditiveBlending", "AdditiveBlending"), ("SubtractiveBlending", "SubtractiveBlending", "SubtractiveBlending"),
                        ("MultiplyBlending", "MultiplyBlending", "MultiplyBlending"), ("AdditiveAlphaBlending", "AdditiveAlphaBlending", "AdditiveAlphaBlending")]
bpy.types.Material.THREE_blendingType = EnumProperty(name = "Blending type", description = "Blending type", items = THREE_blending_types, default = "NormalBlending")
class OBJECT_PT_hello( bpy.types.Panel ):
    """Object-properties panel exposing the three.js export flags."""
    bl_label = "THREE"
    bl_space_type = "PROPERTIES"
    bl_region_type = "WINDOW"
    bl_context = "object"

    def draw(self, context):
        layout = self.layout
        obj = context.object

        header = layout.row()
        header.label(text="Selected object: " + obj.name )

        # One row per custom export flag, in fixed display order.
        for prop_name, caption in (
                ("THREE_exportGeometry", "Export geometry"),
                ("THREE_castShadow", "Casts shadow"),
                ("THREE_receiveShadow", "Receives shadow"),
                ("THREE_doubleSided", "Double sided"),
                ("THREE_visible", "Visible")):
            layout.row().prop( obj, prop_name, text=caption )
class MATERIAL_PT_hello( bpy.types.Panel ):
    """Material-properties panel exposing the three.js material settings."""
    bl_label = "THREE"
    bl_space_type = "PROPERTIES"
    bl_region_type = "WINDOW"
    bl_context = "material"

    def draw(self, context):
        layout = self.layout
        mat = context.material

        header = layout.row()
        header.label(text="Selected material: " + mat.name )

        # One row per custom material setting, in fixed display order.
        for prop_name, caption in (
                ("THREE_materialType", "Material type"),
                ("THREE_blendingType", "Blending type"),
                ("THREE_useVertexColors", "Use vertex colors"),
                ("THREE_depthWrite", "Enable depth writing"),
                ("THREE_depthTest", "Enable depth testing")):
            layout.row().prop( mat, prop_name, text=caption )
# ################################################################
# Importer
# ################################################################
class ImportTHREEJS(bpy.types.Operator, ImportHelper):
    '''Load a Three.js ASCII JSON model'''
    bl_idname = "import.threejs"
    bl_label = "Import Three.js"
    filename_ext = ".json"
    # Hide non-.json files in the file browser.
    filter_glob = StringProperty(default="*.json", options={'HIDDEN'})
    # Import options shown in the file-browser sidebar (draw() below).
    option_flip_yz = BoolProperty(name="Flip YZ", description="Flip YZ", default=True)
    recalculate_normals = BoolProperty(name="Recalculate normals", description="Recalculate vertex normals", default=True)
    option_worker = BoolProperty(name="Worker", description="Old format using workers", default=False)
    def execute(self, context):
        # Import lazily so the addon registers even if the module is broken.
        import io_mesh_threejs.import_threejs
        # Forwards all operator properties as keyword arguments to load().
        return io_mesh_threejs.import_threejs.load(self, context, **self.properties)
    def draw(self, context):
        # One row per import option.
        layout = self.layout
        row = layout.row()
        row.prop(self.properties, "option_flip_yz")
        row = layout.row()
        row.prop(self.properties, "recalculate_normals")
        row = layout.row()
        row.prop(self.properties, "option_worker")
# ################################################################
# Exporter - settings
# ################################################################
SETTINGS_FILE_EXPORT = "threejs_settings_export.js"
import os
import json
def file_exists(filename):
    """Return true if file exists and accessible for reading.

    Should be safer than just testing for existence due to links and
    permissions magic on Unix filesystems.

    @rtype: boolean
    """
    try:
        with open(filename, 'r'):
            pass
    except IOError:
        return False
    return True
def get_settings_fullpath():
    # Settings are kept in Blender's temporary directory so they persist
    # between export-dialog invocations within a session.
    return os.path.join(bpy.app.tempdir, SETTINGS_FILE_EXPORT)
def save_settings_export(properties):
    """Persist the export-operator options to a JSON file in bpy's tempdir.

    Counterpart of restore_settings_export(), which reloads these values
    the next time the export dialog is opened.
    """
    settings = {
    "option_export_scene" : properties.option_export_scene,
    "option_embed_meshes" : properties.option_embed_meshes,
    "option_url_base_html" : properties.option_url_base_html,
    "option_copy_textures" : properties.option_copy_textures,
    "option_lights" : properties.option_lights,
    "option_cameras" : properties.option_cameras,
    "option_animation_morph" : properties.option_animation_morph,
    "option_animation_skeletal" : properties.option_animation_skeletal,
    "option_frame_index_as_time" : properties.option_frame_index_as_time,
    "option_frame_step" : properties.option_frame_step,
    "option_all_meshes" : properties.option_all_meshes,
    "option_flip_yz" : properties.option_flip_yz,
    "option_materials" : properties.option_materials,
    "option_normals" : properties.option_normals,
    "option_colors" : properties.option_colors,
    "option_uv_coords" : properties.option_uv_coords,
    "option_faces" : properties.option_faces,
    "option_vertices" : properties.option_vertices,
    "option_skinning" : properties.option_skinning,
    "option_bones" : properties.option_bones,
    "option_vertices_truncate" : properties.option_vertices_truncate,
    "option_scale" : properties.option_scale,
    "align_model" : properties.align_model
    }

    fname = get_settings_fullpath()
    # Bug fix: the handle was previously left open after json.dump; use a
    # context manager so the file is flushed and closed deterministically.
    with open(fname, "w") as f:
        json.dump(settings, f)
def restore_settings_export(properties):
    """Reload previously saved export options onto `properties`.

    Missing file or missing keys fall back to the documented defaults, so
    a first run (or a stale settings file) is always safe.
    """
    settings = {}

    fname = get_settings_fullpath()
    if file_exists(fname):
        # Bug fix: close the settings file after reading (the handle was
        # previously leaked).
        with open(fname, "r") as f:
            settings = json.load(f)

    properties.option_vertices = settings.get("option_vertices", True)
    properties.option_vertices_truncate = settings.get("option_vertices_truncate", False)
    properties.option_faces = settings.get("option_faces", True)
    properties.option_normals = settings.get("option_normals", True)
    properties.option_colors = settings.get("option_colors", True)
    properties.option_uv_coords = settings.get("option_uv_coords", True)
    properties.option_materials = settings.get("option_materials", True)
    properties.option_skinning = settings.get("option_skinning", True)
    properties.option_bones = settings.get("option_bones", True)
    properties.align_model = settings.get("align_model", "None")
    properties.option_scale = settings.get("option_scale", 1.0)
    properties.option_flip_yz = settings.get("option_flip_yz", True)
    properties.option_export_scene = settings.get("option_export_scene", False)
    properties.option_embed_meshes = settings.get("option_embed_meshes", True)
    properties.option_url_base_html = settings.get("option_url_base_html", False)
    properties.option_copy_textures = settings.get("option_copy_textures", False)
    properties.option_lights = settings.get("option_lights", False)
    properties.option_cameras = settings.get("option_cameras", False)
    properties.option_animation_morph = settings.get("option_animation_morph", False)
    properties.option_animation_skeletal = settings.get("option_animation_skeletal", False)
    properties.option_frame_index_as_time = settings.get("option_frame_index_as_time", False)
    properties.option_frame_step = settings.get("option_frame_step", 1)
    properties.option_all_meshes = settings.get("option_all_meshes", True)
# ################################################################
# Exporter
# ################################################################
class ExportTHREEJS(bpy.types.Operator, ExportHelper):
    '''Export selected object / scene for Three.js (ASCII JSON format).'''

    bl_idname = "export.threejs"
    bl_label = "Export Three.js"

    filename_ext = ".json"

    # -- geometry options --------------------------------------------------
    option_vertices = BoolProperty(name = "Vertices", description = "Export vertices", default = True)
    option_vertices_deltas = BoolProperty(name = "Deltas", description = "Delta vertices", default = False)
    option_vertices_truncate = BoolProperty(name = "Truncate", description = "Truncate vertices", default = False)
    option_faces = BoolProperty(name = "Faces", description = "Export faces", default = True)
    option_faces_deltas = BoolProperty(name = "Deltas", description = "Delta faces", default = False)
    option_normals = BoolProperty(name = "Normals", description = "Export normals", default = True)
    option_colors = BoolProperty(name = "Colors", description = "Export vertex colors", default = True)
    option_uv_coords = BoolProperty(name = "UVs", description = "Export texture coordinates", default = True)
    option_materials = BoolProperty(name = "Materials", description = "Export materials", default = True)
    option_skinning = BoolProperty(name = "Skinning", description = "Export skin data", default = True)
    option_bones = BoolProperty(name = "Bones", description = "Export bones", default = True)

    # -- transform options -------------------------------------------------
    # Each enum item is (identifier, name, description).
    align_types = [("None","None","None"), ("Center","Center","Center"), ("Bottom","Bottom","Bottom"), ("Top","Top","Top")]
    align_model = EnumProperty(name = "Align model", description = "Align model", items = align_types, default = "None")

    option_scale = FloatProperty(name = "Scale", description = "Scale vertices", min = 0.01, max = 1000.0, soft_min = 0.01, soft_max = 1000.0, default = 1.0)
    option_flip_yz = BoolProperty(name = "Flip YZ", description = "Flip YZ", default = True)

    # -- scene / texture options (shown under "Experimental" in the UI) ----
    option_export_scene = BoolProperty(name = "Scene", description = "Export scene", default = False)
    option_embed_meshes = BoolProperty(name = "Embed meshes", description = "Embed meshes", default = True)
    option_copy_textures = BoolProperty(name = "Copy textures", description = "Copy textures", default = False)
    option_url_base_html = BoolProperty(name = "HTML as url base", description = "Use HTML as url base ", default = False)
    option_lights = BoolProperty(name = "Lights", description = "Export default scene lights", default = False)
    option_cameras = BoolProperty(name = "Cameras", description = "Export default scene cameras", default = False)

    # -- animation options -------------------------------------------------
    option_animation_morph = BoolProperty(name = "Morph animation", description = "Export animation (morphs)", default = False)
    option_animation_skeletal = BoolProperty(name = "Skeletal animation", description = "Export animation (skeletal)", default = False)
    option_frame_index_as_time = BoolProperty(name = "Frame index as time", description = "Use (original) frame index as frame time", default = False)
    option_frame_step = IntProperty(name = "Frame step", description = "Animation frame step", min = 1, max = 1000, soft_min = 1, soft_max = 1000, default = 1)
    option_all_meshes = BoolProperty(name = "All meshes", description = "All meshes (merged)", default = True)

    def invoke(self, context, event):
        # Re-load the options the user picked last time before the
        # file-select dialog opens.
        restore_settings_export(self.properties)
        return ExportHelper.invoke(self, context, event)

    @classmethod
    def poll(cls, context):
        # Exporter is only available while an object is active.
        return context.active_object != None

    def execute(self, context):
        print("Selected: " + context.active_object.name)

        if not self.properties.filepath:
            raise Exception("filename not set")

        # Persist the chosen options so the next invoke() restores them.
        save_settings_export(self.properties)

        filepath = self.filepath  # NOTE(review): unused local; the call below reads self.properties

        import io_mesh_threejs.export_threejs
        # NOTE(review): forwards every operator property as a keyword
        # argument — assumes export_threejs.save() accepts them all;
        # confirm when that module's signature changes.
        return io_mesh_threejs.export_threejs.save(self, context, **self.properties)

    def draw(self, context):
        # Export-options side panel; layout mirrors the property groups
        # declared above.
        layout = self.layout

        row = layout.row()
        row.label(text="Geometry:")

        row = layout.row()
        row.prop(self.properties, "option_vertices")
        # row = layout.row()
        # row.enabled = self.properties.option_vertices
        # row.prop(self.properties, "option_vertices_deltas")
        row.prop(self.properties, "option_vertices_truncate")
        layout.separator()

        row = layout.row()
        row.prop(self.properties, "option_faces")
        row = layout.row()
        row.enabled = self.properties.option_faces
        # row.prop(self.properties, "option_faces_deltas")
        layout.separator()

        row = layout.row()
        row.prop(self.properties, "option_normals")
        layout.separator()

        row = layout.row()
        row.prop(self.properties, "option_bones")
        row.prop(self.properties, "option_skinning")
        layout.separator()

        row = layout.row()
        row.label(text="Materials:")

        row = layout.row()
        row.prop(self.properties, "option_uv_coords")
        row.prop(self.properties, "option_colors")
        row = layout.row()
        row.prop(self.properties, "option_materials")
        layout.separator()

        row = layout.row()
        row.label(text="Settings:")

        row = layout.row()
        row.prop(self.properties, "align_model")
        row = layout.row()
        row.prop(self.properties, "option_flip_yz")
        row.prop(self.properties, "option_scale")
        layout.separator()

        row = layout.row()
        row.label(text="--------- Experimental ---------")
        layout.separator()

        row = layout.row()
        row.label(text="Scene:")
        row = layout.row()
        row.prop(self.properties, "option_export_scene")
        row.prop(self.properties, "option_embed_meshes")
        row = layout.row()
        row.prop(self.properties, "option_lights")
        row.prop(self.properties, "option_cameras")
        layout.separator()

        row = layout.row()
        row.label(text="Animation:")
        row = layout.row()
        row.prop(self.properties, "option_animation_morph")
        row = layout.row()
        row.prop(self.properties, "option_animation_skeletal")
        row = layout.row()
        row.prop(self.properties, "option_frame_index_as_time")
        row = layout.row()
        row.prop(self.properties, "option_frame_step")
        layout.separator()

        row = layout.row()
        row.label(text="Settings:")
        row = layout.row()
        row.prop(self.properties, "option_all_meshes")
        row = layout.row()
        row.prop(self.properties, "option_copy_textures")
        row = layout.row()
        row.prop(self.properties, "option_url_base_html")
        layout.separator()
# ################################################################
# Common
# ################################################################
def menu_func_export(self, context):
    """File > Export menu entry; pre-seeds the output path from the .blend name."""
    json_path = bpy.data.filepath.replace(".blend", ".json")
    export_op = self.layout.operator(ExportTHREEJS.bl_idname, text="Three.js (.json)")
    export_op.filepath = json_path
def menu_func_import(self, context):
    # File > Import menu entry; ImportTHREEJS is defined earlier in this file.
    self.layout.operator(ImportTHREEJS.bl_idname, text="Three.js (.json)")
def register():
    # Register every operator in this module with Blender, then add the
    # File > Export / File > Import menu entries.
    bpy.utils.register_module(__name__)

    bpy.types.INFO_MT_file_export.append(menu_func_export)
    bpy.types.INFO_MT_file_import.append(menu_func_import)
def unregister():
    # Mirror of register(): drop the operators and the menu entries.
    bpy.utils.unregister_module(__name__)

    bpy.types.INFO_MT_file_export.remove(menu_func_export)
    bpy.types.INFO_MT_file_import.remove(menu_func_import)
if __name__ == "__main__":
register() | mit |
isc-projects/forge | tests/dhcpv4/ddns/test_ddns_tsig_release.py | 1 | 12902 | """DDNS without TSIG"""
# pylint: disable=invalid-name,line-too-long
import pytest
import misc
import srv_control
import srv_msg
@pytest.mark.v4
@pytest.mark.ddns
@pytest.mark.tsig
@pytest.mark.forward_reverse_remove
def test_ddns4_tsig_sha1_forw_and_rev_release():
    """Forward (A) and reverse (PTR) records, updated via an HMAC-SHA1
    TSIG key, are added on lease assignment and removed on RELEASE."""
    # --- server setup: one-address subnet + DDNS with TSIG key ---
    misc.test_setup()
    srv_control.config_srv_subnet('192.168.50.0/24', '192.168.50.10-192.168.50.10')
    srv_control.add_ddns_server('127.0.0.1', '53001')
    srv_control.add_ddns_server_options('enable-updates', True)
    srv_control.add_ddns_server_options('generated-prefix', 'four')
    srv_control.add_ddns_server_options('qualifying-suffix', 'example.com')
    srv_control.add_forward_ddns('four.example.com.', 'forge.sha1.key')
    srv_control.add_reverse_ddns('50.168.192.in-addr.arpa.', 'forge.sha1.key')
    srv_control.add_keys('forge.sha1.key', 'HMAC-SHA1', 'PN4xKZ/jDobCMlo4rpr70w==')
    srv_control.build_and_send_config_files()
    srv_control.start_srv('DHCP', 'started')
    srv_control.use_dns_set_number(21)
    srv_control.start_srv('DNS', 'started')

    # --- precondition: no A record exists yet ---
    misc.test_procedure()
    srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
    srv_msg.client_send_dns_query()
    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER', expect_include=False)

    # --- DISCOVER / OFFER ---
    misc.test_procedure()
    srv_msg.client_requests_option(1)
    srv_msg.client_send_msg('DISCOVER')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'OFFER')
    srv_msg.response_check_include_option(1)
    srv_msg.response_check_content('yiaddr', '192.168.50.10')
    srv_msg.response_check_option_content(1, 'value', '255.255.255.0')

    # --- REQUEST with FQDN option; the S flag asks the server to do
    # the forward (A) update as well as the reverse one ---
    misc.test_procedure()
    srv_msg.client_save_option_count(1, 'server_id')
    srv_msg.client_add_saved_option_count(1)
    srv_msg.client_does_include_with_value('requested_addr', '192.168.50.10')
    srv_msg.client_requests_option(1)
    srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'aa.four.example.com.')
    srv_msg.client_sets_value('Client', 'FQDN_flags', 'S')
    srv_msg.client_does_include('Client', 'fqdn')
    srv_msg.client_send_msg('REQUEST')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'ACK')
    srv_msg.response_check_content('yiaddr', '192.168.50.10')
    srv_msg.response_check_include_option(1)
    srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
    srv_msg.response_check_include_option(81)
    srv_msg.response_check_option_content(81, 'flags', 1)
    srv_msg.response_check_option_content(81, 'fqdn', 'aa.four.example.com.')

    # --- both A and PTR records must now resolve ---
    misc.test_procedure()
    srv_msg.client_save_option('server_id')
    srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
    srv_msg.client_send_dns_query()
    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER')
    srv_msg.dns_option_content('ANSWER', 'rdata', '192.168.50.10')
    srv_msg.dns_option_content('ANSWER', 'rrname', 'aa.four.example.com.')

    misc.test_procedure()
    srv_msg.dns_question_record('10.50.168.192.in-addr.arpa.', 'PTR', 'IN')
    srv_msg.client_send_dns_query()
    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER')
    srv_msg.dns_option_content('ANSWER', 'rdata', 'aa.four.example.com.')
    srv_msg.dns_option_content('ANSWER', 'rrname', '10.50.168.192.in-addr.arpa.')

    # --- RELEASE the lease ---
    misc.test_procedure()
    srv_msg.client_add_saved_option_count(1)
    srv_msg.client_sets_value('Client', 'ciaddr', '192.168.50.10')
    srv_msg.client_send_msg('RELEASE')
    misc.pass_criteria()
    srv_msg.send_dont_wait_for_message()

    # --- both DNS records must be gone again ---
    misc.test_procedure()
    srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
    srv_msg.client_send_dns_query()
    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER', expect_include=False)

    misc.test_procedure()
    srv_msg.dns_question_record('10.50.168.192.in-addr.arpa.', 'PTR', 'IN')
    srv_msg.client_send_dns_query()
    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER', expect_include=False)
@pytest.mark.v4
@pytest.mark.ddns
@pytest.mark.tsig
@pytest.mark.forward_reverse_remove
def test_ddns4_tsig_forw_and_rev_release_notenabled():
    """When the server is reconfigured with enable-updates=False before
    the RELEASE, the A and PTR records added on lease assignment must
    remain in DNS after the lease is released."""
    # --- server setup: one-address subnet + DDNS with TSIG key ---
    misc.test_setup()
    srv_control.config_srv_subnet('192.168.50.0/24', '192.168.50.10-192.168.50.10')
    srv_control.add_ddns_server('127.0.0.1', '53001')
    srv_control.add_ddns_server_options('enable-updates', True)
    srv_control.add_ddns_server_options('generated-prefix', 'four')
    srv_control.add_ddns_server_options('qualifying-suffix', 'example.com')
    srv_control.add_forward_ddns('four.example.com.', 'forge.sha1.key')
    srv_control.add_reverse_ddns('50.168.192.in-addr.arpa.', 'forge.sha1.key')
    srv_control.add_keys('forge.sha1.key', 'HMAC-SHA1', 'PN4xKZ/jDobCMlo4rpr70w==')
    srv_control.build_and_send_config_files()
    srv_control.start_srv('DHCP', 'started')
    srv_control.use_dns_set_number(21)
    srv_control.start_srv('DNS', 'started')

    # --- precondition: no A record exists yet ---
    misc.test_procedure()
    srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
    srv_msg.client_send_dns_query()
    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER', expect_include=False)

    # --- DISCOVER / OFFER ---
    misc.test_procedure()
    srv_msg.client_requests_option(1)
    srv_msg.client_send_msg('DISCOVER')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'OFFER')
    srv_msg.response_check_include_option(1)
    srv_msg.response_check_content('yiaddr', '192.168.50.10')
    srv_msg.response_check_option_content(1, 'value', '255.255.255.0')

    # --- REQUEST with FQDN option (S flag => forward + reverse update) ---
    misc.test_procedure()
    srv_msg.client_save_option_count(1, 'server_id')
    srv_msg.client_add_saved_option_count(1)
    srv_msg.client_does_include_with_value('requested_addr', '192.168.50.10')
    srv_msg.client_requests_option(1)
    srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'aa.four.example.com.')
    srv_msg.client_sets_value('Client', 'FQDN_flags', 'S')
    srv_msg.client_does_include('Client', 'fqdn')
    srv_msg.client_send_msg('REQUEST')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'ACK')
    srv_msg.response_check_content('yiaddr', '192.168.50.10')
    srv_msg.response_check_include_option(1)
    srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
    srv_msg.response_check_include_option(81)
    srv_msg.response_check_option_content(81, 'flags', 1)
    srv_msg.response_check_option_content(81, 'fqdn', 'aa.four.example.com.')

    # --- both A and PTR records now resolve ---
    misc.test_procedure()
    srv_msg.client_save_option('server_id')
    srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
    srv_msg.client_send_dns_query()
    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER')
    srv_msg.dns_option_content('ANSWER', 'rdata', '192.168.50.10')
    srv_msg.dns_option_content('ANSWER', 'rrname', 'aa.four.example.com.')

    misc.test_procedure()
    srv_msg.dns_question_record('10.50.168.192.in-addr.arpa.', 'PTR', 'IN')
    srv_msg.client_send_dns_query()
    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER')
    srv_msg.dns_option_content('ANSWER', 'rdata', 'aa.four.example.com.')
    srv_msg.dns_option_content('ANSWER', 'rrname', '10.50.168.192.in-addr.arpa.')

    # --- restart the server with DDNS updates disabled ---
    misc.test_procedure()
    srv_control.start_srv('DHCP', 'stopped')

    misc.test_setup()
    srv_control.config_srv_subnet('192.168.50.0/24', '192.168.50.10-192.168.50.10')
    srv_control.add_ddns_server('127.0.0.1', '53001')
    srv_control.add_ddns_server_options('enable-updates', False)
    srv_control.add_ddns_server_options('generated-prefix', 'four')
    srv_control.add_ddns_server_options('qualifying-suffix', 'example.com')
    srv_control.add_forward_ddns('four.example.com.', 'forge.sha1.key')
    srv_control.add_reverse_ddns('50.168.192.in-addr.arpa.', 'forge.sha1.key')
    srv_control.add_keys('forge.sha1.key', 'HMAC-SHA1', 'PN4xKZ/jDobCMlo4rpr70w==')
    srv_control.build_and_send_config_files()
    srv_control.start_srv('DHCP', 'started')

    # --- RELEASE the lease (no DDNS removal should happen) ---
    misc.test_procedure()
    srv_msg.client_add_saved_option_count(1)
    srv_msg.client_sets_value('Client', 'ciaddr', '192.168.50.10')
    srv_msg.client_send_msg('RELEASE')
    misc.pass_criteria()
    srv_msg.send_dont_wait_for_message()

    # --- both DNS records must STILL resolve ---
    misc.test_procedure()
    srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
    srv_msg.client_send_dns_query()
    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER')
    srv_msg.dns_option_content('ANSWER', 'rdata', '192.168.50.10')
    srv_msg.dns_option_content('ANSWER', 'rrname', 'aa.four.example.com.')

    misc.test_procedure()
    srv_msg.dns_question_record('10.50.168.192.in-addr.arpa.', 'PTR', 'IN')
    srv_msg.client_send_dns_query()
    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER')
    srv_msg.dns_option_content('ANSWER', 'rdata', 'aa.four.example.com.')
    srv_msg.dns_option_content('ANSWER', 'rrname', '10.50.168.192.in-addr.arpa.')
@pytest.mark.v4
@pytest.mark.ddns
@pytest.mark.tsig
@pytest.mark.reverse_remove
def test_ddns4_tsig_sha1_rev_release():
    """Without the S flag in the FQDN option the server performs only
    the reverse (PTR) update — no A record is created — and the PTR
    record is removed again when the client RELEASEs the lease."""
    # --- server setup: one-address subnet + DDNS with TSIG key ---
    misc.test_setup()
    srv_control.config_srv_subnet('192.168.50.0/24', '192.168.50.10-192.168.50.10')
    srv_control.add_ddns_server('127.0.0.1', '53001')
    srv_control.add_ddns_server_options('enable-updates', True)
    srv_control.add_ddns_server_options('generated-prefix', 'four')
    srv_control.add_ddns_server_options('qualifying-suffix', 'example.com')
    srv_control.add_forward_ddns('four.example.com.', 'forge.sha1.key')
    srv_control.add_reverse_ddns('50.168.192.in-addr.arpa.', 'forge.sha1.key')
    srv_control.add_keys('forge.sha1.key', 'HMAC-SHA1', 'PN4xKZ/jDobCMlo4rpr70w==')
    srv_control.build_and_send_config_files()
    srv_control.start_srv('DHCP', 'started')
    srv_control.use_dns_set_number(21)
    srv_control.start_srv('DNS', 'started')

    # --- precondition: no A record exists yet ---
    misc.test_procedure()
    srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
    srv_msg.client_send_dns_query()
    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER', expect_include=False)

    # --- DISCOVER / OFFER ---
    misc.test_procedure()
    srv_msg.client_requests_option(1)
    srv_msg.client_send_msg('DISCOVER')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'OFFER')
    srv_msg.response_check_include_option(1)
    srv_msg.response_check_content('yiaddr', '192.168.50.10')
    srv_msg.response_check_option_content(1, 'value', '255.255.255.0')

    # --- REQUEST with FQDN option but WITHOUT the S flag: the server
    # should only perform the reverse (PTR) update ---
    misc.test_procedure()
    srv_msg.client_save_option_count(1, 'server_id')
    srv_msg.client_add_saved_option_count(1)
    srv_msg.client_does_include_with_value('requested_addr', '192.168.50.10')
    srv_msg.client_requests_option(1)
    srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'aa.four.example.com.')
    srv_msg.client_does_include('Client', 'fqdn')
    srv_msg.client_send_msg('REQUEST')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'ACK')
    srv_msg.response_check_content('yiaddr', '192.168.50.10')
    srv_msg.response_check_include_option(1)
    srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
    srv_msg.response_check_include_option(81)
    srv_msg.response_check_option_content(81, 'fqdn', 'aa.four.example.com.')

    # --- forward record must NOT exist; reverse record must resolve ---
    misc.test_procedure()
    srv_msg.client_save_option('server_id')
    srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
    srv_msg.client_send_dns_query()
    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER', expect_include=False)

    misc.test_procedure()
    srv_msg.dns_question_record('10.50.168.192.in-addr.arpa.', 'PTR', 'IN')
    srv_msg.client_send_dns_query()
    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER')
    srv_msg.dns_option_content('ANSWER', 'rdata', 'aa.four.example.com.')
    srv_msg.dns_option_content('ANSWER', 'rrname', '10.50.168.192.in-addr.arpa.')

    # --- RELEASE the lease ---
    misc.test_procedure()
    srv_msg.client_add_saved_option_count(1)
    srv_msg.client_sets_value('Client', 'ciaddr', '192.168.50.10')
    srv_msg.client_send_msg('RELEASE')
    misc.pass_criteria()
    srv_msg.send_dont_wait_for_message()

    # --- neither record may remain ---
    misc.test_procedure()
    srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
    srv_msg.client_send_dns_query()
    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER', expect_include=False)

    misc.test_procedure()
    srv_msg.dns_question_record('10.50.168.192.in-addr.arpa.', 'PTR', 'IN')
    srv_msg.client_send_dns_query()
    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER', expect_include=False)
| isc |
andnovar/networkx | networkx/algorithms/community/tests/test_kclique.py | 94 | 2152 | #!/usr/bin/env python
from nose.tools import *
import networkx as nx
from itertools import combinations
from networkx import k_clique_communities
def test_overlaping_K5():
    """Two K5s sharing nodes 2-4: one community at k=4, two at k=5."""
    graph = nx.Graph()
    graph.add_edges_from(combinations(range(5), 2))      # first 5-clique
    graph.add_edges_from(combinations(range(2, 7), 2))   # overlapping 5-clique
    communities = list(nx.k_clique_communities(graph, 4))
    assert_equal(communities, [frozenset([0, 1, 2, 3, 4, 5, 6])])
    communities = list(nx.k_clique_communities(graph, 5))
    assert_equal(set(communities),
                 set([frozenset([0, 1, 2, 3, 4]), frozenset([2, 3, 4, 5, 6])]))
def test_isolated_K5():
    """Two disjoint K5s each form their own community at k=5."""
    graph = nx.Graph()
    graph.add_edges_from(combinations(range(0, 5), 2))   # first 5-clique
    graph.add_edges_from(combinations(range(5, 10), 2))  # second, disjoint 5-clique
    communities = list(nx.k_clique_communities(graph, 5))
    expected = set([frozenset([0, 1, 2, 3, 4]), frozenset([5, 6, 7, 8, 9])])
    assert_equal(set(communities), expected)
def test_zachary():
    # Clique percolation on Zachary's karate club, checked against
    # hand-verified ground truth for k = 2..6.
    z = nx.karate_club_graph()
    # clique percolation with k=2 is just connected components
    zachary_k2_ground_truth = set([frozenset(z.nodes())])
    zachary_k3_ground_truth = set([frozenset([0, 1, 2, 3, 7, 8, 12, 13, 14,
                                              15, 17, 18, 19, 20, 21, 22, 23,
                                              26, 27, 28, 29, 30, 31, 32, 33]),
                                   frozenset([0, 4, 5, 6, 10, 16]),
                                   frozenset([24, 25, 31])])
    zachary_k4_ground_truth = set([frozenset([0, 1, 2, 3, 7, 13]),
                                   frozenset([8, 32, 30, 33]),
                                   frozenset([32, 33, 29, 23])])
    zachary_k5_ground_truth = set([frozenset([0, 1, 2, 3, 7, 13])])
    # no community survives percolation at k=6
    zachary_k6_ground_truth = set([])

    assert set(k_clique_communities(z, 2)) == zachary_k2_ground_truth
    assert set(k_clique_communities(z, 3)) == zachary_k3_ground_truth
    assert set(k_clique_communities(z, 4)) == zachary_k4_ground_truth
    assert set(k_clique_communities(z, 5)) == zachary_k5_ground_truth
    assert set(k_clique_communities(z, 6)) == zachary_k6_ground_truth
@raises(nx.NetworkXError)
def test_bad_k():
    """k < 2 must be rejected; materializing the generator triggers validation."""
    list(k_clique_communities(nx.Graph(), 1))
| bsd-3-clause |
HighwindONE/android_kernel_lge_msm8226 | tools/perf/util/setup.py | 4998 | 1330 | #!/usr/bin/python2
from distutils.core import setup, Extension
from os import getenv
from distutils.command.build_ext import build_ext as _build_ext
from distutils.command.install_lib import install_lib as _install_lib
class build_ext(_build_ext):
    # Force distutils to build into the directories chosen by the perf
    # Makefile (PYTHON_EXTBUILD_LIB / PYTHON_EXTBUILD_TMP, read below).
    def finalize_options(self):
        _build_ext.finalize_options(self)
        self.build_lib = build_lib
        self.build_temp = build_tmp
class install_lib(_install_lib):
    # Install from the same Makefile-chosen build directory.
    def finalize_options(self):
        _install_lib.finalize_options(self)
        self.build_dir = build_lib
# Compiler flags: the defaults needed by the perf sources plus whatever
# CFLAGS the build environment passes in.
cflags = ['-fno-strict-aliasing', '-Wno-write-strings']
cflags += getenv('CFLAGS', '').split()

# Build output directories, dictated by the perf Makefile.
build_lib = getenv('PYTHON_EXTBUILD_LIB')
build_tmp = getenv('PYTHON_EXTBUILD_TMP')

# One C source path per non-empty, non-comment line of the manifest.
# NOTE: file() is Python 2 only — consistent with the python2 shebang.
ext_sources = [f.strip() for f in file('util/python-ext-sources')
                if len(f.strip()) > 0 and f[0] != '#']

perf = Extension('perf',
                  sources = ext_sources,
                  include_dirs = ['util/include'],
                  extra_compile_args = cflags,
                 )

setup(name='perf',
      version='0.1',
      description='Interface with the Linux profiling infrastructure',
      author='Arnaldo Carvalho de Melo',
      author_email='acme@redhat.com',
      license='GPLv2',
      url='http://perf.wiki.kernel.org',
      ext_modules=[perf],
      cmdclass={'build_ext': build_ext, 'install_lib': install_lib})
| gpl-2.0 |
MackZxh/OCA-Choice | project/sale_order_project/models/sale.py | 19 | 2400 | # -*- coding: utf-8 -*-
###############################################################################
#
# Module for OpenERP
# Copyright (C) 2014 Akretion (http://www.akretion.com).
# Copyright (C) 2010-2013 Akretion LDTA (<http://www.akretion.com>)
# @author Sébastien BEAU <sebastien.beau@akretion.com>
# @author Benoît GUILLOT <benoit.guillot@akretion.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
from openerp import api, fields, models
from datetime import date
class SaleOrder(models.Model):
    """Extend sale.order with a project created from the order."""
    _inherit = "sale.order"

    @api.one
    @api.depends('project_id')
    def _compute_related_project_id(self):
        # Resolve the project.project that owns this order's analytic
        # account, but only when that account has task management
        # enabled; [:1] yields an empty recordset otherwise.
        self.related_project_id = (
            self.project_id.use_tasks and
            self.env['project.project'].search(
                [('analytic_account_id', '=', self.project_id.id)],
                limit=1)[:1])

    related_project_id = fields.Many2one(
        comodel_name='project.project', string='Project',
        compute='_compute_related_project_id')

    @api.model
    def _prepare_project_vals(self, order):
        """Build the values dict for the project created from ``order``.

        Project name pattern: "<partner> - <year> - <order ref>".
        NOTE(review): the format string starts with a space, which looks
        unintentional but is kept as-is — confirm before changing.
        """
        name = u" %s - %s - %s" % (
            order.partner_id.name,
            date.today().year,
            order.name)
        return {
            'user_id': order.user_id.id,
            'name': name,
            'partner_id': order.partner_id.id,
        }

    @api.multi
    def action_create_project(self):
        """Create one project per order in the recordset and link it
        back to the order through the project's analytic account."""
        project_obj = self.env['project.project']
        for order in self:
            vals = self._prepare_project_vals(order)
            project = project_obj.create(vals)
            order.write({
                'project_id': project.analytic_account_id.id
            })
        return True
| lgpl-3.0 |
alejo8591/culttume2 | venv/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/connection.py | 187 | 5659 | # urllib3/connection.py
# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
#
# This module is part of urllib3 and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
import socket
from socket import timeout as SocketTimeout
try: # Python 3
from http.client import HTTPConnection as _HTTPConnection, HTTPException
except ImportError:
from httplib import HTTPConnection as _HTTPConnection, HTTPException
class DummyConnection(object):
    "Used to detect a failed ConnectionCls import."
    pass


try: # Compiled with SSL?
    # Pessimistic defaults; overwritten below once the ssl module
    # imports successfully.
    ssl = None
    HTTPSConnection = DummyConnection

    class BaseSSLError(BaseException):
        # Placeholder so "except BaseSSLError" is always a valid clause,
        # even when the real ssl.SSLError is unavailable.
        pass

    try: # Python 3
        from http.client import HTTPSConnection as _HTTPSConnection
    except ImportError:
        from httplib import HTTPSConnection as _HTTPSConnection

    # Rebinds the "ssl = None" placeholder above; raises ImportError on
    # builds without SSL support, leaving the placeholders in place.
    import ssl
    BaseSSLError = ssl.SSLError

except (ImportError, AttributeError): # Platform-specific: No SSL.
    pass


from .exceptions import (
    ConnectTimeoutError,
)
from .packages.ssl_match_hostname import match_hostname
from .util import (
    assert_fingerprint,
    resolve_cert_reqs,
    resolve_ssl_version,
    ssl_wrap_socket,
)

# Default TCP port per URL scheme.
port_by_scheme = {
    'http': 80,
    'https': 443,
}
class HTTPConnection(_HTTPConnection, object):
    """Plain-HTTP connection: httplib's HTTPConnection plus Nagle control
    and a tunnel-aware connect path."""

    default_port = port_by_scheme['http']

    # By default, disable Nagle's Algorithm.
    tcp_nodelay = 1

    def _new_conn(self):
        """ Establish a socket connection and set nodelay settings on it

        :return: a new socket connection
        """
        try:
            conn = socket.create_connection(
                (self.host, self.port),
                self.timeout,
                self.source_address,
            )
        except AttributeError: # Python 2.6
            # Python 2.6 has no self.source_address; retry without it.
            conn = socket.create_connection(
                (self.host, self.port),
                self.timeout,
            )
        conn.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY,
                        self.tcp_nodelay)

        return conn

    def _prepare_conn(self, conn):
        self.sock = conn
        if self._tunnel_host:
            # TODO: Fix tunnel so it doesn't depend on self.sock state.
            self._tunnel()

    def connect(self):
        conn = self._new_conn()
        self._prepare_conn(conn)
class HTTPSConnection(HTTPConnection):
    """HTTPS connection that wraps the socket in SSL but performs NO
    certificate verification (see VerifiedHTTPSConnection below)."""

    default_port = port_by_scheme['https']

    def __init__(self, host, port=None, key_file=None, cert_file=None,
                 strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
                 source_address=None):
        try:
            HTTPConnection.__init__(self, host, port, strict, timeout, source_address)
        except TypeError: # Python 2.6
            # Python 2.6's HTTPConnection has no source_address parameter.
            HTTPConnection.__init__(self, host, port, strict, timeout)
        self.key_file = key_file
        self.cert_file = cert_file

    def connect(self):
        conn = self._new_conn()
        self._prepare_conn(conn)
        self.sock = ssl.wrap_socket(conn, self.key_file, self.cert_file)
class VerifiedHTTPSConnection(HTTPSConnection):
    """
    Based on httplib.HTTPSConnection but wraps the socket with
    SSL certification.
    """
    # Verification settings; populated by set_cert() before connect().
    cert_reqs = None
    ca_certs = None
    ssl_version = None

    def set_cert(self, key_file=None, cert_file=None,
                 cert_reqs=None, ca_certs=None,
                 assert_hostname=None, assert_fingerprint=None):
        # Stash certificate/verification options for use in connect().
        self.key_file = key_file
        self.cert_file = cert_file
        self.cert_reqs = cert_reqs
        self.ca_certs = ca_certs
        self.assert_hostname = assert_hostname
        self.assert_fingerprint = assert_fingerprint

    def connect(self):
        # Add certificate verification
        try:
            sock = socket.create_connection(
                address=(self.host, self.port),
                timeout=self.timeout,
            )
        except SocketTimeout:
            raise ConnectTimeoutError(
                self, "Connection to %s timed out. (connect timeout=%s)" %
                (self.host, self.timeout))

        sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY,
                        self.tcp_nodelay)

        resolved_cert_reqs = resolve_cert_reqs(self.cert_reqs)
        resolved_ssl_version = resolve_ssl_version(self.ssl_version)

        # the _tunnel_host attribute was added in python 2.6.3 (via
        # http://hg.python.org/cpython/rev/0f57b30a152f) so pythons 2.6(0-2) do
        # not have them.
        if getattr(self, '_tunnel_host', None):
            self.sock = sock
            # Calls self._set_hostport(), so self.host is
            # self._tunnel_host below.
            self._tunnel()

        # Wrap socket using verification with the root certs in
        # trusted_root_certs
        self.sock = ssl_wrap_socket(sock, self.key_file, self.cert_file,
                                    cert_reqs=resolved_cert_reqs,
                                    ca_certs=self.ca_certs,
                                    server_hostname=self.host,
                                    ssl_version=resolved_ssl_version)

        if resolved_cert_reqs != ssl.CERT_NONE:
            # Verify either by certificate fingerprint or by hostname
            # match against the peer certificate.
            if self.assert_fingerprint:
                assert_fingerprint(self.sock.getpeercert(binary_form=True),
                                   self.assert_fingerprint)
            elif self.assert_hostname is not False:
                match_hostname(self.sock.getpeercert(),
                               self.assert_hostname or self.host)
if ssl:
    # When SSL is available, export the verified connection as the
    # default HTTPSConnection and keep the unverified one for testing.
    # Make a copy for testing.
    UnverifiedHTTPSConnection = HTTPSConnection
    HTTPSConnection = VerifiedHTTPSConnection
| mit |
tuxfux-hlp-notes/python-batches | archieves/batch-64/09-modules/myenv/lib/python2.7/site-packages/django/contrib/admin/validation.py | 82 | 23810 | from django.contrib.admin.utils import NotRelationField, get_fields_from_path
from django.core.exceptions import FieldDoesNotExist, ImproperlyConfigured
from django.db import models
from django.forms.models import (
BaseModelForm, BaseModelFormSet, _get_foreign_key,
)
"""
Does basic ModelAdmin option validation. Calls custom validation
classmethod in the end if it is provided in cls. The signature of the
custom validation classmethod should be: def validate(cls, model).
"""
__all__ = ['BaseValidator', 'InlineValidator']
class BaseValidator(object):
def validate(self, cls, model):
for m in dir(self):
if m.startswith('validate_'):
getattr(self, m)(cls, model)
def check_field_spec(self, cls, model, flds, label):
"""
Validate the fields specification in `flds` from a ModelAdmin subclass
`cls` for the `model` model. Use `label` for reporting problems to the user.
The fields specification can be a ``fields`` option or a ``fields``
sub-option from a ``fieldsets`` option component.
"""
for fields in flds:
# The entry in fields might be a tuple. If it is a standalone
# field, make it into a tuple to make processing easier.
if type(fields) != tuple:
fields = (fields,)
for field in fields:
if field in cls.readonly_fields:
# Stuff can be put in fields that isn't actually a
# model field if it's in readonly_fields,
# readonly_fields will handle the validation of such
# things.
continue
try:
f = model._meta.get_field(field)
except FieldDoesNotExist:
# If we can't find a field on the model that matches, it could be an
# extra field on the form; nothing to check so move on to the next field.
continue
if isinstance(f, models.ManyToManyField) and not f.rel.through._meta.auto_created:
raise ImproperlyConfigured("'%s.%s' "
"can't include the ManyToManyField field '%s' because "
"'%s' manually specifies a 'through' model." % (
cls.__name__, label, field, field))
def validate_raw_id_fields(self, cls, model):
" Validate that raw_id_fields only contains field names that are listed on the model. "
if hasattr(cls, 'raw_id_fields'):
check_isseq(cls, 'raw_id_fields', cls.raw_id_fields)
for idx, field in enumerate(cls.raw_id_fields):
f = get_field(cls, model, 'raw_id_fields', field)
if not isinstance(f, (models.ForeignKey, models.ManyToManyField)):
raise ImproperlyConfigured("'%s.raw_id_fields[%d]', '%s' must "
"be either a ForeignKey or ManyToManyField."
% (cls.__name__, idx, field))
def validate_fields(self, cls, model):
" Validate that fields only refer to existing fields, doesn't contain duplicates. "
# fields
if cls.fields: # default value is None
check_isseq(cls, 'fields', cls.fields)
self.check_field_spec(cls, model, cls.fields, 'fields')
if cls.fieldsets:
raise ImproperlyConfigured('Both fieldsets and fields are specified in %s.' % cls.__name__)
if len(cls.fields) > len(set(cls.fields)):
raise ImproperlyConfigured('There are duplicate field(s) in %s.fields' % cls.__name__)
    def validate_fieldsets(self, cls, model):
        " Validate that fieldsets is properly formatted and doesn't contain duplicates. "
        from django.contrib.admin.options import flatten_fieldsets
        if cls.fieldsets:  # default value is None
            check_isseq(cls, 'fieldsets', cls.fieldsets)
            # Each fieldset must be a 2-tuple of (name, options-dict).
            for idx, fieldset in enumerate(cls.fieldsets):
                check_isseq(cls, 'fieldsets[%d]' % idx, fieldset)
                if len(fieldset) != 2:
                    raise ImproperlyConfigured("'%s.fieldsets[%d]' does not "
                            "have exactly two elements." % (cls.__name__, idx))
                check_isdict(cls, 'fieldsets[%d][1]' % idx, fieldset[1])
                if 'fields' not in fieldset[1]:
                    raise ImproperlyConfigured("'fields' key is required in "
                            "%s.fieldsets[%d][1] field options dict."
                            % (cls.__name__, idx))
                self.check_field_spec(cls, model, fieldset[1]['fields'], "fieldsets[%d][1]['fields']" % idx)
            # No field may appear in more than one fieldset.
            flattened_fieldsets = flatten_fieldsets(cls.fieldsets)
            if len(flattened_fieldsets) > len(set(flattened_fieldsets)):
                raise ImproperlyConfigured('There are duplicate field(s) in %s.fieldsets' % cls.__name__)
def validate_exclude(self, cls, model):
" Validate that exclude is a sequence without duplicates. "
if cls.exclude: # default value is None
check_isseq(cls, 'exclude', cls.exclude)
if len(cls.exclude) > len(set(cls.exclude)):
raise ImproperlyConfigured('There are duplicate field(s) in %s.exclude' % cls.__name__)
def validate_form(self, cls, model):
" Validate that form subclasses BaseModelForm. "
if hasattr(cls, 'form') and not issubclass(cls.form, BaseModelForm):
raise ImproperlyConfigured("%s.form does not inherit from "
"BaseModelForm." % cls.__name__)
def validate_filter_vertical(self, cls, model):
" Validate that filter_vertical is a sequence of field names. "
if hasattr(cls, 'filter_vertical'):
check_isseq(cls, 'filter_vertical', cls.filter_vertical)
for idx, field in enumerate(cls.filter_vertical):
f = get_field(cls, model, 'filter_vertical', field)
if not isinstance(f, models.ManyToManyField):
raise ImproperlyConfigured("'%s.filter_vertical[%d]' must be "
"a ManyToManyField." % (cls.__name__, idx))
def validate_filter_horizontal(self, cls, model):
" Validate that filter_horizontal is a sequence of field names. "
if hasattr(cls, 'filter_horizontal'):
check_isseq(cls, 'filter_horizontal', cls.filter_horizontal)
for idx, field in enumerate(cls.filter_horizontal):
f = get_field(cls, model, 'filter_horizontal', field)
if not isinstance(f, models.ManyToManyField):
raise ImproperlyConfigured("'%s.filter_horizontal[%d]' must be "
"a ManyToManyField." % (cls.__name__, idx))
    def validate_radio_fields(self, cls, model):
        " Validate that radio_fields is a dictionary of choice or foreign key fields. "
        from django.contrib.admin.options import HORIZONTAL, VERTICAL
        if hasattr(cls, 'radio_fields'):
            check_isdict(cls, 'radio_fields', cls.radio_fields)
            for field, val in cls.radio_fields.items():
                f = get_field(cls, model, 'radio_fields', field)
                # Radio widgets only make sense for FK fields or fields with choices.
                if not (isinstance(f, models.ForeignKey) or f.choices):
                    raise ImproperlyConfigured("'%s.radio_fields['%s']' "
                            "is neither an instance of ForeignKey nor does "
                            "have choices set." % (cls.__name__, field))
                # The dict value selects the widget orientation.
                if val not in (HORIZONTAL, VERTICAL):
                    raise ImproperlyConfigured("'%s.radio_fields['%s']' "
                            "is neither admin.HORIZONTAL nor admin.VERTICAL."
                            % (cls.__name__, field))
    def validate_prepopulated_fields(self, cls, model):
        " Validate that prepopulated_fields if a dictionary containing allowed field types. "
        # prepopulated_fields
        if hasattr(cls, 'prepopulated_fields'):
            check_isdict(cls, 'prepopulated_fields', cls.prepopulated_fields)
            for field, val in cls.prepopulated_fields.items():
                f = get_field(cls, model, 'prepopulated_fields', field)
                # These field types cannot be filled from other fields client-side.
                if isinstance(f, (models.DateTimeField, models.ForeignKey,
                    models.ManyToManyField)):
                    raise ImproperlyConfigured("'%s.prepopulated_fields['%s']' "
                            "is either a DateTimeField, ForeignKey or "
                            "ManyToManyField. This isn't allowed."
                            % (cls.__name__, field))
                # val is the sequence of source field names; each must exist.
                check_isseq(cls, "prepopulated_fields['%s']" % field, val)
                for idx, f in enumerate(val):
                    get_field(cls, model, "prepopulated_fields['%s'][%d]" % (field, idx), f)
def validate_view_on_site_url(self, cls, model):
if hasattr(cls, 'view_on_site'):
if not callable(cls.view_on_site) and not isinstance(cls.view_on_site, bool):
raise ImproperlyConfigured("%s.view_on_site is not a callable or a boolean value." % cls.__name__)
    def validate_ordering(self, cls, model):
        " Validate that ordering refers to existing fields or is random. "
        # ordering = None
        if cls.ordering:
            check_isseq(cls, 'ordering', cls.ordering)
            for idx, field in enumerate(cls.ordering):
                # '?' (random ordering) must be the only entry if present.
                if field == '?' and len(cls.ordering) != 1:
                    raise ImproperlyConfigured("'%s.ordering' has the random "
                            "ordering marker '?', but contains other fields as "
                            "well. Please either remove '?' or the other fields."
                            % cls.__name__)
                if field == '?':
                    continue
                # Strip the descending-order prefix before the field lookup.
                if field.startswith('-'):
                    field = field[1:]
                # Skip ordering in the format field1__field2 (FIXME: checking
                # this format would be nice, but it's a little fiddly).
                if '__' in field:
                    continue
                get_field(cls, model, 'ordering[%d]' % idx, field)
    def validate_readonly_fields(self, cls, model):
        " Validate that readonly_fields refers to proper attribute or field. "
        # A readonly entry may be: a callable, an attribute of the admin class,
        # an attribute of the model, or a model field -- checked in that order.
        if hasattr(cls, "readonly_fields"):
            check_isseq(cls, "readonly_fields", cls.readonly_fields)
            for idx, field in enumerate(cls.readonly_fields):
                if not callable(field):
                    if not hasattr(cls, field):
                        if not hasattr(model, field):
                            try:
                                model._meta.get_field(field)
                            except FieldDoesNotExist:
                                raise ImproperlyConfigured(
                                    "%s.readonly_fields[%d], %r is not a callable or "
                                    "an attribute of %r or found in the model %r."
                                    % (cls.__name__, idx, field, cls.__name__, model._meta.object_name)
                                )
class ModelAdminValidator(BaseValidator):
    """
    Validator for ModelAdmin subclasses: checks the changelist options
    (list_display, list_filter, list_editable, ...) on top of the
    BaseValidator checks.
    """
    def validate_save_as(self, cls, model):
        " Validate save_as is a boolean. "
        check_type(cls, 'save_as', bool)
    def validate_save_on_top(self, cls, model):
        " Validate save_on_top is a boolean. "
        check_type(cls, 'save_on_top', bool)
    def validate_inlines(self, cls, model):
        " Validate inline model admin classes. "
        from django.contrib.admin.options import BaseModelAdmin
        if hasattr(cls, 'inlines'):
            check_isseq(cls, 'inlines', cls.inlines)
            for idx, inline in enumerate(cls.inlines):
                if not issubclass(inline, BaseModelAdmin):
                    raise ImproperlyConfigured("'%s.inlines[%d]' does not inherit "
                            "from BaseModelAdmin." % (cls.__name__, idx))
                if not inline.model:
                    raise ImproperlyConfigured("'model' is a required attribute "
                            "of '%s.inlines[%d]'." % (cls.__name__, idx))
                if not issubclass(inline.model, models.Model):
                    raise ImproperlyConfigured("'%s.inlines[%d].model' does not "
                            "inherit from models.Model." % (cls.__name__, idx))
                # Recursively run the inline's own validator, then the extra
                # parent/child consistency check below.
                inline.validate(inline.model)
                self.check_inline(inline, model)
    def check_inline(self, cls, parent_model):
        " Validate inline class's fk field is not excluded. "
        fk = _get_foreign_key(parent_model, cls.model, fk_name=cls.fk_name, can_fail=True)
        if hasattr(cls, 'exclude') and cls.exclude:
            if fk and fk.name in cls.exclude:
                raise ImproperlyConfigured("%s cannot exclude the field "
                        "'%s' - this is the foreign key to the parent model "
                        "%s.%s." % (cls.__name__, fk.name, parent_model._meta.app_label, parent_model.__name__))
    def validate_list_display(self, cls, model):
        " Validate that list_display only contains fields or usable attributes. "
        if hasattr(cls, 'list_display'):
            check_isseq(cls, 'list_display', cls.list_display)
            for idx, field in enumerate(cls.list_display):
                # An entry may be a callable, an admin attribute, a model
                # attribute, or a model field -- checked in that order.
                if not callable(field):
                    if not hasattr(cls, field):
                        if not hasattr(model, field):
                            try:
                                model._meta.get_field(field)
                            except FieldDoesNotExist:
                                raise ImproperlyConfigured(
                                    "%s.list_display[%d], %r is not a callable or "
                                    "an attribute of %r or found in the model %r."
                                    % (cls.__name__, idx, field, cls.__name__, model._meta.object_name)
                                )
                        else:
                            # getattr(model, field) could be an X_RelatedObjectsDescriptor
                            f = fetch_attr(cls, model, "list_display[%d]" % idx, field)
                            if isinstance(f, models.ManyToManyField):
                                raise ImproperlyConfigured(
                                    "'%s.list_display[%d]', '%s' is a ManyToManyField "
                                    "which is not supported."
                                    % (cls.__name__, idx, field)
                                )
    def validate_list_display_links(self, cls, model):
        " Validate that list_display_links either is None or a unique subset of list_display."
        if hasattr(cls, 'list_display_links'):
            if cls.list_display_links is None:
                return
            check_isseq(cls, 'list_display_links', cls.list_display_links)
            for idx, field in enumerate(cls.list_display_links):
                if field not in cls.list_display:
                    raise ImproperlyConfigured("'%s.list_display_links[%d]' "
                            "refers to '%s' which is not defined in 'list_display'."
                            % (cls.__name__, idx, field))
    def validate_list_filter(self, cls, model):
        """
        Validate that list_filter is a sequence of one of three options:
            1: 'field' - a basic field filter, possibly w/ relationships (eg, 'field__rel')
            2: ('field', SomeFieldListFilter) - a field-based list filter class
            3: SomeListFilter - a non-field list filter class
        """
        from django.contrib.admin import ListFilter, FieldListFilter
        if hasattr(cls, 'list_filter'):
            check_isseq(cls, 'list_filter', cls.list_filter)
            for idx, item in enumerate(cls.list_filter):
                if callable(item) and not isinstance(item, models.Field):
                    # If item is option 3, it should be a ListFilter...
                    if not issubclass(item, ListFilter):
                        raise ImproperlyConfigured("'%s.list_filter[%d]' is '%s'"
                                " which is not a descendant of ListFilter."
                                % (cls.__name__, idx, item.__name__))
                    # ... but not a FieldListFilter.
                    if issubclass(item, FieldListFilter):
                        raise ImproperlyConfigured("'%s.list_filter[%d]' is '%s'"
                                " which is of type FieldListFilter but is not"
                                " associated with a field name."
                                % (cls.__name__, idx, item.__name__))
                else:
                    if isinstance(item, (tuple, list)):
                        # item is option #2
                        field, list_filter_class = item
                        if not issubclass(list_filter_class, FieldListFilter):
                            raise ImproperlyConfigured("'%s.list_filter[%d][1]'"
                                " is '%s' which is not of type FieldListFilter."
                                % (cls.__name__, idx, list_filter_class.__name__))
                    else:
                        # item is option #1
                        field = item
                    # Validate the field string
                    try:
                        get_fields_from_path(model, field)
                    except (NotRelationField, FieldDoesNotExist):
                        raise ImproperlyConfigured("'%s.list_filter[%d]' refers to '%s'"
                                " which does not refer to a Field."
                                % (cls.__name__, idx, field))
    def validate_list_select_related(self, cls, model):
        " Validate that list_select_related is a boolean, a list or a tuple. "
        list_select_related = getattr(cls, 'list_select_related', None)
        if list_select_related:
            types = (bool, tuple, list)
            if not isinstance(list_select_related, types):
                raise ImproperlyConfigured("'%s.list_select_related' should be "
                                           "either a bool, a tuple or a list" %
                                           cls.__name__)
    def validate_list_per_page(self, cls, model):
        " Validate that list_per_page is an integer. "
        check_type(cls, 'list_per_page', int)
    def validate_list_max_show_all(self, cls, model):
        " Validate that list_max_show_all is an integer. "
        check_type(cls, 'list_max_show_all', int)
    def validate_list_editable(self, cls, model):
        """
        Validate that list_editable is a sequence of editable fields from
        list_display without first element.
        """
        if hasattr(cls, 'list_editable') and cls.list_editable:
            check_isseq(cls, 'list_editable', cls.list_editable)
            for idx, field_name in enumerate(cls.list_editable):
                try:
                    field = model._meta.get_field(field_name)
                except FieldDoesNotExist:
                    raise ImproperlyConfigured("'%s.list_editable[%d]' refers to a "
                        "field, '%s', not defined on %s.%s."
                        % (cls.__name__, idx, field_name, model._meta.app_label, model.__name__))
                if field_name not in cls.list_display:
                    raise ImproperlyConfigured("'%s.list_editable[%d]' refers to "
                        "'%s' which is not defined in 'list_display'."
                        % (cls.__name__, idx, field_name))
                if cls.list_display_links is not None:
                    if field_name in cls.list_display_links:
                        raise ImproperlyConfigured("'%s' cannot be in both '%s.list_editable'"
                            " and '%s.list_display_links'"
                            % (field_name, cls.__name__, cls.__name__))
                    # The first list_display column is the change-link by
                    # default, so it can't also be editable.
                    if not cls.list_display_links and cls.list_display[0] in cls.list_editable:
                        raise ImproperlyConfigured("'%s.list_editable[%d]' refers to"
                            " the first field in list_display, '%s', which can't be"
                            " used unless list_display_links is set."
                            % (cls.__name__, idx, cls.list_display[0]))
                if not field.editable:
                    raise ImproperlyConfigured("'%s.list_editable[%d]' refers to a "
                        "field, '%s', which isn't editable through the admin."
                        % (cls.__name__, idx, field_name))
    def validate_search_fields(self, cls, model):
        " Validate search_fields is a sequence. "
        if hasattr(cls, 'search_fields'):
            check_isseq(cls, 'search_fields', cls.search_fields)
    def validate_date_hierarchy(self, cls, model):
        " Validate that date_hierarchy refers to DateField or DateTimeField. "
        if cls.date_hierarchy:
            f = get_field(cls, model, 'date_hierarchy', cls.date_hierarchy)
            if not isinstance(f, (models.DateField, models.DateTimeField)):
                # NOTE(review): the message below has an unbalanced quote
                # ("'%s.date_hierarchy is ...") -- left as-is to preserve behavior.
                raise ImproperlyConfigured("'%s.date_hierarchy is "
                        "neither an instance of DateField nor DateTimeField."
                        % cls.__name__)
class InlineValidator(BaseValidator):
    """
    Validator for InlineModelAdmin subclasses: checks the inline-specific
    options (fk_name, extra, max_num, formset) on top of the BaseValidator
    checks.
    """
    def validate_fk_name(self, cls, model):
        " Validate that fk_name refers to a ForeignKey. "
        if cls.fk_name:  # default value is None
            f = get_field(cls, model, 'fk_name', cls.fk_name)
            if not isinstance(f, models.ForeignKey):
                # Fixed: the quote around '%s.fk_name' was previously unbalanced.
                raise ImproperlyConfigured("'%s.fk_name' is not an instance of "
                        "models.ForeignKey." % cls.__name__)
    def validate_extra(self, cls, model):
        " Validate that extra is an integer. "
        check_type(cls, 'extra', int)
    def validate_max_num(self, cls, model):
        " Validate that max_num is an integer. "
        check_type(cls, 'max_num', int)
    def validate_formset(self, cls, model):
        " Validate formset is a subclass of BaseModelFormSet. "
        if hasattr(cls, 'formset') and not issubclass(cls.formset, BaseModelFormSet):
            raise ImproperlyConfigured("'%s.formset' does not inherit from "
                    "BaseModelFormSet." % cls.__name__)
def check_type(cls, attr, type_):
    """ Ensure that cls.<attr>, when set to a non-None value, is an instance of type_. """
    value = getattr(cls, attr, None)
    if value is None:
        return
    if not isinstance(value, type_):
        raise ImproperlyConfigured("'%s.%s' should be a %s."
                % (cls.__name__, attr, type_.__name__))
def check_isseq(cls, label, obj):
    """ Ensure obj is a list or tuple; raise ImproperlyConfigured otherwise. """
    if isinstance(obj, (list, tuple)):
        return
    raise ImproperlyConfigured("'%s.%s' must be a list or tuple." % (cls.__name__, label))
def check_isdict(cls, label, obj):
    """ Ensure obj is a dictionary; raise ImproperlyConfigured otherwise. """
    if isinstance(obj, dict):
        return
    raise ImproperlyConfigured("'%s.%s' must be a dictionary." % (cls.__name__, label))
def get_field(cls, model, label, field):
    """ Return model's field named `field`, raising ImproperlyConfigured when absent. """
    try:
        return model._meta.get_field(field)
    except FieldDoesNotExist:
        message = ("'%s.%s' refers to field '%s' that is missing from model '%s.%s'."
                   % (cls.__name__, label, field, model._meta.app_label, model.__name__))
        raise ImproperlyConfigured(message)
def fetch_attr(cls, model, label, field):
    """ Return the model field named `field`, falling back to a plain model attribute. """
    try:
        return model._meta.get_field(field)
    except FieldDoesNotExist:
        pass
    try:
        return getattr(model, field)
    except AttributeError:
        message = (
            "'%s.%s' refers to '%s' that is neither a field, method or "
            "property of model '%s.%s'."
            % (cls.__name__, label, field, model._meta.app_label, model.__name__))
        raise ImproperlyConfigured(message)
| gpl-3.0 |
laiqiqi886/kbengine | kbe/res/scripts/common/Lib/test/test_codecencodings_jp.py | 88 | 4981 | #
# test_codecencodings_jp.py
# Codec encoding tests for Japanese encodings.
#
from test import support
from test import multibytecodec_support
import unittest
class Test_CP932(multibytecodec_support.TestBase, unittest.TestCase):
    """Decode/encode and error-handler tests for the cp932 codec."""
    encoding = 'cp932'
    tstring = multibytecodec_support.load_teststring('shift_jis')
    # Each case is (input bytes, error handler, expected str or None for failure).
    codectests = (
        # invalid bytes
        (b"abc\x81\x00\x81\x00\x82\x84", "strict",  None),
        (b"abc\xf8", "strict",  None),
        (b"abc\x81\x00\x82\x84", "replace", "abc\ufffd\x00\uff44"),
        (b"abc\x81\x00\x82\x84\x88", "replace", "abc\ufffd\x00\uff44\ufffd"),
        (b"abc\x81\x00\x82\x84", "ignore",  "abc\x00\uff44"),
        (b"ab\xEBxy", "replace", "ab\uFFFDxy"),
        (b"ab\xF0\x39xy", "replace", "ab\uFFFD9xy"),
        (b"ab\xEA\xF0xy", "replace", 'ab\ufffd\ue038y'),
        # sjis vs cp932
        (b"\\\x7e", "replace", "\\\x7e"),
        (b"\x81\x5f\x81\x61\x81\x7c", "replace", "\uff3c\u2225\uff0d"),
    )
# Error-handling cases shared by all EUC-JP-family codec test classes below.
euc_commontests = (
    # invalid bytes
    (b"abc\x80\x80\xc1\xc4", "strict", None),
    (b"abc\x80\x80\xc1\xc4", "replace", "abc\ufffd\ufffd\u7956"),
    (b"abc\x80\x80\xc1\xc4\xc8", "replace", "abc\ufffd\ufffd\u7956\ufffd"),
    (b"abc\x80\x80\xc1\xc4", "ignore", "abc\u7956"),
    (b"abc\xc8", "strict", None),
    (b"abc\x8f\x83\x83", "replace", "abc\ufffd\ufffd\ufffd"),
    (b"\x82\xFCxy", "replace", "\ufffd\ufffdxy"),
    (b"\xc1\x64", "strict", None),
    (b"\xa1\xc0", "strict", "\uff3c"),
    (b"\xa1\xc0\\", "strict", "\uff3c\\"),
    (b"\x8eXY", "replace", "\ufffdXY"),
)
class Test_EUC_JIS_2004(multibytecodec_support.TestBase,
                        unittest.TestCase):
    """Codec tests for euc_jis_2004 using the shared EUC error cases."""
    encoding = 'euc_jis_2004'
    tstring = multibytecodec_support.load_teststring('euc_jisx0213')
    codectests = euc_commontests
    xmlcharnametest = (
        "\xab\u211c\xbb = \u2329\u1234\u232a",
        b"\xa9\xa8&real;\xa9\xb2 = &lang;&#4660;&rang;"
    )
class Test_EUC_JISX0213(multibytecodec_support.TestBase,
                        unittest.TestCase):
    """Codec tests for euc_jisx0213 using the shared EUC error cases."""
    encoding = 'euc_jisx0213'
    tstring = multibytecodec_support.load_teststring('euc_jisx0213')
    codectests = euc_commontests
    xmlcharnametest = (
        "\xab\u211c\xbb = \u2329\u1234\u232a",
        b"\xa9\xa8&real;\xa9\xb2 = &lang;&#4660;&rang;"
    )
class Test_EUC_JP_COMPAT(multibytecodec_support.TestBase,
                         unittest.TestCase):
    """Codec tests for euc_jp, including its yen-sign/overline compatibility mappings."""
    encoding = 'euc_jp'
    tstring = multibytecodec_support.load_teststring('euc_jp')
    codectests = euc_commontests + (
        ("\xa5", "strict", b"\x5c"),
        ("\u203e", "strict", b"\x7e"),
    )
# Error-handling cases shared by all Shift-JIS-family codec test classes below.
shiftjis_commonenctests = (
    (b"abc\x80\x80\x82\x84", "strict", None),
    (b"abc\xf8", "strict", None),
    (b"abc\x80\x80\x82\x84def", "ignore",  "abc\uff44def"),
)
class Test_SJIS_COMPAT(multibytecodec_support.TestBase, unittest.TestCase):
    """Codec tests for shift_jis, contrasting its mappings with cp932."""
    encoding = 'shift_jis'
    tstring = multibytecodec_support.load_teststring('shift_jis')
    codectests = shiftjis_commonenctests + (
        (b"abc\x80\x80\x82\x84", "replace", "abc\ufffd\ufffd\uff44"),
        (b"abc\x80\x80\x82\x84\x88", "replace", "abc\ufffd\ufffd\uff44\ufffd"),
        (b"\\\x7e", "strict", "\\\x7e"),
        (b"\x81\x5f\x81\x61\x81\x7c", "strict", "\uff3c\u2016\u2212"),
        (b"abc\x81\x39", "replace",  "abc\ufffd9"),
        (b"abc\xEA\xFC", "replace",  "abc\ufffd\ufffd"),
        (b"abc\xFF\x58", "replace",  "abc\ufffdX"),
    )
class Test_SJIS_2004(multibytecodec_support.TestBase, unittest.TestCase):
    """Codec tests for shift_jis_2004, which remaps backslash/tilde to yen/overline."""
    encoding = 'shift_jis_2004'
    tstring = multibytecodec_support.load_teststring('shift_jis')
    codectests = shiftjis_commonenctests + (
        (b"\\\x7e", "strict", "\xa5\u203e"),
        (b"\x81\x5f\x81\x61\x81\x7c", "strict", "\\\u2016\u2212"),
        (b"abc\xEA\xFC", "strict",  "abc\u64bf"),
        (b"\x81\x39xy", "replace",  "\ufffd9xy"),
        (b"\xFF\x58xy", "replace",  "\ufffdXxy"),
        (b"\x80\x80\x82\x84xy", "replace", "\ufffd\ufffd\uff44xy"),
        (b"\x80\x80\x82\x84\x88xy", "replace", "\ufffd\ufffd\uff44\u5864y"),
        (b"\xFC\xFBxy", "replace", '\ufffd\u95b4y'),
    )
    xmlcharnametest = (
        "\xab\u211c\xbb = \u2329\u1234\u232a",
        b"\x85G&real;\x85Q = &lang;&#4660;&rang;"
    )
class Test_SJISX0213(multibytecodec_support.TestBase, unittest.TestCase):
    """Codec tests for shift_jisx0213."""
    encoding = 'shift_jisx0213'
    tstring = multibytecodec_support.load_teststring('shift_jisx0213')
    codectests = shiftjis_commonenctests + (
        (b"abc\x80\x80\x82\x84", "replace", "abc\ufffd\ufffd\uff44"),
        (b"abc\x80\x80\x82\x84\x88", "replace", "abc\ufffd\ufffd\uff44\ufffd"),
        # sjis vs cp932
        (b"\\\x7e", "replace", "\xa5\u203e"),
        (b"\x81\x5f\x81\x61\x81\x7c", "replace", "\x5c\u2016\u2212"),
    )
    xmlcharnametest = (
        "\xab\u211c\xbb = \u2329\u1234\u232a",
        b"\x85G&real;\x85Q = &lang;&#4660;&rang;"
    )
def test_main():
    # Run all TestCase classes defined in this module via test.support.
    support.run_unittest(__name__)
if __name__ == "__main__":
    test_main()
| lgpl-3.0 |
openfun/edx-platform | lms/djangoapps/mobile_api/social_facebook/friends/views.py | 85 | 2439 | """
Views for friends info API
"""
from rest_framework import generics, status
from rest_framework.response import Response
from opaque_keys.edx.keys import CourseKey
from student.models import CourseEnrollment
from ...utils import mobile_view
from ..utils import get_friends_from_facebook, get_linked_edx_accounts, share_with_facebook_friends
from lms.djangoapps.mobile_api.social_facebook.friends import serializers
@mobile_view()
class FriendsInCourse(generics.ListAPIView):
    """
    **Use Case**
        API endpoint that returns all the users friends that are in the course specified.
        Note that only friends that allow their courses to be shared will be included.
    **Example request**:
        GET /api/mobile/v0.5/social/facebook/friends/course/<course_id>
        where course_id is in the form of /edX/DemoX/Demo_Course
    **Response Values**
        {
            "friends": [
                {
                    "name": "test",
                    "id": "12345",
                },
                ...
            ]
        }
    """
    serializer_class = serializers.FriendsInCourseSerializer
    def list(self, request, *args, **kwargs):
        # Validate the incoming query parameters (e.g. the FB oauth token).
        serializer = self.get_serializer(data=request.GET, files=request.FILES)
        if not serializer.is_valid():
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
        # Get all the user's FB friends
        result = get_friends_from_facebook(serializer)
        if not isinstance(result, list):
            # The helper returned an error Response instead of a friends
            # list; propagate it to the client unchanged.
            return result
        def is_member(friend, course_key):
            """
            Return true if friend is a member of the course specified by the course_key
            """
            return CourseEnrollment.objects.filter(
                course_id=course_key,
                user_id=friend['edX_id']
            ).count() == 1
        # For each friend check if they are a linked edX user
        friends_with_edx_users = get_linked_edx_accounts(result)
        # Filter by sharing preferences and enrollment in course
        course_key = CourseKey.from_string(kwargs['course_id'])
        friends_with_sharing_in_course = [
            {'id': friend['id'], 'name': friend['name']}
            for friend in friends_with_edx_users
            if share_with_facebook_friends(friend) and is_member(friend, course_key)
        ]
        return Response({'friends': friends_with_sharing_in_course})
| agpl-3.0 |
madan96/sympy | sympy/integrals/rde.py | 91 | 26625 | """
Algorithms for solving the Risch differential equation.
Given a differential field K of characteristic 0 that is a simple
monomial extension of a base field k and f, g in K, the Risch
Differential Equation problem is to decide if there exist y in K such
that Dy + f*y == g and to find one if there are some. If t is a
monomial over k and the coefficients of f and g are in k(t), then y is
in k(t), and the outline of the algorithm here is given as:
1. Compute the normal part n of the denominator of y. The problem is
then reduced to finding y' in k<t>, where y == y'/n.
2. Compute the special part s of the denominator of y. The problem is
then reduced to finding y'' in k[t], where y == y''/(n*s)
3. Bound the degree of y''.
4. Reduce the equation Dy + f*y == g to a similar equation with f, g in
k[t].
5. Find the solutions in k[t] of bounded degree of the reduced equation.
See Chapter 6 of "Symbolic Integration I: Transcendental Functions" by
Manuel Bronstein. See also the docstring of risch.py.
"""
from __future__ import print_function, division
from operator import mul
from sympy.core import oo
from sympy.core.compatibility import reduce
from sympy.core.symbol import Dummy
from sympy.polys import Poly, gcd, ZZ, cancel
from sympy.integrals.risch import (gcdex_diophantine, frac_in, derivation,
splitfactor, NonElementaryIntegralException, DecrementLevel)
# TODO: Add messages to NonElementaryIntegralException errors
def order_at(a, p, t):
    """
    Computes the order of a at p, with respect to t.
    For a, p in k[t], the order of a at p is defined as nu_p(a) = max({n
    in Z+ such that p**n|a}), where a != 0.  If a == 0, nu_p(a) = +oo.
    To compute the order at a rational function, a/b, use the fact that
    nu_p(a/b) == nu_p(a) - nu_p(b).
    """
    if a.is_zero:
        return oo
    if p == Poly(t, t):
        # For p == t the order is just the exponent of the trailing term.
        return a.as_poly(t).ET()[0][0]
    # Uses binary search for calculating the power. power_list collects the tuples
    # (p^k,k) where each k is some power of 2. After deciding the largest k
    # such that k is power of 2 and p^k|a the loop iteratively calculates
    # the actual power.
    power_list = []
    p1 = p
    r = a.rem(p1)
    tracks_power = 1
    while r.is_zero:
        power_list.append((p1,tracks_power))
        p1 = p1*p1
        tracks_power *= 2
        r = a.rem(p1)
    # Combine the powers of two greedily, largest first, to reconstruct
    # the exact order (like reading off binary digits).
    n = 0
    product = Poly(1, t)
    while len(power_list) != 0:
        final = power_list.pop()
        productf = product*final[0]
        r = a.rem(productf)
        if r.is_zero:
            n += final[1]
            product = productf
    return n
def order_at_oo(a, d, t):
    """
    Compute the order of a/d at oo (infinity), with respect to t.
    For f in k(t), the order of f at oo is defined as deg(d) - deg(a),
    where f == a/d; the zero function has order +oo.
    """
    if a.is_zero:
        return oo
    degree_gap = d.degree(t) - a.degree(t)
    return degree_gap
def weak_normalizer(a, d, DE, z=None):
    """
    Weak normalization.
    Given a derivation D on k[t] and f == a/d in k(t), return q in k[t]
    such that f - Dq/q is weakly normalized with respect to t.
    f in k(t) is said to be "weakly normalized" with respect to t if
    residue_p(f) is not a positive integer for any normal irreducible p
    in k[t] such that f is in R_p (Definition 6.1.1).  If f has an
    elementary integral, this is equivalent to no logarithm of
    integral(f) whose argument depends on t has a positive integer
    coefficient, where the arguments of the logarithms not in k(t) are
    in k[t].
    Returns (q, f - Dq/q)
    """
    z = z or Dummy('z')
    dn, ds = splitfactor(d, DE)
    # Compute d1, where dn == d1*d2**2*...*dn**n is a square-free
    # factorization of d.
    g = gcd(dn, dn.diff(DE.t))
    d_sqf_part = dn.quo(g)
    d1 = d_sqf_part.quo(gcd(d_sqf_part, g))
    a1, b = gcdex_diophantine(d.quo(d1).as_poly(DE.t), d1.as_poly(DE.t),
        a.as_poly(DE.t))
    # The roots of r in z are the residues of f at the roots of d1.
    r = (a - Poly(z, DE.t)*derivation(d1, DE)).as_poly(DE.t).resultant(
        d1.as_poly(DE.t))
    r = Poly(r, z)
    if not r.has(z):
        return (Poly(1, DE.t), (a, d))
    # Only positive integer residues obstruct weak normality.
    N = [i for i in r.real_roots() if i in ZZ and i > 0]
    q = reduce(mul, [gcd(a - Poly(n, DE.t)*derivation(d1, DE), d1) for n in N],
        Poly(1, DE.t))
    dq = derivation(q, DE)
    sn = q*a - d*dq
    sd = q*d
    sn, sd = sn.cancel(sd, include=True)
    return (q, (sn, sd))
def normal_denom(fa, fd, ga, gd, DE):
    """
    Normal part of the denominator.
    Given a derivation D on k[t] and f, g in k(t) with f weakly
    normalized with respect to t, either raise NonElementaryIntegralException,
    in which case the equation Dy + f*y == g has no solution in k(t), or the
    quadruplet (a, b, c, h) such that a, h in k[t], b, c in k<t>, and for any
    solution y in k(t) of Dy + f*y == g, q = y*h in k<t> satisfies
    a*Dq + b*q == c.
    This constitutes step 1 in the outline given in the rde.py docstring.
    """
    dn, ds = splitfactor(fd, DE)
    en, es = splitfactor(gd, DE)
    p = dn.gcd(en)
    h = en.gcd(en.diff(DE.t)).quo(p.gcd(p.diff(DE.t)))
    a = dn*h
    c = a*h
    if c.div(en)[1]:
        # en does not divide dn*h**2
        raise NonElementaryIntegralException
    ca = c*ga
    ca, cd = ca.cancel(gd, include=True)
    ba = a*fa - dn*derivation(h, DE)*fd
    ba, bd = ba.cancel(fd, include=True)
    # (dn*h, dn*h*f - dn*Dh, dn*h**2*g, h)
    return (a, (ba, bd), (ca, cd), h)
def special_denom(a, ba, bd, ca, cd, DE, case='auto'):
    """
    Special part of the denominator.
    case is one of {'exp', 'tan', 'primitive'} for the hyperexponential,
    hypertangent, and primitive cases, respectively.  For the
    hyperexponential (resp. hypertangent) case, given a derivation D on
    k[t] and a in k[t], b, c, in k<t> with Dt/t in k (resp. Dt/(t**2 + 1) in
    k, sqrt(-1) not in k), a != 0, and gcd(a, t) == 1 (resp.
    gcd(a, t**2 + 1) == 1), return the quadruplet (A, B, C, 1/h) such that
    A, B, C, h in k[t] and for any solution q in k<t> of a*Dq + b*q == c,
    r = qh in k[t] satisfies A*Dr + B*r == C.
    For case == 'primitive', k<t> == k[t], so it returns (a, b, c, 1) in
    this case.
    This constitutes step 2 of the outline given in the rde.py docstring.
    """
    from sympy.integrals.prde import parametric_log_deriv
    # TODO: finish writing this and write tests
    if case == 'auto':
        case = DE.case
    # p is the special polynomial of the extension; only its powers can
    # appear in the special part of the denominator.
    if case == 'exp':
        p = Poly(DE.t, DE.t)
    elif case == 'tan':
        p = Poly(DE.t**2 + 1, DE.t)
    elif case in ['primitive', 'base']:
        B = ba.to_field().quo(bd)
        C = ca.to_field().quo(cd)
        return (a, B, C, Poly(1, DE.t))
    else:
        raise ValueError("case must be one of {'exp', 'tan', 'primitive', "
            "'base'}, not %s." % case)
    nb = order_at(ba, p, DE.t) - order_at(bd, p, DE.t)
    nc = order_at(ca, p, DE.t) - order_at(cd, p, DE.t)
    n = min(0, nc - min(0, nb))
    if not nb:
        # Possible cancellation.
        if case == 'exp':
            dcoeff = DE.d.quo(Poly(DE.t, DE.t))
            with DecrementLevel(DE):  # We are guaranteed to not have problems,
                                      # because case != 'base'.
                alphaa, alphad = frac_in(-ba.eval(0)/bd.eval(0)/a.eval(0), DE.t)
                etaa, etad = frac_in(dcoeff, DE.t)
                A = parametric_log_deriv(alphaa, alphad, etaa, etad, DE)
                if A is not None:
                    a, m, z = A
                    if a == 1:
                        n = min(n, m)
        elif case == 'tan':
            dcoeff = DE.d.quo(Poly(DE.t**2+1, DE.t))
            with DecrementLevel(DE):  # We are guaranteed to not have problems,
                                      # because case != 'base'.
                alphaa, alphad = frac_in(im(-ba.eval(sqrt(-1))/bd.eval(sqrt(-1))/a.eval(sqrt(-1))), DE.t)
                betaa, betad = frac_in(re(-ba.eval(sqrt(-1))/bd.eval(sqrt(-1))/a.eval(sqrt(-1))), DE.t)
                etaa, etad = frac_in(dcoeff, DE.t)
                if recognize_log_derivative(2*betaa, betad, DE):
                    A = parametric_log_deriv(alphaa*sqrt(-1)*betad+alphad*betaa, alphad*betad, etaa, etad, DE)
                    if A is not None:
                        a, m, z = A
                        if a == 1:
                            n = min(n, m)
    N = max(0, -nb, n - nc)
    pN = p**N
    pn = p**-n
    A = a*pN
    B = ba*pN.quo(bd) + Poly(n, DE.t)*a*derivation(p, DE).quo(p)*pN
    C = (ca*pN*pn).quo(cd)
    h = pn
    # (a*p**N, (b + n*a*Dp/p)*p**N, c*p**(N - n), p**-n)
    return (A, B, C, h)
def bound_degree(a, b, cQ, DE, case='auto', parametric=False):
    """
    Bound on polynomial solutions.
    Given a derivation D on k[t] and a, b, c in k[t] with a != 0, return
    n in ZZ such that deg(q) <= n for any solution q in k[t] of
    a*Dq + b*q == c, when parametric=False, or deg(q) <= n for any solution
    c1, ..., cm in Const(k) and q in k[t] of a*Dq + b*q == Sum(ci*gi, (i, 1, m))
    when parametric=True.
    For parametric=False, cQ is c, a Poly; for parametric=True, cQ is Q ==
    [q1, ..., qm], a list of Polys.
    This constitutes step 3 of the outline given in the rde.py docstring.
    """
    from sympy.integrals.prde import (parametric_log_deriv, limited_integrate,
        is_log_deriv_k_t_radical_in_field)
    # TODO: finish writing this and write tests
    if case == 'auto':
        case = DE.case
    da = a.degree(DE.t)
    db = b.degree(DE.t)
    # The parametric and regular cases are identical, except for this part
    if parametric:
        dc = max([i.degree(DE.t) for i in cQ])
    else:
        dc = cQ.degree(DE.t)
    alpha = cancel(-b.as_poly(DE.t).LC().as_expr()/
        a.as_poly(DE.t).LC().as_expr())
    if case == 'base':
        n = max(0, dc - max(db, da - 1))
        if db == da - 1 and alpha.is_Integer:
            n = max(0, alpha, dc - db)
    elif case == 'primitive':
        if db > da:
            n = max(0, dc - db)
        else:
            n = max(0, dc - da + 1)
        etaa, etad = frac_in(DE.d, DE.T[DE.level - 1])
        t1 = DE.t
        with DecrementLevel(DE):
            alphaa, alphad = frac_in(alpha, DE.t)
            if db == da - 1:
                # if alpha == m*Dt + Dz for z in k and m in ZZ:
                try:
                    (za, zd), m = limited_integrate(alphaa, alphad, [(etaa, etad)],
                        DE)
                except NonElementaryIntegralException:
                    pass
                else:
                    if len(m) != 1:
                        raise ValueError("Length of m should be 1")
                    n = max(n, m[0])
            elif db == da:
                # if alpha == Dz/z for z in k*:
                    # beta = -lc(a*Dz + b*z)/(z*lc(a))
                    # if beta == m*Dt + Dw for w in k and m in ZZ:
                        # n = max(n, m)
                A = is_log_deriv_k_t_radical_in_field(alphaa, alphad, DE)
                if A is not None:
                    aa, z = A
                    if aa == 1:
                        beta = -(a*derivation(z, DE).as_poly(t1) +
                            b*z.as_poly(t1)).LC()/(z.as_expr()*a.LC())
                        betaa, betad = frac_in(beta, DE.t)
                        try:
                            (za, zd), m = limited_integrate(betaa, betad,
                                [(etaa, etad)], DE)
                        except NonElementaryIntegralException:
                            pass
                        else:
                            if len(m) != 1:
                                raise ValueError("Length of m should be 1")
                            n = max(n, m[0])
    elif case == 'exp':
        n = max(0, dc - max(db, da))
        if da == db:
            etaa, etad = frac_in(DE.d.quo(Poly(DE.t, DE.t)), DE.T[DE.level - 1])
            with DecrementLevel(DE):
                alphaa, alphad = frac_in(alpha, DE.t)
                A = parametric_log_deriv(alphaa, alphad, etaa, etad, DE)
                if A is not None:
                    # if alpha == m*Dt/t + Dz/z for z in k* and m in ZZ:
                        # n = max(n, m)
                    a, m, z = A
                    if a == 1:
                        n = max(n, m)
    elif case in ['tan', 'other_nonlinear']:
        delta = DE.d.degree(DE.t)
        lam = DE.d.LC()
        alpha = cancel(alpha/lam)
        n = max(0, dc - max(da + delta - 1, db))
        if db == da + delta - 1 and alpha.is_Integer:
            n = max(0, alpha, dc - db)
    else:
        raise ValueError("case must be one of {'exp', 'tan', 'primitive', "
            "'other_nonlinear', 'base'}, not %s." % case)
    return n
def spde(a, b, c, n, DE):
    """
    Rothstein's Special Polynomial Differential Equation algorithm.
    Given a derivation D on k[t], an integer n and a, b, c in k[t] with
    a != 0, either raise NonElementaryIntegralException, in which case the
    equation a*Dq + b*q == c has no solution of degree at most n in
    k[t], or return the tuple (B, C, m, alpha, beta) such that B, C,
    alpha, beta in k[t], m in ZZ, and any solution q in k[t] of degree
    at most n of a*Dq + b*q == c must be of the form
    q == alpha*h + beta, where h in k[t], deg(h) <= m, and Dh + B*h == C.
    This constitutes step 4 of the outline given in the rde.py docstring.
    """
    # alpha and beta accumulate the affine substitution q == alpha*h + beta
    # built up across the reduction iterations below.
    zero = Poly(0, DE.t)
    alpha = Poly(1, DE.t)
    beta = Poly(0, DE.t)
    while True:
        if c.is_zero:
            return (zero, zero, 0, zero, beta)  # -1 is more to the point
        if (n < 0) is True:
            # 'is True' because n may be oo (a SymPy object): then n < 0 is
            # a SymPy false value, which must NOT be treated as failure.
            raise NonElementaryIntegralException
        g = a.gcd(b)
        if not c.rem(g).is_zero:  # g does not divide c
            raise NonElementaryIntegralException
        # Divide out the common factor; any solution of the reduced
        # equation corresponds to a solution of the original one.
        a, b, c = a.quo(g), b.quo(g), c.quo(g)
        if a.degree(DE.t) == 0:
            # a is a unit of k[t]: divide it out and the equation is in the
            # normalized form Dh + B*h == C required by the return value.
            b = b.to_field().quo(a)
            c = c.to_field().quo(a)
            return (b, c, n, alpha, beta)
        # Solve b*r + a*z == c for r with deg(r) < deg(a), then substitute
        # q == a*h + r, which lowers the degree bound by deg(a).
        r, z = gcdex_diophantine(b, a, c)
        b += derivation(a, DE)
        c = z - derivation(r, DE)
        n -= a.degree(DE.t)
        beta += alpha * r
        alpha *= a
def no_cancel_b_large(b, c, n, DE):
    """
    Poly Risch Differential Equation - No cancellation: deg(b) large enough.
    Given a derivation D on k[t], n either an integer or +oo, and b, c
    in k[t] with b != 0 and either D == d/dt or
    deg(b) > max(0, deg(D) - 1), either raise NonElementaryIntegralException, in
    which case the equation Dq + b*q == c has no solution of degree at
    most n in k[t], or a solution q in k[t] of this equation with
    deg(q) < n.
    """
    answer = Poly(0, DE.t)
    while True:
        if c.is_zero:
            # Nothing left to match: the accumulated polynomial solves
            # Dq + b*q == c.
            return answer
        deg_diff = c.degree(DE.t) - b.degree(DE.t)
        if deg_diff < 0 or deg_diff > n:
            # The required leading term would violate the degree bound.
            raise NonElementaryIntegralException
        # Leading-term ansatz: lc(c)/lc(b)*t**deg_diff kills the top
        # coefficient of c (b*q dominates Dq here, so no cancellation).
        term = Poly(c.as_poly(DE.t).LC()/b.as_poly(DE.t).LC()*DE.t**deg_diff,
            DE.t, expand=False)
        answer += term
        c = c - derivation(term, DE) - b*term
        n = deg_diff - 1
def no_cancel_b_small(b, c, n, DE):
    """
    Poly Risch Differential Equation - No cancellation: deg(b) small enough.
    Given a derivation D on k[t], n either an integer or +oo, and b, c
    in k[t] with deg(b) < deg(D) - 1 and either D == d/dt or
    deg(D) >= 2, either raise NonElementaryIntegralException, in which case the
    equation Dq + b*q == c has no solution of degree at most n in k[t],
    or a solution q in k[t] of this equation with deg(q) <= n, or the
    tuple (h, b0, c0) such that h in k[t], b0, c0, in k, and for any
    solution q in k[t] of degree at most n of Dq + bq == c, y == q - h
    is a solution in k of Dy + b0*y == c0.
    """
    partial = Poly(0, DE.t)
    while not c.is_zero:
        # Here Dq dominates b*q, so the candidate degree comes from the
        # derivation instead of from b.
        m = 0 if n == 0 else c.degree(DE.t) - DE.d.degree(DE.t) + 1
        if m < 0 or m > n:
            raise NonElementaryIntegralException
        if m > 0:
            term = Poly(c.as_poly(DE.t).LC()/(m*DE.d.as_poly(DE.t).LC())*DE.t**m,
                DE.t, expand=False)
        else:
            db = b.degree(DE.t)
            if db != c.degree(DE.t):
                raise NonElementaryIntegralException
            if db == 0:
                # b is a constant of k: hand the residual problem down to
                # the smaller field one level below t.
                return (partial, b.as_poly(DE.T[DE.level - 1]),
                        c.as_poly(DE.T[DE.level - 1]))
            term = Poly(c.as_poly(DE.t).LC()/b.as_poly(DE.t).LC(), DE.t,
                expand=False)
        partial += term
        c = c - derivation(term, DE) - b*term
        n = m - 1
    return partial
# TODO: better name for this function
def no_cancel_equal(b, c, n, DE):
    """
    Poly Risch Differential Equation - No cancellation: deg(b) == deg(D) - 1
    Given a derivation D on k[t] with deg(D) >= 2, n either an integer
    or +oo, and b, c in k[t] with deg(b) == deg(D) - 1, either raise
    NonElementaryIntegralException, in which case the equation Dq + b*q == c has
    no solution of degree at most n in k[t], or a solution q in k[t] of
    this equation with deg(q) <= n, or the tuple (h, m, C) such that h
    in k[t], m in ZZ, and C in k[t], and for any solution q in k[t] of
    degree at most n of Dq + b*q == c, y == q - h is a solution in k[t]
    of degree at most m of Dy + b*y == C.
    """
    q = Poly(0, DE.t)
    # M is the one degree at which leading coefficients of Dq and b*q might
    # cancel: -lc(b)/lc(D), if that is a positive integer.  Otherwise no
    # special degree exists and M == -1 is inert in the max() below.
    lc = cancel(-b.as_poly(DE.t).LC()/DE.d.as_poly(DE.t).LC())
    if lc.is_Integer and lc.is_positive:
        M = lc
    else:
        M = -1
    while not c.is_zero:
        m = max(M, c.degree(DE.t) - DE.d.degree(DE.t) + 1)
        if not 0 <= m <= n:  # n < 0 or m < 0 or m > n
            raise NonElementaryIntegralException
        # u is the combined leading coefficient of D(t**m) + b*t**m.
        u = cancel(m*DE.d.as_poly(DE.t).LC() + b.as_poly(DE.t).LC())
        if u.is_zero:
            # The leading terms cancel at degree m: return the residual
            # problem (h, m, C) for the caller to resolve.
            return (q, m, c)
        if m > 0:
            p = Poly(c.as_poly(DE.t).LC()/u*DE.t**m, DE.t, expand=False)
        else:
            if c.degree(DE.t) != DE.d.degree(DE.t) - 1:
                raise NonElementaryIntegralException
            else:
                p = c.as_poly(DE.t).LC()/b.as_poly(DE.t).LC()
        q = q + p
        n = m - 1
        c = c - derivation(p, DE) - b*p
    return q
def cancel_primitive(b, c, n, DE):
    """
    Poly Risch Differential Equation - Cancellation: Primitive case.
    Given a derivation D on k[t], n either an integer or +oo, b in k, and
    c in k[t] with Dt in k and b != 0, either raise
    NonElementaryIntegralException, in which case the equation Dq + b*q == c
    has no solution of degree at most n in k[t], or a solution q in k[t] of
    this equation with deg(q) <= n.
    """
    from sympy.integrals.prde import is_log_deriv_k_t_radical_in_field
    # Work one level down (in k rather than k[t]) to test whether b is the
    # logarithmic derivative of a k-radical; that case would need
    # is_deriv_in_field(), which is not implemented yet.
    with DecrementLevel(DE):
        ba, bd = frac_in(b, DE.t)
        A = is_log_deriv_k_t_radical_in_field(ba, bd, DE)
        if A is not None:
            n, z = A
            if n == 1:  # b == Dz/z
                raise NotImplementedError("is_deriv_in_field() is required to "
                    " solve this problem.")
                # if z*c == Dp for p in k[t] and deg(p) <= n:
                #     return p/z
                # else:
                #     raise NonElementaryIntegralException
    if c.is_zero:
        return c  # return 0
    if n < c.degree(DE.t):
        raise NonElementaryIntegralException
    q = Poly(0, DE.t)
    while not c.is_zero:
        m = c.degree(DE.t)
        if n < m:
            raise NonElementaryIntegralException
        # Solve recursively (one level down) for the coefficient of t**m,
        # then strip that leading term off of c.
        with DecrementLevel(DE):
            a2a, a2d = frac_in(c.LC(), DE.t)
            sa, sd = rischDE(ba, bd, a2a, a2d, DE)
        stm = Poly(sa.as_expr()/sd.as_expr()*DE.t**m, DE.t, expand=False)
        q += stm
        n = m - 1
        c -= b*stm + derivation(stm, DE)
    return q
def cancel_exp(b, c, n, DE):
    """
    Poly Risch Differential Equation - Cancellation: Hyperexponential case.
    Given a derivation D on k[t], n either an integer or +oo, b in k, and
    c in k[t] with Dt/t in k and b != 0, either raise
    NonElementaryIntegralException, in which case the equation Dq + b*q == c
    has no solution of degree at most n in k[t], or a solution q in k[t] of
    this equation with deg(q) <= n.
    """
    from sympy.integrals.prde import parametric_log_deriv
    # eta == Dt/t, which lies in k for the hyperexponential case.
    eta = DE.d.quo(Poly(DE.t, DE.t)).as_expr()
    # Test (one level down) whether b == m*Dt/t + Dz/z; that case would
    # need is_deriv_in_field(), which is not implemented yet.
    with DecrementLevel(DE):
        etaa, etad = frac_in(eta, DE.t)
        ba, bd = frac_in(b, DE.t)
        A = parametric_log_deriv(ba, bd, etaa, etad, DE)
        if A is not None:
            a, m, z = A
            if a == 1:
                raise NotImplementedError("is_deriv_in_field() is required to "
                    "solve this problem.")
                # if c*z*t**m == Dp for p in k<t> and q = p/(z*t**m) in k[t] and
                # deg(q) <= n:
                #     return q
                # else:
                #     raise NonElementaryIntegralException
    if c.is_zero:
        return c  # return 0
    if n < c.degree(DE.t):
        raise NonElementaryIntegralException
    q = Poly(0, DE.t)
    while not c.is_zero:
        m = c.degree(DE.t)
        if n < m:
            raise NonElementaryIntegralException
        # a1 = b + m*Dt/t
        a1 = b.as_expr()
        with DecrementLevel(DE):
            # TODO: Write a dummy function that does this idiom
            a1a, a1d = frac_in(a1, DE.t)
            # Form a1 == b + m*eta as a fraction: (a1a*etad + etaa*a1d*m)/(a1d*etad).
            a1a = a1a*etad + etaa*a1d*Poly(m, DE.t)
            a1d = a1d*etad
            a2a, a2d = frac_in(c.LC(), DE.t)
            # Solve recursively for the coefficient of t**m.
            sa, sd = rischDE(a1a, a1d, a2a, a2d, DE)
        stm = Poly(sa.as_expr()/sd.as_expr()*DE.t**m, DE.t, expand=False)
        q += stm
        n = m - 1
        c -= b*stm + derivation(stm, DE)  # deg(c) becomes smaller
    return q
def solve_poly_rde(b, cQ, n, DE, parametric=False):
    """
    Solve a Polynomial Risch Differential Equation with degree bound n.

    This constitutes step 4 of the outline given in the rde.py docstring.

    For parametric=False, cQ is c, a Poly; for parametric=True, cQ is Q ==
    [q1, ..., qm], a list of Polys.

    Dispatches on deg(b) relative to deg(D) to the no-cancellation
    routines, or to the cancellation routines (which depend on DE.case).
    Raises NonElementaryIntegralException (via the helpers) when no
    solution of degree at most n exists, and NotImplementedError for the
    cancellation cases that are not implemented yet.
    """
    from sympy.integrals.prde import (prde_no_cancel_b_large,
        prde_no_cancel_b_small)

    # No cancellation
    if not b.is_zero and (DE.case == 'base' or
            b.degree(DE.t) > max(0, DE.d.degree(DE.t) - 1)):
        # deg(b) is large: leading terms of Dq and b*q cannot cancel.
        if parametric:
            return prde_no_cancel_b_large(b, cQ, n, DE)
        return no_cancel_b_large(b, cQ, n, DE)

    elif (b.is_zero or b.degree(DE.t) < DE.d.degree(DE.t) - 1) and \
            (DE.case == 'base' or DE.d.degree(DE.t) >= 2):
        # deg(b) is small: Dq dominates, still no cancellation.
        if parametric:
            return prde_no_cancel_b_small(b, cQ, n, DE)

        R = no_cancel_b_small(b, cQ, n, DE)

        if isinstance(R, Poly):
            return R
        else:
            # no_cancel_b_small() left a residual equation in the smaller
            # field k; recurse one level down and lift the answer back up.
            # XXX: Might k be a field? (pg. 209)
            h, b0, c0 = R
            with DecrementLevel(DE):
                b0, c0 = b0.as_poly(DE.t), c0.as_poly(DE.t)
                if b0 is None:  # See above comment
                    raise ValueError("b0 should be a non-Null value")
                if c0 is None:
                    raise ValueError("c0 should be a non-Null value")
                y = solve_poly_rde(b0, c0, n, DE).as_poly(DE.t)
            return h + y

    elif DE.d.degree(DE.t) >= 2 and b.degree(DE.t) == DE.d.degree(DE.t) - 1 and \
            n > -b.as_poly(DE.t).LC()/DE.d.as_poly(DE.t).LC():
        # deg(b) == deg(D) - 1: cancellation is possible only at one
        # special degree, which no_cancel_equal() detects.

        # TODO: Is this check necessary, and if so, what should it do if it fails?
        # b comes from the first element returned from spde()
        if not b.as_poly(DE.t).LC().is_number:
            raise TypeError("Result should be a number")

        if parametric:
            raise NotImplementedError("prde_no_cancel_b_equal() is not yet "
                "implemented.")

        R = no_cancel_equal(b, cQ, n, DE)

        if isinstance(R, Poly):
            return R
        else:
            h, m, C = R
            # XXX: Or should it be rischDE()?
            y = solve_poly_rde(b, C, m, DE)
            return h + y

    else:
        # Cancellation
        if b.is_zero:
            raise NotImplementedError("Remaining cases for Poly (P)RDE are "
                "not yet implemented (is_deriv_in_field() required).")
        else:
            if DE.case == 'exp':
                if parametric:
                    raise NotImplementedError("Parametric RDE cancellation "
                        "hyperexponential case is not yet implemented.")
                return cancel_exp(b, cQ, n, DE)

            elif DE.case == 'primitive':
                if parametric:
                    raise NotImplementedError("Parametric RDE cancellation "
                        "primitive case is not yet implemented.")
                return cancel_primitive(b, cQ, n, DE)

            else:
                # Bug fix: 'case' was an undefined name here (NameError at
                # runtime); the current case lives on the differential
                # extension as DE.case.
                raise NotImplementedError("Other Poly (P)RDE cancellation "
                    "cases are not yet implemented (%s)." % DE.case)
    # NOTE: every branch above returns or raises; the unreachable trailing
    # NotImplementedError raises that used to follow have been removed.
def rischDE(fa, fd, ga, gd, DE):
    """
    Solve a Risch Differential Equation: Dy + f*y == g.
    See the outline in the docstring of rde.py for more information
    about the procedure used. Either raise NonElementaryIntegralException, in
    which case there is no solution y in the given differential field,
    or return y in k(t) satisfying Dy + f*y == g, or raise
    NotImplementedError, in which case, the algorithms necessary to
    solve the given Risch Differential Equation have not yet been
    implemented.

    f and g are given as fractions fa/fd and ga/gd; the answer is the
    pair (ya, yd) with y == ya/yd.
    """
    # Step 1: replace f by its weak normalization.
    _, (fa, fd) = weak_normalizer(fa, fd, DE)
    # Step 2: clear the normal part of the denominator; the equation becomes
    # a*Dq + b*q == c for a polynomial q, with hn the normal denominator of y.
    a, (ba, bd), (ca, cd), hn = normal_denom(fa, fd, ga, gd, DE)
    # Step 3: clear the special part of the denominator (hs); now the
    # equation is A*Dq + B*q == C entirely in k[t].
    A, B, C, hs = special_denom(a, ba, bd, ca, cd, DE)
    try:
        # Until this is fully implemented, use oo. Note that this will almost
        # certainly cause non-termination in spde() (unless A == 1), and
        # *might* lead to non-termination in the next step for a nonelementary
        # integral (I don't know for certain yet). Fortunately, spde() is
        # currently written recursively, so this will just give
        # RuntimeError: maximum recursion depth exceeded.
        n = bound_degree(A, B, C, DE)
    except NotImplementedError:
        # Useful for debugging:
        # import warnings
        # warnings.warn("rischDE: Proceeding with n = oo; may cause "
        #     "non-termination.")
        n = oo
    # Step 4: reduce to Dh + B*h == C with q == alpha*h + beta, then solve
    # the polynomial equation and undo the substitution.
    B, C, m, alpha, beta = spde(A, B, C, n, DE)
    if C.is_zero:
        y = C
    else:
        y = solve_poly_rde(B, C, m, DE)
    # y is the polynomial solution; divide the denominators back in.
    return (alpha*y + beta, hn*hs)
| bsd-3-clause |
Lamecarlate/gourmet | gourmet/recindex.py | 6 | 30248 | #!/usr/bin/env python
from ImageExtras import get_pixbuf_from_jpg
from gdebug import debug
from gettext import gettext as _, ngettext
from gglobals import REC_ATTRS, INT_REC_ATTRS, DEFAULT_HIDDEN_COLUMNS
from gtk_extras import WidgetSaver, ratingWidget, cb_extras as cb, \
mnemonic_manager, pageable_store, treeview_extras as te
import convert
import Undo
import gobject
import gtk
import gtk.gdk
import pango
class RecIndex:
"""We handle the 'index view' of recipes, which puts
a recipe model into a tree and allows it to be searched
and sorted. We're a separate class from the main recipe
program so that we can be created again (e.g. in the recSelector
dialog called from a recipe card."""
default_searches = [{'column':'deleted','operator':'=','search':False}]
def __init__ (self, ui, rd, rg, editable=False):
#self.visible = 1 # can equal 1 or 2
self.editable=editable
self.selected = True
self.rtcols=rg.rtcols
self.rtcolsdic=rg.rtcolsdic
self.rtwidgdic=rg.rtwidgdic
self.prefs=rg.prefs
self.ui = ui
self.rd = rd
self.rg = rg
self.searchByDic = {
unicode(_('anywhere')):'anywhere',
unicode(_('title')):'title',
unicode(_('ingredient')):'ingredient',
unicode(_('instructions')):'instructions',
unicode(_('notes')):'modifications',
unicode(_('category')):'category',
unicode(_('cuisine')):'cuisine',
#_('rating'):'rating',
unicode(_('source')):'source',
}
self.searchByList = [_('anywhere'),
_('title'),
_('ingredient'),
_('category'),
_('cuisine'),
#_('rating'),
_('source'),
_('instructions'),
_('notes'),
]
# ACK, this breaks internationalization!
#self.SEARCH_KEY_DICT = {
# "t":_("title"),
# "i":_("ingredient"),
# "c":_("category"),
# "u":_("cuisine"),
# 's':_("source"),
# }
self.setup_search_actions()
self.setup_widgets()
def setup_widgets (self):
self.srchentry=self.ui.get_object('rlistSearchbox')
self.limitButton = self.ui.get_object('rlAddButton')
# Don't # allow for special keybindings
#self.srchentry.connect('key_press_event',self.srchentry_keypressCB)
self.SEARCH_MENU_KEY = "b"
self.srchLimitBar = self.ui.get_object('srchLimitBar')
assert(self.srchLimitBar)
self.srchLimitBar.hide()
self.srchLimitLabel=self.ui.get_object('srchLimitLabel')
self.srchLimitClearButton = self.ui.get_object('srchLimitClear')
self.srchLimitText=self.srchLimitLabel.get_text()
self.srchLimitDefaultText=self.srchLimitText
self.searchButton = self.ui.get_object('searchButton')
self.rSearchByMenu = self.ui.get_object('rlistSearchByMenu')
cb.set_model_from_list(self.rSearchByMenu, self.searchByList, expand=False)
cb.setup_typeahead(self.rSearchByMenu)
self.rSearchByMenu.set_active(0)
self.rSearchByMenu.connect('changed',self.search_as_you_type)
self.sautTog = self.ui.get_object('searchAsYouTypeToggle')
self.search_actions.get_action('toggleSearchAsYouType').connect_proxy(self.sautTog)
self.regexpTog = self.ui.get_object('regexpTog')
self.searchOptionsBox = self.ui.get_object('searchOptionsBox')
self.search_actions.get_action('toggleShowSearchOptions').connect_proxy(
self.ui.get_object('searchOptionsToggle')
)
self.search_actions.get_action('toggleRegexp').connect_proxy(self.regexpTog)
self.rectree = self.ui.get_object('recTree')
self.sw = self.ui.get_object('scrolledwindow')
self.rectree.connect('start-interactive-search',lambda *args: self.srchentry.grab_focus())
self.prev_button = self.ui.get_object('prevButton')
self.next_button = self.ui.get_object('nextButton')
self.first_button = self.ui.get_object('firstButton')
self.last_button = self.ui.get_object('lastButton')
self.showing_label = self.ui.get_object('showingLabel')
self.stat = self.ui.get_object('statusbar')
self.contid = self.stat.get_context_id('main')
self.setup_search_views()
self.setup_rectree()
self.prev_button.connect('clicked',lambda *args: self.rmodel.prev_page())
self.next_button.connect('clicked',lambda *args: self.rmodel.next_page())
self.first_button.connect('clicked',lambda *args: self.rmodel.goto_first_page())
self.last_button.connect('clicked',lambda *args: self.rmodel.goto_last_page())
self.ui.connect_signals({
'rlistSearch': self.search_as_you_type,
'ingredientSearch' : lambda *args: self.set_search_by('ingredient'),
'titleSearch' : lambda *args: self.set_search_by('title'),
'ratingSearch' : lambda *args: self.set_search_by('rating'),
'categorySearch' : lambda *args: self.set_search_by('category'),
'cuisineSearch' : lambda *args: self.set_search_by('cuisine'),
'search' : self.search,
'searchBoxActivatedCB':self.search_entry_activate_cb,
'rlistReset' : self.reset_search,
'rlistLimit' : self.limit_search,
'search_as_you_type_toggle' : self.toggleTypeSearchCB,})
self.toggleTypeSearchCB(self.sautTog)
# this has to come after the type toggle is connected!
self.rg.conf.append(WidgetSaver.WidgetSaver(
self.sautTog,
self.prefs.get('sautTog',
{'active':self.sautTog.get_active()}),
['toggled']))
self.rg.conf.append(WidgetSaver.WidgetSaver(
self.regexpTog,
self.prefs.get('regexpTog',
{'active':self.regexpTog.get_active()}),
['toggled']))
# and we update our count with each deletion.
self.rd.delete_hooks.append(self.set_reccount)
# setup a history
self.uim=self.ui.get_object('undo_menu_item')
self.rim=self.ui.get_object('redo_menu_item')
self.raim=self.ui.get_object('reapply_menu_item')
self.history = Undo.UndoHistoryList(self.uim,self.rim,self.raim)
# Fix up our mnemonics with some heavenly magic
self.mm = mnemonic_manager.MnemonicManager()
self.mm.sacred_cows.append("search for") # Don't touch _Search for:
self.mm.add_builder(self.ui)
self.mm.add_treeview(self.rectree)
self.mm.fix_conflicts_peacefully()
def setup_search_actions (self):
self.search_actions = gtk.ActionGroup('SearchActions')
self.search_actions.add_toggle_actions([
('toggleRegexp',None,_('Use regular expressions in search'),
None,_('Use regular expressions (an advanced search language) in text search'),
self.toggleRegexpCB,False),
('toggleSearchAsYouType',None,_('Search as you type'),None,
_('Search as you type (turn off if search is too slow).'),
self.toggleTypeSearchCB, True
),
('toggleShowSearchOptions',
None,
_('Show Search _Options'),
None,
_('Show advanced searching options'),
self.toggleShowSearchOptions),
])
def setup_search_views (self):
"""Setup our views of the database."""
self.last_search = {}
#self.rvw = self.rd.fetch_all(self.rd.recipe_table,deleted=False)
self.searches = self.default_searches[0:]
self.sort_by = []
self.rvw = self.rd.search_recipes(self.searches,sort_by=self.sort_by)
def make_rec_visible (self, *args):
"""Make sure recipe REC shows up in our index."""
#if not self.rg.wait_to_filter:
#self.setup_search_views()
self.redo_search()
#debug('make_rec_visible',0)
#self.visible.append(rec.id)
#if not self.rg.wait_to_filter:
# self.rmodel_filter.refilter()
def search_entry_activate_cb (self, *args):
if self.rmodel._get_length_()==1:
self.rec_tree_select_rec()
elif self.srchentry.get_text():
if not self.search_as_you_type:
self.search()
gobject.idle_add(lambda *args: self.limit_search())
else:
self.limit_search()
def rmodel_page_changed_cb (self, rmodel):
if rmodel.page==0:
self.prev_button.set_sensitive(False)
self.first_button.set_sensitive(False)
else:
self.prev_button.set_sensitive(True)
self.first_button.set_sensitive(True)
if rmodel.get_last_page()==rmodel.page:
self.next_button.set_sensitive(False)
self.last_button.set_sensitive(False)
else:
self.next_button.set_sensitive(True)
self.last_button.set_sensitive(True)
self.set_reccount()
def rmodel_sort_cb (self, rmodel, sorts):
self.sort_by = sorts
self.last_search = {}
self.search()
#self.do_search(None,None)
def create_rmodel (self, vw):
self.rmodel = RecipeModel(vw,self.rd,per_page=self.prefs.get('recipes_per_page',12))
#self.set_reccount() # This will be called by the rmodel_page_changed_cb
def setup_rectree (self):
"""Create our recipe treemodel."""
self.create_rmodel(self.rvw)
self.rmodel.connect('page-changed',self.rmodel_page_changed_cb)
self.rmodel.connect('view-changed',self.rmodel_page_changed_cb)
self.rmodel.connect('view-sort',self.rmodel_sort_cb)
# and call our handler once to update our prev/next buttons + label
self.rmodel_page_changed_cb(self.rmodel)
# and hook up our model
self.rectree.set_model(self.rmodel)
self.rectree.get_selection().set_mode(gtk.SELECTION_MULTIPLE)
self.selection_changed()
self.setup_reccolumns()
# this has to come after columns are added or else adding columns resets out column order!
self.rectree_conf=te.TreeViewConf(self.rectree,
hidden=self.prefs.get('rectree_hidden_columns',DEFAULT_HIDDEN_COLUMNS),
order=self.prefs.get('rectree_column_order',{}))
self.rectree_conf.apply_column_order()
self.rectree_conf.apply_visibility()
self.rectree.connect("row-activated",self.rec_tree_select_rec)
self.rectree.connect('key-press-event',self.tree_keypress_cb)
self.rectree.get_selection().connect("changed",self.selection_changedCB)
self.rectree.set_property('rules-hint',True) # stripes!
self.rectree.expand_all()
self.rectree.show()
def set_reccount (self, *args):
"""Display the count of currently visible recipes."""
debug("set_reccount (self, *args):",5)
self.count = self.rmodel._get_length_()
bottom,top,total = self.rmodel.showing()
if top >= total and bottom==1:
lab = ngettext('%s recipe','%s recipes',top)%top
for b in self.first_button,self.prev_button,self.next_button,self.last_button:
b.hide()
else:
for b in self.first_button,self.prev_button,self.next_button,self.last_button:
b.show()
# Do not translate bottom, top and total -- I use these fancy formatting
# strings in case your language needs the order changed!
lab = _('Showing recipes %(bottom)s to %(top)s of %(total)s')%locals()
self.showing_label.set_markup('<i>' + lab + '</i>')
if self.count == 1:
sel = self.rectree.get_selection()
if sel: sel.select_path((0,))
def setup_reccolumns (self):
"""Setup the columns of our recipe index TreeView"""
renderer = gtk.CellRendererPixbuf()
cssu=pageable_store.ColumnSortSetterUpper(self.rmodel)
col = gtk.TreeViewColumn("",renderer,pixbuf=1)
col.set_min_width(-1)
self.rectree.append_column(col)
n = 2
_title_to_num_ = {}
for c in self.rtcols:
if c=='rating':
# special case -- for ratings we set up our lovely
# star widget
twsm = ratingWidget.TreeWithStarMaker(
self.rectree,
self.rg.star_generator,
data_col=n,
col_title='_%s'%self.rtcolsdic[c],
handlers=[self.star_change_cb],
properties={'reorderable':True,
'resizable':True},
)
cssu.set_sort_column_id(twsm.col,twsm.data_col)
n += 1
twsm.col.set_min_width(110)
continue
# And we also special case our time column
elif c in ['preptime','cooktime']:
_title_to_num_[self.rtcolsdic[c]]=n
renderer=gtk.CellRendererText()
renderer.set_property('editable',True)
renderer.connect('edited',self.rtree_time_edited_cb,n,c)
def get_colnum (tc):
try:
t = tc.get_title()
if t:
return _title_to_num_[t.replace('_','')]
else:
print 'wtf, no title for ',tc
return -1
except:
print 'problem with ',tc
raise
ncols = self.rectree.insert_column_with_data_func(
-1,
'_%s'%self.rtcolsdic[c],
renderer,
lambda tc,cell,mod,titr: \
cell.set_property(
'text',
convert.seconds_to_timestring(mod.get_value(
titr,
get_colnum(tc),
#_title_to_num_[tc.get_title().replace('_','')],
))
)
)
col=self.rectree.get_column(ncols-1)
cssu.set_sort_column_id(col,n)
col.set_property('reorderable',True)
col.set_property('resizable',True)
n+=1
continue
elif self.editable and self.rtwidgdic[c]=='Combo':
renderer = gtk.CellRendererCombo()
model = gtk.ListStore(str)
if c=='category':
map(lambda i: model.append([i]),self.rg.rd.get_unique_values(c,self.rg.rd.categories_table)
)
else:
map(lambda i: model.append([i]),self.rg.rd.get_unique_values(c))
renderer.set_property('model',model)
renderer.set_property('text-column',0)
else:
renderer = gtk.CellRendererText()
if c=='link':
renderer.set_property('ellipsize',pango.ELLIPSIZE_END)
else:
renderer.get_property('wrap-width')
renderer.set_property('wrap-mode',pango.WRAP_WORD)
if c == 'title': renderer.set_property('wrap-width',200)
else: renderer.set_property('wrap-width',150)
renderer.set_property('editable',self.editable)
renderer.connect('edited',self.rtree_edited_cb,n, c)
titl = self.rtcolsdic[c]
col = gtk.TreeViewColumn('_%s'%titl,renderer, text=n)
# Ensure that the columns aren't really narrow on initialising.
#if c=='title': # Adjust these two to be even bigger
# col.set_min_width(200)
#else:
# col.set_min_width(60)
if c=='title':
col.set_property('expand',True)
col.set_reorderable(True)
col.set_resizable(True)
col.set_clickable(True)
#col.connect('clicked', self.column_sort)
self.rectree.append_column(col)
cssu.set_sort_column_id(col,n)
debug("Column %s is %s->%s"%(n,c,self.rtcolsdic[c]),5)
n += 1
def toggleTypeSearchCB (self, widget):
"""Toggle search-as-you-type option."""
if widget.get_active():
self.search_as_you_type=True
self.searchButton.hide()
else:
self.search_as_you_type=False
self.searchButton.show()
def toggleRegexpCB (self, widget):
"""Toggle search-with-regexp option."""
#if widget.get_active():
# self.message('Advanced searching (regular expressions) turned on')
#else:
# self.message('Advanced searching off')
pass
def toggleShowSearchOptions (self, widget):
if widget.get_active():
self.searchOptionsBox.show()
else:
self.searchOptionsBox.hide()
def regexpp (self):
"""Return True if we're using regexps"""
if self.regexpTog.get_active():
return True
else:
return False
def search_as_you_type (self, *args):
"""If we're searching-as-we-type, search."""
if self.search_as_you_type:
self.search()
def set_search_by (self, str):
"""Manually set the search by label to str"""
debug('set_search_by',1)
#self.rSearchByMenu.get_children()[0].set_text(str)
cb.cb_set_active_text(self.rSearchByMenu, str)
self.search()
def redo_search (self, *args):
self.last_search = {}
self.search()
def search (self, *args):
debug("search (self, *args):",5)
txt = self.srchentry.get_text()
searchBy = cb.cb_get_active_text(self.rSearchByMenu)
searchBy = self.searchByDic[unicode(searchBy)]
if self.limitButton: self.limitButton.set_sensitive(txt!='')
if self.make_search_dic(txt,searchBy) == self.last_search:
debug("Same search!",1)
return
# Get window
if self.srchentry:
parent = self.srchentry.parent
while parent and not (isinstance(parent,gtk.Window)):
parent = parent.parent
parent.window.set_cursor(gtk.gdk.Cursor(gtk.gdk.WATCH))
debug('Doing new search for %s, last search was %s'%(self.make_search_dic(txt,searchBy),self.last_search),1)
gobject.idle_add(lambda *args: (self.do_search(txt, searchBy) or parent.window.set_cursor(None)))
else:
gobject.idle_add(lambda *args: self.do_search(txt, searchBy))
def make_search_dic (self, txt, searchBy):
srch = {'column':searchBy}
if self.regexpp():
srch['operator'] = 'REGEXP'
srch['search'] = txt.replace(' %s '%_('or'), # or operator for searches
'|')
else:
srch['operator']='LIKE'
srch['search'] = '%' + txt.replace('%','%%')+'%'
return srch
def do_search (self, txt, searchBy):
if txt and searchBy:
srch = self.make_search_dic(txt,searchBy)
self.last_search = srch.copy()
self.update_rmodel(self.rd.search_recipes(
self.searches + [srch],
sort_by=self.sort_by)
)
elif self.searches:
self.update_rmodel(self.rd.search_recipes(
self.searches,
sort_by=self.sort_by)
)
else:
self.update_rmodel(self.rd.fetch_all(self.recipe_table,deleted=False,sort_by=self.sort_by))
def limit_search (self, *args):
debug("limit_search (self, *args):",5)
self.search() # make sure we've done the search...
self.searches.append(self.last_search)
last_col = self.last_search['column']
self.srchLimitBar.show()
if last_col != _('anywhere'):
newtext = ' ' + _('%s in %s')%(self.srchentry.get_text(),last_col)
else:
newtext = ' ' + self.srchentry.get_text()
if self.srchLimitDefaultText!=self.srchLimitLabel.get_text():
newtext = ',' + newtext
self.srchLimitText="%s%s"%(self.srchLimitLabel.get_text(),newtext)
self.srchLimitLabel.set_markup("<i>%s</i>"%self.srchLimitText)
self.srchentry.set_text("")
def reset_search (self, *args):
debug("reset_search (self, *args):",5)
self.srchLimitLabel.set_text(self.srchLimitDefaultText)
self.srchLimitText=self.srchLimitDefaultText
self.srchLimitBar.hide()
self.searches = self.default_searches[0:]
self.last_search={} # reset search so we redo it
self.search()
def get_rec_from_iter (self, iter):
debug("get_rec_from_iter (self, iter): %s"%iter,5)
obj=self.rectree.get_model().get_value(iter,0)
retval=self.rd.get_rec(obj.id)
return retval
def rtree_time_edited_cb (self, renderer, path_string, text, colnum, attribute):
if not text: secs = 0
else:
secs = self.rg.conv.timestring_to_seconds(text)
if not secs:
#self.message(_("Unable to recognize %s as a time."%text))
return
indices = path_string.split(':')
path = tuple( map(int, indices))
store = self.rectree.get_model()
iter = store.get_iter(path)
#self.rmodel.set_value(iter,colnum,secs)
rec = self.get_rec_from_iter(iter)
if convert.seconds_to_timestring(getattr(rec,attribute))!=text:
self.rd.undoable_modify_rec(rec,
{attribute:secs},
self.history,
get_current_rec_method=lambda *args: self.get_selected_recs_from_rec_tree()[0],
)
self.update_modified_recipe(rec,attribute,secs)
# Is this really stupid? I don't know, but I did it before so
# perhaps I had a reason.
#self.rmodel.row_changed(path,iter)
self.rmodel.update_iter(iter)
self.rd.save()
def rtree_edited_cb (self, renderer, path_string, text, colnum, attribute):
debug("rtree_edited_cb (self, renderer, path_string, text, colnum, attribute):",5)
indices = path_string.split(':')
path = tuple( map(int, indices))
store = self.rectree.get_model()
iter = store.get_iter(path)
if not iter: return
#self.rmodel.set_value(iter, colnum, text)
rec=self.get_rec_from_iter(iter)
if attribute=='category':
val = ", ".join(self.rd.get_cats(rec))
else:
val = "%s"%getattr(rec,attribute)
if val!=text:
# only bother with this if the value has actually changed!
self.rd.undoable_modify_rec(rec,
{attribute:text},
self.history,
get_current_rec_method=lambda *args: self.get_selected_recs_from_rec_tree()[0],
)
self.update_modified_recipe(rec,attribute,text)
self.rmodel.update_iter(iter)
self.rd.save()
def tree_keypress_cb (self, widget, event):
keyname = gtk.gdk.keyval_name(event.keyval)
if keyname in ['Page_Up','Page_Down']:
sb = self.sw.get_vscrollbar()
adj = self.sw.get_vscrollbar().get_adjustment()
val = adj.get_value(); upper = adj.get_upper()
if keyname == 'Page_Up':
if val > 0:
return None
self.rmodel.prev_page()
sb.set_value(upper)
return True
if keyname == 'Page_Down':
if val < (upper - adj.page_size):
return None
self.rmodel.next_page()
sb.set_value(0)
return True
if keyname == 'Home':
self.rmodel.goto_first_page()
self.sw.get_vscrollbar().set_value(0)
return True
if keyname == 'End':
self.rmodel.goto_last_page()
sb = self.sw.get_vscrollbar()
sb.set_value(sb.get_adjustment().get_upper())
return True
def star_change_cb (self, value, model, treeiter, column_number):
#itr = model.convert_iter_to_child_iter(None,treeiter)
#self.rmodel.set_value(treeiter,column_number,value)
rec = self.get_rec_from_iter(treeiter)
if getattr(rec,'rating')!=value:
self.rd.undoable_modify_rec(
rec,
{'rating':value},
self.history,
get_current_rec_method = lambda *args: self.get_selected_recs_from_rec_tree()[0],
)
#self.rmodel.row_changed(self.rmodel.get_path(treeiter),treeiter)
self.rmodel.update_iter(treeiter)
def update_modified_recipe(self,rec,attribute,text):
"""Update a modified recipe.
Subclasses can use this to update other widgets duplicating
the information in the index view."""
pass
def rec_tree_select_rec (self, *args):
raise NotImplementedError
def get_selected_recs_from_rec_tree (self):
debug("get_selected_recs_from_rec_tree (self):",5)
def foreach(model,path,iter,recs):
debug("foreach(model,path,iter,recs):",5)
try:
recs.append(model[path][0])
#recs.append(self.get_rec_from_iter(iter))
except:
debug("DEBUG: There was a problem with iter: %s path: %s"%(iter,path),1)
recs=[]
sel = self.rectree.get_selection()
if sel:
sel.selected_foreach(foreach,recs)
return recs
else:
return []
def selection_changedCB (self, *args):
"""We pass along true or false to selection_changed
to say whether there is a selection or not."""
debug("selection_changed (self, *args):",5)
v=self.rectree.get_selection().get_selected_rows()[1]
if v: selected=True
else: selected=False
self.selection_changed(v)
def selection_changed (self, selected=False):
"""This is a way to act whenever the selection changes."""
pass
def visibility_fun (self, model, iter):
try:
if (model.get_value(iter,0) and
not model.get_value(iter,0).deleted and
model.get_value(iter, 0).id in self.visible):
return True
else: return False
except:
debug('something bizaare just happened in visibility_fun',1)
return False
def update_rmodel (self, recipe_table):
self.rmodel.change_view(recipe_table)
self.set_reccount()
class RecipeModel (pageable_store.PageableViewStore):
    """A ListStore to hold our recipes in 'pages' so we don't load our
    whole database at a time.
    """
    per_page = 12
    page = 0
    # Fixed leading columns: the recipe object itself and its thumbnail.
    columns_and_types = [('rec',gobject.TYPE_PYOBJECT,),
                         ('thumb',gtk.gdk.Pixbuf),
                         ]
    # One column per recipe attribute; integer attributes get an int
    # column, everything else is stored as a string.
    for n in [r[0] for r in REC_ATTRS]:
        if n in INT_REC_ATTRS: columns_and_types.append((n,int))
        else: columns_and_types.append((n,str))
    columns = [c[0] for c in columns_and_types]
    column_types = [c[1] for c in columns_and_types]

    def __init__ (self, vw, rd, per_page=None):
        # rd is the recipe-database backend used for lookups below.
        self.rd = rd
        pageable_store.PageableViewStore.__init__(self,
                                                  vw,
                                                  columns=self.columns,
                                                  column_types=self.column_types,
                                                  per_page=per_page)
        self.made_categories = False

    def _get_slice_ (self,bottom,top):
        """Return rows [bottom:top] of the current view as lists of column
        values (called by the pageable-store machinery)."""
        try:
            return [[self._get_value_(r,col) for col in self.columns] for r in self.view[bottom:top]]
        except:
            # Log the slice bounds before re-raising so paging bugs are
            # easier to track down (Python 2 print statement).
            print '_get_slice_ failed with',bottom,top
            raise

    def _get_value_ (self, row, attr):
        """Convert one attribute of a recipe row into its column value."""
        if attr=='category':
            # Categories live in a separate table; join them for display.
            cats = self.rd.get_cats(row)
            if cats: return ", ".join(cats)
            else: return ""
        elif attr=='rec':
            return row
        elif attr=='thumb':
            if row.thumb: return get_pixbuf_from_jpg(row.thumb)
            else: return None
        elif attr in INT_REC_ATTRS:
            # Integer columns may not hold None -- coerce to 0.
            return getattr(row,attr) or 0
        else:
            val = getattr(row,attr)
            if val: return str(val)
            else: return None
        #else:
        #
        #    return str(getattr(row,attr))

    def update_recipe (self, recipe):
        """Handed a recipe (or a recipe ID), we update its display if visible."""
        # NOTE(review): recipe.title is read before the int check below, so
        # passing a bare ID would raise AttributeError -- confirm callers
        # always pass a recipe object here.
        debug('Updating recipe %s'%recipe.title,3)
        if type(recipe)!=int: recipe=recipe.id # make recipe == id
        for n,row in enumerate(self):
            debug('Looking at row',3)
            if row[0].id==recipe:
                # Translate the on-page row number into an index into the
                # full (unpaged) parent list.
                indx = int(n + (self.page * self.per_page))
                # update parent
                self.parent_list[indx] = self.rd.fetch_one(self.rd.recipe_table,
                                                           id=recipe)
                # update self
                self.update_iter(row.iter)
                debug('updated row -- breaking',3)
                break
| gpl-2.0 |
mohamedhagag/connector | connector/unit/synchronizer.py | 18 | 3407 | # -*- coding: utf-8 -*-
##############################################################################
#
# Author: Guewen Baconnier
# Copyright 2013 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from ..connector import ConnectorUnit
from .mapper import Mapper, ImportMapper, ExportMapper
from .backend_adapter import BackendAdapter
class Synchronizer(ConnectorUnit):
    """ Base class for synchronizers """

    # To be overridden in concrete synchronizers.
    _model_name = None
    _base_mapper = Mapper
    _base_backend_adapter = BackendAdapter

    def __init__(self, connector_env):
        super(Synchronizer, self).__init__(connector_env)
        # Units are optional and possibly absent, so they are created
        # lazily by the properties below and memoized here.
        self._backend_adapter = None
        self._binder = None
        self._mapper = None

    def run(self):
        """ Run the synchronization """
        raise NotImplementedError

    @property
    def mapper(self):
        """ Lazily-instantiated ``Mapper`` unit for the synchronization.

        Instantiation is deferred because some synchronizations never
        need a mapper and the unit may not even exist.

        :rtype: :py:class:`connector.unit.mapper.Mapper`
        """
        mapper = self._mapper
        if mapper is None:
            mapper = self.unit_for(self._base_mapper)
            self._mapper = mapper
        return mapper

    @property
    def binder(self):
        """ Lazily-instantiated ``Binder`` unit for the synchronization.

        Instantiation is deferred because some synchronizations never
        need a binder and the unit may not even exist.

        :rtype: :py:class:`connector.unit.binder.Binder`
        """
        binder = self._binder
        if binder is None:
            binder = self.binder_for()
            self._binder = binder
        return binder

    @property
    def backend_adapter(self):
        """ Lazily-instantiated ``BackendAdapter`` unit for the
        synchronization.

        Instantiation is deferred because some synchronizations never
        need a backend adapter and the unit may not even exist.

        :rtype: :py:class:`connector.unit.backend_adapter.BackendAdapter`
        """
        adapter = self._backend_adapter
        if adapter is None:
            adapter = self.unit_for(self._base_backend_adapter)
            self._backend_adapter = adapter
        return adapter
class Exporter(Synchronizer):
    """ Synchronizer for exporting data from OpenERP to a backend """
    # Exports convert OpenERP records to backend data.
    _base_mapper = ExportMapper

# Backward-compatibility alias for the pre-rename class name.
ExportSynchronizer = Exporter # deprecated

class Importer(Synchronizer):
    """ Synchronizer for importing data from a backend to OpenERP """
    # Imports convert backend data to OpenERP values.
    _base_mapper = ImportMapper

# Backward-compatibility alias for the pre-rename class name.
ImportSynchronizer = Importer # deprecated

class Deleter(Synchronizer):
    """ Synchronizer for deleting a record on the backend """

# Backward-compatibility alias for the pre-rename class name.
DeleteSynchronizer = Deleter # deprecated
| agpl-3.0 |
paris-saclay-cds/ramp-workflow | rampwf/score_types/soft_accuracy.py | 1 | 1665 | """A generalization of the classification accuracy with cross-class scores.
Soften the accuracy score by giving scores through certain misclassifications
defined by the score matrix. For example, in ordinal regression we may want
not to penalize too much misclassifications to neighbor classes. The score also
generalizes RMSE-like regression scores for ordinal regression (when true and
predicted output levels are coming from a fixed set) by allowing to define
arbitrary misclassification scores.
"""
from __future__ import division
import numpy as np
from .base import BaseScoreType
class SoftAccuracy(BaseScoreType):
    """Accuracy generalized by a cross-class score matrix.

    ``score_matrix[i, j]`` is the credit granted for predicting class j
    when the truth is class i, which lets near-miss classifications
    (e.g. neighboring ordinal levels) earn partial credit.
    """

    is_lower_the_better = False
    minimum = 0.0

    def __init__(self, score_matrix, name='soft precision', precision=2):
        self.name = name
        self.precision = precision
        # The best achievable score is the largest entry of the matrix.
        self.maximum = np.max(score_matrix)
        self.score_matrix = score_matrix

    def __call__(self, y_true_proba, y_proba):
        # Clip negative probabilities, then renormalize each row so the
        # predictions sum to one.
        clipped = np.clip(y_proba, 0, 1)
        row_sums = np.sum(clipped, axis=1, keepdims=True)
        normalized = clipped / row_sums
        # Spread the true probabilities across classes via the score
        # matrix (the rows no longer sum to one).
        smoothed_truth = y_true_proba.dot(self.score_matrix)
        # Per-sample score is the dot product of the prediction with the
        # smoothed truth; NaNs from all-zero probability rows count as 0.
        per_sample = np.nan_to_num(
            np.sum(normalized * smoothed_truth, axis=1))
        return np.nan_to_num(np.mean(per_sample))
| bsd-3-clause |
GinnyN/Team-Fortress-RPG-Generators | django/core/management/commands/startproject.py | 201 | 1323 | from django.core.management.base import CommandError
from django.core.management.templates import TemplateCommand
from django.utils.crypto import get_random_string
from django.utils.importlib import import_module
class Command(TemplateCommand):
    """Management command that lays down a new Django project skeleton."""

    help = ("Creates a Django project directory structure for the given "
            "project name in the current directory or optionally in the "
            "given directory.")

    def handle(self, project_name=None, target=None, *args, **options):
        """Validate the project name, seed a random SECRET_KEY, then
        delegate to TemplateCommand with the 'project' template."""
        if project_name is None:
            raise CommandError("you must provide a project name")

        # The project name must not shadow an importable Python module.
        importable = True
        try:
            import_module(project_name)
        except ImportError:
            importable = False
        if importable:
            raise CommandError("%r conflicts with the name of an existing "
                               "Python module and cannot be used as a "
                               "project name. Please try another name." %
                               project_name)

        # Create a random SECRET_KEY hash to put it in the main settings.
        secret_chars = 'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)'
        options['secret_key'] = get_random_string(50, secret_chars)
        super(Command, self).handle('project', project_name, target, **options)
| bsd-3-clause |
rembo10/headphones | lib/unidecode/x0be.py | 253 | 4849 | data = (
# Transliterations for the Hangul syllables in Unicode block U+BE00-U+BEFF,
# indexed by the low byte of the code point (the hex comment on each line).
'byum', # 0x00
'byub', # 0x01
'byubs', # 0x02
'byus', # 0x03
'byuss', # 0x04
'byung', # 0x05
'byuj', # 0x06
'byuc', # 0x07
'byuk', # 0x08
'byut', # 0x09
'byup', # 0x0a
'byuh', # 0x0b
'beu', # 0x0c
'beug', # 0x0d
'beugg', # 0x0e
'beugs', # 0x0f
'beun', # 0x10
'beunj', # 0x11
'beunh', # 0x12
'beud', # 0x13
'beul', # 0x14
'beulg', # 0x15
'beulm', # 0x16
'beulb', # 0x17
'beuls', # 0x18
'beult', # 0x19
'beulp', # 0x1a
'beulh', # 0x1b
'beum', # 0x1c
'beub', # 0x1d
'beubs', # 0x1e
'beus', # 0x1f
'beuss', # 0x20
'beung', # 0x21
'beuj', # 0x22
'beuc', # 0x23
'beuk', # 0x24
'beut', # 0x25
'beup', # 0x26
'beuh', # 0x27
'byi', # 0x28
'byig', # 0x29
'byigg', # 0x2a
'byigs', # 0x2b
'byin', # 0x2c
'byinj', # 0x2d
'byinh', # 0x2e
'byid', # 0x2f
'byil', # 0x30
'byilg', # 0x31
'byilm', # 0x32
'byilb', # 0x33
'byils', # 0x34
'byilt', # 0x35
'byilp', # 0x36
'byilh', # 0x37
'byim', # 0x38
'byib', # 0x39
'byibs', # 0x3a
'byis', # 0x3b
'byiss', # 0x3c
'bying', # 0x3d
'byij', # 0x3e
'byic', # 0x3f
'byik', # 0x40
'byit', # 0x41
'byip', # 0x42
'byih', # 0x43
'bi', # 0x44
'big', # 0x45
'bigg', # 0x46
'bigs', # 0x47
'bin', # 0x48
'binj', # 0x49
'binh', # 0x4a
'bid', # 0x4b
'bil', # 0x4c
'bilg', # 0x4d
'bilm', # 0x4e
'bilb', # 0x4f
'bils', # 0x50
'bilt', # 0x51
'bilp', # 0x52
'bilh', # 0x53
'bim', # 0x54
'bib', # 0x55
'bibs', # 0x56
'bis', # 0x57
'biss', # 0x58
'bing', # 0x59
'bij', # 0x5a
'bic', # 0x5b
'bik', # 0x5c
'bit', # 0x5d
'bip', # 0x5e
'bih', # 0x5f
'bba', # 0x60
'bbag', # 0x61
'bbagg', # 0x62
'bbags', # 0x63
'bban', # 0x64
'bbanj', # 0x65
'bbanh', # 0x66
'bbad', # 0x67
'bbal', # 0x68
'bbalg', # 0x69
'bbalm', # 0x6a
'bbalb', # 0x6b
'bbals', # 0x6c
'bbalt', # 0x6d
'bbalp', # 0x6e
'bbalh', # 0x6f
'bbam', # 0x70
'bbab', # 0x71
'bbabs', # 0x72
'bbas', # 0x73
'bbass', # 0x74
'bbang', # 0x75
'bbaj', # 0x76
'bbac', # 0x77
'bbak', # 0x78
'bbat', # 0x79
'bbap', # 0x7a
'bbah', # 0x7b
'bbae', # 0x7c
'bbaeg', # 0x7d
'bbaegg', # 0x7e
'bbaegs', # 0x7f
'bbaen', # 0x80
'bbaenj', # 0x81
'bbaenh', # 0x82
'bbaed', # 0x83
'bbael', # 0x84
'bbaelg', # 0x85
'bbaelm', # 0x86
'bbaelb', # 0x87
'bbaels', # 0x88
'bbaelt', # 0x89
'bbaelp', # 0x8a
'bbaelh', # 0x8b
'bbaem', # 0x8c
'bbaeb', # 0x8d
'bbaebs', # 0x8e
'bbaes', # 0x8f
'bbaess', # 0x90
'bbaeng', # 0x91
'bbaej', # 0x92
'bbaec', # 0x93
'bbaek', # 0x94
'bbaet', # 0x95
'bbaep', # 0x96
'bbaeh', # 0x97
'bbya', # 0x98
'bbyag', # 0x99
'bbyagg', # 0x9a
'bbyags', # 0x9b
'bbyan', # 0x9c
'bbyanj', # 0x9d
'bbyanh', # 0x9e
'bbyad', # 0x9f
'bbyal', # 0xa0
'bbyalg', # 0xa1
'bbyalm', # 0xa2
'bbyalb', # 0xa3
'bbyals', # 0xa4
'bbyalt', # 0xa5
'bbyalp', # 0xa6
'bbyalh', # 0xa7
'bbyam', # 0xa8
'bbyab', # 0xa9
'bbyabs', # 0xaa
'bbyas', # 0xab
'bbyass', # 0xac
'bbyang', # 0xad
'bbyaj', # 0xae
'bbyac', # 0xaf
'bbyak', # 0xb0
'bbyat', # 0xb1
'bbyap', # 0xb2
'bbyah', # 0xb3
'bbyae', # 0xb4
'bbyaeg', # 0xb5
'bbyaegg', # 0xb6
'bbyaegs', # 0xb7
'bbyaen', # 0xb8
'bbyaenj', # 0xb9
'bbyaenh', # 0xba
'bbyaed', # 0xbb
'bbyael', # 0xbc
'bbyaelg', # 0xbd
'bbyaelm', # 0xbe
'bbyaelb', # 0xbf
'bbyaels', # 0xc0
'bbyaelt', # 0xc1
'bbyaelp', # 0xc2
'bbyaelh', # 0xc3
'bbyaem', # 0xc4
'bbyaeb', # 0xc5
'bbyaebs', # 0xc6
'bbyaes', # 0xc7
'bbyaess', # 0xc8
'bbyaeng', # 0xc9
'bbyaej', # 0xca
'bbyaec', # 0xcb
'bbyaek', # 0xcc
'bbyaet', # 0xcd
'bbyaep', # 0xce
'bbyaeh', # 0xcf
'bbeo', # 0xd0
'bbeog', # 0xd1
'bbeogg', # 0xd2
'bbeogs', # 0xd3
'bbeon', # 0xd4
'bbeonj', # 0xd5
'bbeonh', # 0xd6
'bbeod', # 0xd7
'bbeol', # 0xd8
'bbeolg', # 0xd9
'bbeolm', # 0xda
'bbeolb', # 0xdb
'bbeols', # 0xdc
'bbeolt', # 0xdd
'bbeolp', # 0xde
'bbeolh', # 0xdf
'bbeom', # 0xe0
'bbeob', # 0xe1
'bbeobs', # 0xe2
'bbeos', # 0xe3
'bbeoss', # 0xe4
'bbeong', # 0xe5
'bbeoj', # 0xe6
'bbeoc', # 0xe7
'bbeok', # 0xe8
'bbeot', # 0xe9
'bbeop', # 0xea
'bbeoh', # 0xeb
'bbe', # 0xec
'bbeg', # 0xed
'bbegg', # 0xee
'bbegs', # 0xef
'bben', # 0xf0
'bbenj', # 0xf1
'bbenh', # 0xf2
'bbed', # 0xf3
'bbel', # 0xf4
'bbelg', # 0xf5
'bbelm', # 0xf6
'bbelb', # 0xf7
'bbels', # 0xf8
'bbelt', # 0xf9
'bbelp', # 0xfa
'bbelh', # 0xfb
'bbem', # 0xfc
'bbeb', # 0xfd
'bbebs', # 0xfe
'bbes', # 0xff
)
| gpl-3.0 |
python-recsys/mrec | mrec/evaluation/metrics.py | 1 | 5143 | """
Metrics to evaluate recommendations:
* with hit rate, following e.g. Karypis lab SLIM and FISM papers
* with prec@k and MRR
"""
import numpy as np
from collections import defaultdict
# classes to access known items for each test user
class get_known_items_from_dict(object):
    """Look up a test user's known items in a plain ``{user: items}`` dict."""

    def __init__(self, data):
        self.data = data

    def __call__(self, u):
        # Plain dict lookup: a missing user raises KeyError, as before.
        return self.data[u]
class get_known_items_from_csr_matrix(object):
    """Known items for a user are the column indices of the user's row
    in a sparse CSR matrix."""

    def __init__(self, data):
        self.data = data

    def __call__(self, u):
        row = self.data[u]
        return row.indices
class get_known_items_from_thresholded_csr_matrix(object):
    """Known items for a user are the indices of the user's row whose
    value is at least ``min_value``.

    Bug fix: ``ndarray.nonzero()`` on a 1-d array returns a 1-tuple of
    index arrays; the previous code returned that tuple, which is
    inconsistent with the other ``get_known_items_*`` helpers (they
    return a flat index sequence) and breaks ``set(known)`` /
    ``known[0]`` downstream.  We now return the index array itself.
    """

    def __init__(self, data, min_value):
        self.data = data
        self.min_value = min_value

    def __call__(self, u):
        items = self.data[u].toarray().flatten()
        # Zero out below-threshold ratings so they drop out of nonzero().
        items[items < self.min_value] = 0
        return items.nonzero()[0]
# methods to refit a model to a new training dataset
def retrain_recommender(model, dataset):
    """Refit ``model`` on ``dataset`` in place (no return value)."""
    model.fit(dataset)
# methods for metric computation itself
def run_evaluation(models,retrain,get_split,num_runs,evaluation_func):
    """
    This is the main entry point to run an evaluation.

    Supply functions to retrain model, to get a new split of data on
    each run, to get known items from the test set, and to compute the
    metrics you want:

        retrain(model,dataset) should retrain model
        get_split() should return train_data,test_users,test_data
        evaluation_func(model,users,test) should return a dict of metrics

    A number of suitable functions are already available in the module.
    """
    # One {metric name -> list of per-run values} dict per model.
    # NOTE: Python 2 module (xrange / iteritems / print statement below).
    metrics = [defaultdict(list) for m in models]
    for _ in xrange(num_runs):
        # Fresh train/test split for every run.
        train,users,test = get_split()
        for i,model in enumerate(models):
            retrain(model,train)
            run_metrics = evaluation_func(model,train,users,test)
            for m,val in run_metrics.iteritems():
                print m,val
                metrics[i][m].append(val)
    return metrics
def generate_metrics(get_known_items, compute_metrics):
    """Build an ``evaluation_func`` suitable for :func:`run_evaluation`.

    The returned closure adapts :func:`evaluate` to the
    ``f(model, train, users, test)`` signature by wrapping the test set
    with ``get_known_items``.
    """
    def evaluation_func(model, train, users, test):
        known = get_known_items(test)
        return evaluate(model, train, users, known, compute_metrics)
    return evaluation_func
def sort_metrics_by_name(names):
    """Group metric names by prefix, ordering '@k' variants by k.

    e.g. ['prec@10', 'prec@5', 'mrr'] -> ['prec@5', 'prec@10', 'mrr']
    (group order follows dict iteration order over the prefixes).

    Fixed to work on Python 3 as well: the original used
    ``dict.iteritems()`` and mutated the dict while iterating it; we now
    use ``items()`` and sort only when emitting the result.
    """
    # group by name and number in "@n"
    prefix2val = defaultdict(list)
    for name in names:
        parts = name.split('@')
        if len(parts) > 1:
            prefix2val[parts[0]].append(int(parts[1]))
        else:
            # NOTE: as in the original, a bare name maps to an empty list,
            # clobbering any '@k' values already seen for that prefix.
            prefix2val[parts[0]] = []
    ret = []
    for name, vals in prefix2val.items():
        if vals:
            for val in sorted(vals):
                ret.append('{0}@{1}'.format(name, val))
        else:
            ret.append(name)
    return ret
def print_report(models,metrics):
    """
    Call this to print out the metrics returned by run_evaluation().
    """
    # NOTE: Python 2 module (print statements below).
    for model,results in zip(models,metrics):
        print model
        if hasattr(model,'similarity_matrix'):
            # Report the sparsity of the learned item-item matrix.
            nnz = model.similarity_matrix.nnz
            num_items = model.similarity_matrix.shape[0]
            density = float(model.similarity_matrix.nnz)/num_items**2
            print 'similarity matrix nnz = {0} (density {1:.3f})'.format(nnz,density)
        for m in sort_metrics_by_name(results.keys()):
            vals = results[m]
            mean = np.mean(vals)
            std = np.std(vals)
            # Standard error of the mean across runs.
            stderr = std/len(vals)**0.5
            print '{0}{1:.4f} +/- {2:.4f}'.format(m.ljust(15),mean,stderr)
def evaluate(model,train,users,get_known_items,compute_metrics):
    """Average ``compute_metrics`` over the recommendations for ``users``.

    Users for whom ``compute_metrics`` returns a falsy value (e.g. no
    known items) are skipped.  Returns a defaultdict of mean metric
    values; when no user contributes metrics the result is empty (the
    original raised ZeroDivisionError in that case).
    """
    avg_metrics = defaultdict(float)
    count = 0
    for u in users:
        recommended = [r for r, _ in model.recommend_items(train, u, max_items=20)]
        metrics = compute_metrics(recommended, get_known_items(u))
        if metrics:
            for m, val in metrics.items():  # was iteritems(): Python 2-only
                avg_metrics[m] += val
            count += 1
    if count:  # guard against ZeroDivisionError when nothing was scored
        for m in avg_metrics:
            avg_metrics[m] /= float(count)
    return avg_metrics
# collections of metrics
def compute_main_metrics(recommended, known):
    """Precision at several cutoffs plus MRR for one user.

    Returns None when there is no ground truth for the user, which tells
    :func:`evaluate` to skip them.
    """
    if not known:
        return None
    metrics = {'mrr': mrr(recommended, known)}
    for k in (5, 10, 15, 20):
        metrics['prec@{0}'.format(k)] = prec(recommended, known, k)
    return metrics
def compute_hit_rate(recommended, known):
    """Hit rate at 10 for leave-one-out evaluation.

    Returns None when there is no held-out item for this user.
    """
    if not known:
        return None
    return {'hit rate@10': hit_rate(recommended, known, 10)}
# individual metrics
def prec(predicted, true, k):
    """Precision@k: fraction of the top-k predictions that are relevant.

    Returns 0 for an empty prediction list.  As in the original, the
    denominator is the number of predictions actually made (<= k).
    """
    if not predicted:
        return 0
    top = predicted[:k]
    hits = set(top) & set(true)
    return float(len(hits)) / len(top)
def hit_rate(predicted, true, k):
    """1 if the single held-out item appears in the top-k predictions,
    else 0 (leave-one-out evaluation: exactly one true item expected)."""
    assert(len(true)==1)
    return 1 if true[0] in predicted[:k] else 0
def mrr(predicted, true):
    """Reciprocal rank of the first relevant item, or 0 if none appears.

    NB: predictions are truncated upstream, so this can under-report.
    """
    relevant = set(true)
    for rank, item in enumerate(predicted, start=1):
        if item in relevant:
            return 1.0 / rank
    return 0
| bsd-3-clause |
usakhelo/FreeCAD | src/Mod/Fem/Init.py | 2 | 3004 | # FreeCAD init script of the Fem module
# (c) 2001 Juergen Riegel
# ***************************************************************************
# * (c) Juergen Riegel (juergen.riegel@web.de) 2002 *
# * *
# * This file is part of the FreeCAD CAx development system. *
# * *
# * This program is free software; you can redistribute it and/or modify *
# * it under the terms of the GNU Lesser General Public License (LGPL) *
# * as published by the Free Software Foundation; either version 2 of *
# * the License, or (at your option) any later version. *
# * for detail see the LICENCE text file. *
# * *
# * FreeCAD is distributed in the hope that it will be useful, *
# * but WITHOUT ANY WARRANTY; without even the implied warranty of *
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
# * GNU Lesser General Public License for more details. *
# * *
# * You should have received a copy of the GNU Library General Public *
# * License along with FreeCAD; if not, write to the Free Software *
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
# * USA *
# * *
# * Juergen Riegel 2002 *
# ***************************************************************************/
import FreeCAD
# Register the FEM workbench's import/export formats with FreeCAD; the
# second argument names the Python module that handles each format.
FreeCAD.addExportType("TetGen file (*.poly)", "convert2TetGen")
FreeCAD.addImportType("FEM formats (*.unv *.med *.dat *.bdf)", "Fem")
# VTK-based formats are only available when FreeCAD was compiled with
# BUILD_FEM_VTK enabled.
if("BUILD_FEM_VTK" in FreeCAD.__cmake__):
    FreeCAD.addImportType("FEM CFD Unstructure Mesh (*.vtk *.vtu)", "Fem")
    FreeCAD.addExportType("FEM CFD Unstructure Mesh (*.vtk *.vtu)", "Fem")
    FreeCAD.addImportType("FEM results (*.vtk *.vtu)", "importVTKResults")
    FreeCAD.addExportType("FEM CFD Result in VTK format (*.vtk *.vtu)", "importVTKResults")
FreeCAD.addExportType("FEM formats (*.unv *.med *.dat *.inp)", "Fem")
# Solver-specific mesh/result formats handled by dedicated importer modules.
FreeCAD.addImportType("CalculiX result (*.frd)", "importCcxFrdResults")
FreeCAD.addImportType("Fenics mesh file (*.xml)", "importFenicsMesh")
FreeCAD.addExportType("Fenics mesh file (*.xml)", "importFenicsMesh")
FreeCAD.addImportType("Mesh from Calculix/Abaqus input file (*.inp)", "importInpMesh")
FreeCAD.addImportType("Z88 mesh file (*.txt)", "importZ88Mesh")
FreeCAD.addExportType("Z88 mesh file (*.txt)", "importZ88Mesh")
FreeCAD.addImportType("Z88 displacement (o2) result file (*.txt)", "importZ88O2Results")
| lgpl-2.1 |
davehunt/bedrock | bedrock/newsletter/utils.py | 26 | 2669 | from hashlib import md5
from django.conf import settings
from django.core.cache import cache
import basket
import commonware.log
log = commonware.log.getLogger('b.newsletter')
NEWSLETTERS_CACHE_KEY = "newsletter-data"
NEWSLETTERS_CACHE_TIMEOUT = 3600 # 1 hour
def get_newsletters():
    """Return a dictionary with our information about newsletters.

    Keys are the internal keys we use to designate newsletters to basket.
    Values are dictionaries with the remaining newsletter information.

    If we cannot get through to basket, return a default set of newsletters
    from settings.DEFAULT_NEWSLETTERS
    """
    # Get the newsletter data from basket - it's a dictionary of dictionaries.
    # Cached for NEWSLETTERS_CACHE_TIMEOUT (3600 s = 1 hour); the stale
    # "5 minutes" note here no longer matched the constant.
    data = cache.get(NEWSLETTERS_CACHE_KEY)
    if data is None:
        try:
            data = basket.get_newsletters()
        except basket.BasketException:
            # Degrade gracefully rather than failing the page.
            log.exception("Error getting newsletters from basket")
            return settings.DEFAULT_NEWSLETTERS
        # Cache for an hour - newsletters very rarely change
        cache.set(NEWSLETTERS_CACHE_KEY, data, NEWSLETTERS_CACHE_TIMEOUT)
    return data
def get_languages_for_newsletters(newsletters=None):
    """Return a set of language codes supported by the newsletters.

    If no newsletters are provided, it will return language codes
    supported by all newsletters.

    These are 2-letter language codes and `do not` include the country part,
    even if the newsletter languages list does. E.g. this returns 'pt',
    not 'pt-Br'
    """
    # Cache key varies with the requested newsletters.  NOTE(review): this
    # is Python 2 code -- md5() needs bytes on Python 3, and `basestring`
    # below is Python 2-only.
    cache_key = 'newsletter:languages:' + md5(repr(newsletters)).hexdigest()
    langs = cache.get(cache_key)
    if langs is None:
        all_newsletters = get_newsletters()
        if newsletters is None:
            newsletters = all_newsletters.values()
        else:
            if isinstance(newsletters, basestring):
                # Accept a comma-separated string of newsletter ids.
                newsletters = [nl.strip() for nl in newsletters.split(',')]
            # Unknown ids fall back to {} so they contribute no languages.
            newsletters = [all_newsletters.get(nl, {}) for nl in newsletters]
        langs = set()
        for newsletter in newsletters:
            # Keep only the bare 2-letter code, e.g. 'pt-BR' -> 'pt'.
            langs.update(lang[:2].lower() for lang in newsletter.get('languages', []))
        cache.set(cache_key, langs, NEWSLETTERS_CACHE_TIMEOUT)
    return langs
def custom_unsub_reason(token, reason):
    """Tell basket why the user unsubscribed.

    This is calling a basket API that's custom to Mozilla, which is why
    there is no helper for it in the basket-client package.
    """
    return basket.request('post', 'custom_unsub_reason',
                          data={'token': token, 'reason': reason})
| mpl-2.0 |
mining/mining | mining/models/cube.py | 4 | 3819 | # -*- coding: utf-8 -*-
import gc
import pandas
from datetime import datetime
from pandas import DataFrame
from sqlalchemy import create_engine
from sqlalchemy.sql import text
from sqlalchemy.orm import sessionmaker
from mining.utils import conf, log_it
from mining.utils._pandas import fix_render
from mining.db import DataWarehouse
from bottle.ext.mongo import MongoPlugin
class Cube(object):
    """Runs one cube: loads its SQL from the configured relational
    source, builds a pandas DataFrame (optionally post-processed by
    OML), and saves the result to the data warehouse."""

    def __init__(self, _cube):
        log_it("START: {}".format(_cube['slug']), "bin-mining")
        self.mongo = MongoPlugin(
            uri=conf("mongodb")["uri"],
            db=conf("mongodb")["db"],
            json_mongo=True).get_mongo()
        # Drop Mongo's _id so the document can be re-saved cleanly.
        try:
            del _cube['_id']
        except KeyError:
            pass
        self.cube = _cube
        self.slug = self.cube['slug']

    def load(self):
        """Mark the cube as running and fetch its rows from the source DB."""
        self.cube['run'] = 'run'
        self.mongo['cube'].update({'slug': self.slug}, self.cube)
        self.cube['start_process'] = datetime.now()
        # Wrap the cube's SQL as a subquery (trailing ';' would break it).
        _sql = self.cube['sql']
        if _sql[-1] == ';':
            _sql = _sql[:-1]
        self.sql = u"""SELECT * FROM ({}) AS CUBE;""".format(_sql)
        self.connection = self.mongo['connection'].find_one({
            'slug': self.cube['connection']})['connection']
        log_it("CONNECT IN RELATION DATA BASE: {}".format(self.slug),
               "bin-mining")
        # sqlite does not accept the pooling kwargs used for server DBs.
        if 'sqlite' in self.connection:
            e = create_engine(self.connection)
        else:
            e = create_engine(self.connection,
                              **conf('openmining')['sql_conn_params'])
        Session = sessionmaker(bind=e)
        session = Session()
        resoverall = session.execute(text(self.sql))
        self.data = resoverall.fetchall()
        self.keys = resoverall.keys()

    def environment(self, t):
        # Anything other than 'relational' is treated as raw SQL/source text.
        if t not in ['relational']:
            self.sql = t

    def _data(self, data):
        self.data = data

    def _keys(self, keys):
        # NOTE(review): the first assignment is dead -- self.keys is
        # unconditionally overwritten by list(keys) below.  Confirm the
        # intent before simplifying.
        if type(keys) == list:
            self.keys = keys
        self.keys = list(keys)

    def frame(self, data_type=None):
        """Build self.df / self.pdict from the loaded data.

        ``data_type``, when given, names a ``pandas.read_<data_type>``
        reader to parse self.data; otherwise the rows go straight into a
        DataFrame.
        """
        log_it("LOAD DATA ON DATAWAREHOUSE via {}: {}".format(
            data_type or 'dict', self.slug), "bin-mining")
        if data_type:
            self.df = getattr(pandas, "read_{}".format(data_type))(self.data)
        else:
            self.df = DataFrame(self.data)
        if self.df.empty:
            self.pdict = {}
            log_it('[warning]Empty cube: {}!!'.format(self.cube),
                   "bin-mining")
            return
        try:
            self.df.columns = self.keys
        except AttributeError:
            # No keys were captured (e.g. data came from _data()); take
            # them from the DataFrame instead.
            self._keys(self.df.columns.tolist())
        # If the OML is active, it renders the script that there is
        if conf("oml").get("on") and self.cube.get("oml"):
            from oml import RunTime
            self.df.columns = self.keys
            df = RunTime(conf("oml").get("language", "lua"),
                         self.df.to_dict(orient='records'),
                         self.cube.get("oml"),
                         conf("oml").get("class", {"OML": "oml.base.OMLBase"}))
            self.df = DataFrame(df)
            self._keys(self.df.columns.tolist())
        self.df.head()
        # NOTE(review): map() returns a list on Python 2 only; on Python 3
        # this would leave a lazy iterator in self.pdict.
        self.pdict = map(fix_render, self.df.to_dict(orient='records'))

    def save(self):
        """Persist the rendered rows to the warehouse and mark the cube done."""
        log_it("SAVE DATA (JSON) ON DATA WAREHOUSE: {}".format(self.slug),
               "bin-mining")
        data = {'data': self.pdict, 'columns': self.keys}
        DW = DataWarehouse()
        DW.save(self.slug, data)
        self.cube['status'] = True
        self.cube['lastupdate'] = datetime.now()
        self.cube['run'] = True
        self.mongo['cube'].update({'slug': self.cube['slug']}, self.cube)
        log_it("CLEAN MEMORY: {}".format(self.slug), "bin-mining")
        gc.collect()
| mit |
leafclick/intellij-community | python/helpers/py2only/docutils/parsers/rst/languages/zh_cn.py | 128 | 4007 | # -*- coding: utf-8 -*-
# $Id: zh_cn.py 7119 2011-09-02 13:00:23Z milde $
# Author: Panjunyong <panjy@zopechina.com>
# Copyright: This module has been placed in the public domain.
# New language mappings are welcome. Before doing a new translation, please
# read <http://docutils.sf.net/docs/howto/i18n.html>. Two files must be
# translated for each language: one in docutils/languages, the other in
# docutils/parsers/rst/languages.
"""
Simplified Chinese language mappings for language-dependent features of
reStructuredText.
"""
__docformat__ = 'reStructuredText'
# Keys still containing "(translation required)" keep their English form
# until a Simplified Chinese translation is contributed.
directives = {
    # language-dependent: fixed
    u'注意': 'attention',
    u'小心': 'caution',
    u'code (translation required)': 'code',
    u'危险': 'danger',
    u'错误': 'error',
    u'提示': 'hint',
    u'重要': 'important',
    u'注解': 'note',
    u'技巧': 'tip',
    u'警告': 'warning',
    u'忠告': 'admonition',
    u'侧框': 'sidebar',
    u'主题': 'topic',
    u'line-block (translation required)': 'line-block',
    u'parsed-literal (translation required)': 'parsed-literal',
    u'醒目': 'rubric',
    u'铭文': 'epigraph',
    u'要点': 'highlights',
    u'pull-quote (translation required)': 'pull-quote',
    u'复合': 'compound',
    u'容器': 'container',
    #u'questions (translation required)': 'questions',
    u'表格': 'table',
    u'csv表格': 'csv-table',
    u'列表表格': 'list-table',
    #u'qa (translation required)': 'questions',
    #u'faq (translation required)': 'questions',
    u'元数据': 'meta',
    u'math (translation required)': 'math',
    #u'imagemap (translation required)': 'imagemap',
    u'图片': 'image',
    u'图例': 'figure',
    u'包含': 'include',
    u'原文': 'raw',
    u'代替': 'replace',
    u'统一码': 'unicode',
    u'日期': 'date',
    u'类型': 'class',
    u'角色': 'role',
    u'默认角色': 'default-role',
    u'标题': 'title',
    u'目录': 'contents',
    u'章节序号': 'sectnum',
    u'题头': 'header',
    u'页脚': 'footer',
    #u'footnotes (translation required)': 'footnotes',
    #u'citations (translation required)': 'citations',
    u'target-notes (translation required)': 'target-notes',
    u'restructuredtext-test-directive': 'restructuredtext-test-directive'}
"""Simplified Chinese name to registered (in directives/__init__.py)
directive name mapping."""
# Interpreted-text role names; entries marked "(translation required)"
# keep their English form until translated.
roles = {
    # language-dependent: fixed
    u'缩写': 'abbreviation',
    u'简称': 'acronym',
    u'code (translation required)': 'code',
    u'index (translation required)': 'index',
    u'i (translation required)': 'index',
    u'下标': 'subscript',
    u'上标': 'superscript',
    u'title-reference (translation required)': 'title-reference',
    u'title (translation required)': 'title-reference',
    u't (translation required)': 'title-reference',
    u'pep-reference (translation required)': 'pep-reference',
    u'pep (translation required)': 'pep-reference',
    u'rfc-reference (translation required)': 'rfc-reference',
    u'rfc (translation required)': 'rfc-reference',
    u'强调': 'emphasis',
    u'加粗': 'strong',
    u'字面': 'literal',
    u'math (translation required)': 'math',
    u'named-reference (translation required)': 'named-reference',
    u'anonymous-reference (translation required)': 'anonymous-reference',
    u'footnote-reference (translation required)': 'footnote-reference',
    u'citation-reference (translation required)': 'citation-reference',
    u'substitution-reference (translation required)': 'substitution-reference',
    u'target (translation required)': 'target',
    u'uri-reference (translation required)': 'uri-reference',
    u'uri (translation required)': 'uri-reference',
    u'url (translation required)': 'uri-reference',
    u'raw (translation required)': 'raw',}
"""Mapping of Simplified Chinese role names to canonical role names
for interpreted text."""
| apache-2.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.