code
stringlengths 13
6.09M
| order_type
stringclasses 2
values | original_example
dict | step_ids
listlengths 1
5
|
|---|---|---|---|
import torch.nn as nn
from layers import maskAConv, MaskBConvBlock
class PixelCNN(nn.Module):
    """PixelCNN: masked-convolution autoregressive model over images.

    For every pixel and input channel the network emits logits over
    ``discrete_channel`` possible intensity values.
    """

    def __init__(self, n_channel=3, h=128, discrete_channel=256):
        """PixelCNN Model"""
        super(PixelCNN, self).__init__()

        self.discrete_channel = discrete_channel

        # Mask-A convolution: the first layer must not see the current pixel.
        self.MaskAConv = maskAConv(n_channel, 2 * h, k_size=7, stride=1, pad=3)

        # Stack of 15 Mask-B convolution blocks.
        blocks = [MaskBConvBlock(h, k_size=3, stride=1, pad=1) for _ in range(15)]
        self.MaskBConv = nn.Sequential(*blocks)

        # 1x1 convolutions mapping 2h features to n_channel * discrete_channel logits.
        self.out = nn.Sequential(
            nn.ReLU(),
            nn.Conv2d(2 * h, 1024, kernel_size=1, stride=1, padding=0),
            nn.BatchNorm2d(1024),
            nn.ReLU(),
            nn.Conv2d(1024, n_channel * discrete_channel, kernel_size=1, stride=1, padding=0),
        )

    def forward(self, x):
        """
        Args:
            x: [batch_size, channel, height, width]
        Return:
            out [batch_size, channel, height, width, 256]
        """
        batch_size, c_in, height, width = x.size()

        hidden = self.MaskBConv(self.MaskAConv(x))  # [B, 2h, H, W]
        logits = self.out(hidden)                   # [B, C * discrete_channel, H, W]

        # Split the channel axis into (channel, intensity) ...
        logits = logits.view(batch_size, c_in, self.discrete_channel, height, width)
        # ... then move intensity to the last axis: [B, C, H, W, discrete_channel].
        return logits.permute(0, 1, 3, 4, 2)
|
normal
|
{
"blob_id": "3185b6b1902099caed66ce6f97cd1b9940261fc1",
"index": 7533,
"step-1": "<mask token>\n\n\nclass PixelCNN(nn.Module):\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass PixelCNN(nn.Module):\n <mask token>\n\n def forward(self, x):\n \"\"\"\n Args:\n x: [batch_size, channel, height, width]\n Return:\n out [batch_size, channel, height, width, 256]\n \"\"\"\n batch_size, c_in, height, width = x.size()\n x = self.MaskAConv(x)\n x = self.MaskBConv(x)\n x = self.out(x)\n x = x.view(batch_size, c_in, self.discrete_channel, height, width)\n x = x.permute(0, 1, 3, 4, 2)\n return x\n",
"step-3": "<mask token>\n\n\nclass PixelCNN(nn.Module):\n\n def __init__(self, n_channel=3, h=128, discrete_channel=256):\n \"\"\"PixelCNN Model\"\"\"\n super(PixelCNN, self).__init__()\n self.discrete_channel = discrete_channel\n self.MaskAConv = maskAConv(n_channel, 2 * h, k_size=7, stride=1, pad=3)\n MaskBConv = []\n for i in range(15):\n MaskBConv.append(MaskBConvBlock(h, k_size=3, stride=1, pad=1))\n self.MaskBConv = nn.Sequential(*MaskBConv)\n self.out = nn.Sequential(nn.ReLU(), nn.Conv2d(2 * h, 1024,\n kernel_size=1, stride=1, padding=0), nn.BatchNorm2d(1024), nn.\n ReLU(), nn.Conv2d(1024, n_channel * discrete_channel,\n kernel_size=1, stride=1, padding=0))\n\n def forward(self, x):\n \"\"\"\n Args:\n x: [batch_size, channel, height, width]\n Return:\n out [batch_size, channel, height, width, 256]\n \"\"\"\n batch_size, c_in, height, width = x.size()\n x = self.MaskAConv(x)\n x = self.MaskBConv(x)\n x = self.out(x)\n x = x.view(batch_size, c_in, self.discrete_channel, height, width)\n x = x.permute(0, 1, 3, 4, 2)\n return x\n",
"step-4": "import torch.nn as nn\nfrom layers import maskAConv, MaskBConvBlock\n\n\nclass PixelCNN(nn.Module):\n\n def __init__(self, n_channel=3, h=128, discrete_channel=256):\n \"\"\"PixelCNN Model\"\"\"\n super(PixelCNN, self).__init__()\n self.discrete_channel = discrete_channel\n self.MaskAConv = maskAConv(n_channel, 2 * h, k_size=7, stride=1, pad=3)\n MaskBConv = []\n for i in range(15):\n MaskBConv.append(MaskBConvBlock(h, k_size=3, stride=1, pad=1))\n self.MaskBConv = nn.Sequential(*MaskBConv)\n self.out = nn.Sequential(nn.ReLU(), nn.Conv2d(2 * h, 1024,\n kernel_size=1, stride=1, padding=0), nn.BatchNorm2d(1024), nn.\n ReLU(), nn.Conv2d(1024, n_channel * discrete_channel,\n kernel_size=1, stride=1, padding=0))\n\n def forward(self, x):\n \"\"\"\n Args:\n x: [batch_size, channel, height, width]\n Return:\n out [batch_size, channel, height, width, 256]\n \"\"\"\n batch_size, c_in, height, width = x.size()\n x = self.MaskAConv(x)\n x = self.MaskBConv(x)\n x = self.out(x)\n x = x.view(batch_size, c_in, self.discrete_channel, height, width)\n x = x.permute(0, 1, 3, 4, 2)\n return x\n",
"step-5": "import torch.nn as nn\nfrom layers import maskAConv, MaskBConvBlock\n\n\nclass PixelCNN(nn.Module):\n def __init__(self, n_channel=3, h=128, discrete_channel=256):\n \"\"\"PixelCNN Model\"\"\"\n super(PixelCNN, self).__init__()\n\n self.discrete_channel = discrete_channel\n\n self.MaskAConv = maskAConv(n_channel, 2 * h, k_size=7, stride=1, pad=3)\n MaskBConv = []\n for i in range(15):\n MaskBConv.append(MaskBConvBlock(h, k_size=3, stride=1, pad=1))\n self.MaskBConv = nn.Sequential(*MaskBConv)\n\n # 1x1 conv to 3x256 channels\n self.out = nn.Sequential(\n nn.ReLU(),\n nn.Conv2d(2 * h, 1024, kernel_size=1, stride=1, padding=0),\n nn.BatchNorm2d(1024),\n nn.ReLU(),\n nn.Conv2d(1024, n_channel * discrete_channel, kernel_size=1, stride=1, padding=0))\n\n def forward(self, x):\n \"\"\"\n Args:\n x: [batch_size, channel, height, width]\n Return:\n out [batch_size, channel, height, width, 256]\n \"\"\"\n batch_size, c_in, height, width = x.size()\n\n # [batch_size, 2h, 32, 32]\n x = self.MaskAConv(x)\n\n # [batch_size, 2h, 32, 32]\n x = self.MaskBConv(x)\n\n # [batch_size, 3x256, 32, 32]\n x = self.out(x)\n\n # [batch_size, 3, 256, 32, 32]\n x = x.view(batch_size, c_in, self.discrete_channel, height, width)\n\n # [batch_size, 3, 32, 32, 256]\n x = x.permute(0, 1, 3, 4, 2)\n\n return x\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
class TextSplitterConfig(BaseTextProcessorConfig):
max_split_length: int = 512
split_stride: int = 0
document_id_key: Optional[str]
class TextSplitter(BaseTextPreprocessor):
def preprocess_input(self, input_list: List[TextPayload], config:
TextSplitterConfig, **kwargs) ->List[TextPayload]:
text_splits: List[TextPayload] = []
for idx, input_data in enumerate(input_list):
if (config.document_id_key and input_data.meta and config.
document_id_key in input_data.meta):
document_id = str(input_data.meta.get(config.document_id_key))
else:
document_id = uuid.uuid4().hex
start_idx = 0
split_id = 0
document_splits: List[TextSplitterPayload] = []
document_length = len(input_data.processed_text)
while start_idx < document_length:
if config.split_stride > 0 and start_idx > 0:
start_idx = self._valid_index(input_data.processed_text,
start_idx - config.split_stride) + 1
end_idx = self._valid_index(input_data.processed_text, min(
start_idx + config.max_split_length, document_length))
phrase = input_data.processed_text[start_idx:end_idx]
document_splits.append(TextSplitterPayload(phrase=phrase,
chunk_id=split_id, chunk_length=len(phrase),
start_index=start_idx, end_index=end_idx, document_id=
document_id, text_length=document_length))
start_idx = end_idx + 1
split_id += 1
total_splits = len(document_splits)
for split in document_splits:
split.total_chunks = total_splits
payload = TextPayload(processed_text=split.phrase,
source_name=input_data.source_name, segmented_data=
input_data.segmented_data, meta={**input_data.meta, **{
'splitter': split}} if input_data.meta else {'splitter':
split})
text_splits.append(payload)
return text_splits
@staticmethod
def _valid_index(document: str, idx: int):
if idx <= 0:
return 0
if idx >= len(document):
return len(document)
new_idx = idx
while new_idx > 0:
if document[new_idx] in [' ', '\n', '\t']:
break
new_idx -= 1
return new_idx
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class TextSplitterPayload(BaseModel):
phrase: str
chunk_id: int
chunk_length: int
start_index: int
end_index: int
document_id: str
text_length: int
total_chunks: Optional[int]
class TextSplitterConfig(BaseTextProcessorConfig):
max_split_length: int = 512
split_stride: int = 0
document_id_key: Optional[str]
class TextSplitter(BaseTextPreprocessor):
def preprocess_input(self, input_list: List[TextPayload], config:
TextSplitterConfig, **kwargs) ->List[TextPayload]:
text_splits: List[TextPayload] = []
for idx, input_data in enumerate(input_list):
if (config.document_id_key and input_data.meta and config.
document_id_key in input_data.meta):
document_id = str(input_data.meta.get(config.document_id_key))
else:
document_id = uuid.uuid4().hex
start_idx = 0
split_id = 0
document_splits: List[TextSplitterPayload] = []
document_length = len(input_data.processed_text)
while start_idx < document_length:
if config.split_stride > 0 and start_idx > 0:
start_idx = self._valid_index(input_data.processed_text,
start_idx - config.split_stride) + 1
end_idx = self._valid_index(input_data.processed_text, min(
start_idx + config.max_split_length, document_length))
phrase = input_data.processed_text[start_idx:end_idx]
document_splits.append(TextSplitterPayload(phrase=phrase,
chunk_id=split_id, chunk_length=len(phrase),
start_index=start_idx, end_index=end_idx, document_id=
document_id, text_length=document_length))
start_idx = end_idx + 1
split_id += 1
total_splits = len(document_splits)
for split in document_splits:
split.total_chunks = total_splits
payload = TextPayload(processed_text=split.phrase,
source_name=input_data.source_name, segmented_data=
input_data.segmented_data, meta={**input_data.meta, **{
'splitter': split}} if input_data.meta else {'splitter':
split})
text_splits.append(payload)
return text_splits
@staticmethod
def _valid_index(document: str, idx: int):
if idx <= 0:
return 0
if idx >= len(document):
return len(document)
new_idx = idx
while new_idx > 0:
if document[new_idx] in [' ', '\n', '\t']:
break
new_idx -= 1
return new_idx
<|reserved_special_token_1|>
<|reserved_special_token_0|>
logger = logging.getLogger(__name__)
class TextSplitterPayload(BaseModel):
phrase: str
chunk_id: int
chunk_length: int
start_index: int
end_index: int
document_id: str
text_length: int
total_chunks: Optional[int]
class TextSplitterConfig(BaseTextProcessorConfig):
max_split_length: int = 512
split_stride: int = 0
document_id_key: Optional[str]
class TextSplitter(BaseTextPreprocessor):
def preprocess_input(self, input_list: List[TextPayload], config:
TextSplitterConfig, **kwargs) ->List[TextPayload]:
text_splits: List[TextPayload] = []
for idx, input_data in enumerate(input_list):
if (config.document_id_key and input_data.meta and config.
document_id_key in input_data.meta):
document_id = str(input_data.meta.get(config.document_id_key))
else:
document_id = uuid.uuid4().hex
start_idx = 0
split_id = 0
document_splits: List[TextSplitterPayload] = []
document_length = len(input_data.processed_text)
while start_idx < document_length:
if config.split_stride > 0 and start_idx > 0:
start_idx = self._valid_index(input_data.processed_text,
start_idx - config.split_stride) + 1
end_idx = self._valid_index(input_data.processed_text, min(
start_idx + config.max_split_length, document_length))
phrase = input_data.processed_text[start_idx:end_idx]
document_splits.append(TextSplitterPayload(phrase=phrase,
chunk_id=split_id, chunk_length=len(phrase),
start_index=start_idx, end_index=end_idx, document_id=
document_id, text_length=document_length))
start_idx = end_idx + 1
split_id += 1
total_splits = len(document_splits)
for split in document_splits:
split.total_chunks = total_splits
payload = TextPayload(processed_text=split.phrase,
source_name=input_data.source_name, segmented_data=
input_data.segmented_data, meta={**input_data.meta, **{
'splitter': split}} if input_data.meta else {'splitter':
split})
text_splits.append(payload)
return text_splits
@staticmethod
def _valid_index(document: str, idx: int):
if idx <= 0:
return 0
if idx >= len(document):
return len(document)
new_idx = idx
while new_idx > 0:
if document[new_idx] in [' ', '\n', '\t']:
break
new_idx -= 1
return new_idx
<|reserved_special_token_1|>
import logging
from typing import List, Optional
import uuid
from pydantic import BaseModel
from obsei.payload import TextPayload
from obsei.preprocessor.base_preprocessor import BaseTextPreprocessor, BaseTextProcessorConfig
logger = logging.getLogger(__name__)
class TextSplitterPayload(BaseModel):
phrase: str
chunk_id: int
chunk_length: int
start_index: int
end_index: int
document_id: str
text_length: int
total_chunks: Optional[int]
class TextSplitterConfig(BaseTextProcessorConfig):
max_split_length: int = 512
split_stride: int = 0
document_id_key: Optional[str]
class TextSplitter(BaseTextPreprocessor):
def preprocess_input(self, input_list: List[TextPayload], config:
TextSplitterConfig, **kwargs) ->List[TextPayload]:
text_splits: List[TextPayload] = []
for idx, input_data in enumerate(input_list):
if (config.document_id_key and input_data.meta and config.
document_id_key in input_data.meta):
document_id = str(input_data.meta.get(config.document_id_key))
else:
document_id = uuid.uuid4().hex
start_idx = 0
split_id = 0
document_splits: List[TextSplitterPayload] = []
document_length = len(input_data.processed_text)
while start_idx < document_length:
if config.split_stride > 0 and start_idx > 0:
start_idx = self._valid_index(input_data.processed_text,
start_idx - config.split_stride) + 1
end_idx = self._valid_index(input_data.processed_text, min(
start_idx + config.max_split_length, document_length))
phrase = input_data.processed_text[start_idx:end_idx]
document_splits.append(TextSplitterPayload(phrase=phrase,
chunk_id=split_id, chunk_length=len(phrase),
start_index=start_idx, end_index=end_idx, document_id=
document_id, text_length=document_length))
start_idx = end_idx + 1
split_id += 1
total_splits = len(document_splits)
for split in document_splits:
split.total_chunks = total_splits
payload = TextPayload(processed_text=split.phrase,
source_name=input_data.source_name, segmented_data=
input_data.segmented_data, meta={**input_data.meta, **{
'splitter': split}} if input_data.meta else {'splitter':
split})
text_splits.append(payload)
return text_splits
@staticmethod
def _valid_index(document: str, idx: int):
if idx <= 0:
return 0
if idx >= len(document):
return len(document)
new_idx = idx
while new_idx > 0:
if document[new_idx] in [' ', '\n', '\t']:
break
new_idx -= 1
return new_idx
<|reserved_special_token_1|>
import logging
from typing import List, Optional
import uuid
from pydantic import BaseModel
from obsei.payload import TextPayload
from obsei.preprocessor.base_preprocessor import (
BaseTextPreprocessor,
BaseTextProcessorConfig,
)
logger = logging.getLogger(__name__)
class TextSplitterPayload(BaseModel):
    """Metadata describing one chunk produced by :class:`TextSplitter`."""

    phrase: str               # the chunk text itself
    chunk_id: int             # 0-based index of this chunk within its document
    chunk_length: int         # len(phrase)
    start_index: int          # start offset of the chunk in the source text
    end_index: int            # end offset (exclusive) in the source text
    document_id: str          # id shared by all chunks of the same document
    text_length: int          # length of the whole source text
    total_chunks: Optional[int]  # back-filled once all chunks are known
class TextSplitterConfig(BaseTextProcessorConfig):
    """Configuration for :class:`TextSplitter`."""

    max_split_length: int = 512  # maximum characters per chunk
    split_stride: int = 0  # overlap length between consecutive chunks
    document_id_key: Optional[str]  # meta key holding a caller-supplied document_id
class TextSplitter(BaseTextPreprocessor):
    """Splits each payload's text into (optionally overlapping) chunks."""

    def preprocess_input(  # type: ignore[override]
        self, input_list: List[TextPayload], config: TextSplitterConfig, **kwargs
    ) -> List[TextPayload]:
        """Split every input payload into chunks of at most
        ``config.max_split_length`` characters, snapping boundaries to
        whitespace.

        Each chunk is wrapped in a new TextPayload whose
        ``meta['splitter']`` holds a TextSplitterPayload describing the
        chunk's position within its document.
        """
        text_splits: List[TextPayload] = []
        for idx, input_data in enumerate(input_list):
            # Reuse a caller-supplied document id from meta when configured;
            # otherwise mint a random hex id for this document.
            if (
                config.document_id_key
                and input_data.meta
                and config.document_id_key in input_data.meta
            ):
                document_id = str(input_data.meta.get(config.document_id_key))
            else:
                document_id = uuid.uuid4().hex
            start_idx = 0
            split_id = 0
            document_splits: List[TextSplitterPayload] = []
            document_length = len(input_data.processed_text)
            while start_idx < document_length:
                if config.split_stride > 0 and start_idx > 0:
                    # Step back by the stride (snapped left to whitespace)
                    # so consecutive chunks overlap.
                    # NOTE(review): if split_stride is close to
                    # max_split_length, start_idx may fail to advance and
                    # this loop could repeat chunks — confirm config
                    # validation happens elsewhere.
                    start_idx = (
                        self._valid_index(
                            input_data.processed_text, start_idx - config.split_stride
                        )
                        + 1
                    )
                # Chunk end, snapped left to the nearest whitespace so
                # words are not cut in half.
                end_idx = self._valid_index(
                    input_data.processed_text,
                    min(start_idx + config.max_split_length, document_length),
                )

                phrase = input_data.processed_text[start_idx:end_idx]
                document_splits.append(
                    TextSplitterPayload(
                        phrase=phrase,
                        chunk_id=split_id,
                        chunk_length=len(phrase),
                        start_index=start_idx,
                        end_index=end_idx,
                        document_id=document_id,
                        text_length=document_length,
                    )
                )
                start_idx = end_idx + 1  # skip the whitespace char at end_idx
                split_id += 1

            # Back-fill total_chunks now that the count is known, then wrap
            # each chunk in its own TextPayload.
            total_splits = len(document_splits)
            for split in document_splits:
                split.total_chunks = total_splits
                payload = TextPayload(
                    processed_text=split.phrase,
                    source_name=input_data.source_name,
                    segmented_data=input_data.segmented_data,
                    meta={**input_data.meta, **{"splitter": split}}
                    if input_data.meta
                    else {"splitter": split},
                )
                text_splits.append(payload)

        return text_splits

    @staticmethod
    def _valid_index(document: str, idx: int) -> int:
        """Clamp ``idx`` to [0, len(document)] and move it left to the
        nearest whitespace (space, newline, or tab).

        Returns 0 when no whitespace occurs at or before ``idx``.
        """
        if idx <= 0:
            return 0
        if idx >= len(document):
            return len(document)
        new_idx = idx
        while new_idx > 0:
            if document[new_idx] in [" ", "\n", "\t"]:
                break
            new_idx -= 1
        return new_idx
|
flexible
|
{
"blob_id": "151cc71ff1a63897238e2cc55269bd20cc6ee577",
"index": 2336,
"step-1": "<mask token>\n\n\nclass TextSplitterConfig(BaseTextProcessorConfig):\n max_split_length: int = 512\n split_stride: int = 0\n document_id_key: Optional[str]\n\n\nclass TextSplitter(BaseTextPreprocessor):\n\n def preprocess_input(self, input_list: List[TextPayload], config:\n TextSplitterConfig, **kwargs) ->List[TextPayload]:\n text_splits: List[TextPayload] = []\n for idx, input_data in enumerate(input_list):\n if (config.document_id_key and input_data.meta and config.\n document_id_key in input_data.meta):\n document_id = str(input_data.meta.get(config.document_id_key))\n else:\n document_id = uuid.uuid4().hex\n start_idx = 0\n split_id = 0\n document_splits: List[TextSplitterPayload] = []\n document_length = len(input_data.processed_text)\n while start_idx < document_length:\n if config.split_stride > 0 and start_idx > 0:\n start_idx = self._valid_index(input_data.processed_text,\n start_idx - config.split_stride) + 1\n end_idx = self._valid_index(input_data.processed_text, min(\n start_idx + config.max_split_length, document_length))\n phrase = input_data.processed_text[start_idx:end_idx]\n document_splits.append(TextSplitterPayload(phrase=phrase,\n chunk_id=split_id, chunk_length=len(phrase),\n start_index=start_idx, end_index=end_idx, document_id=\n document_id, text_length=document_length))\n start_idx = end_idx + 1\n split_id += 1\n total_splits = len(document_splits)\n for split in document_splits:\n split.total_chunks = total_splits\n payload = TextPayload(processed_text=split.phrase,\n source_name=input_data.source_name, segmented_data=\n input_data.segmented_data, meta={**input_data.meta, **{\n 'splitter': split}} if input_data.meta else {'splitter':\n split})\n text_splits.append(payload)\n return text_splits\n\n @staticmethod\n def _valid_index(document: str, idx: int):\n if idx <= 0:\n return 0\n if idx >= len(document):\n return len(document)\n new_idx = idx\n while new_idx > 0:\n if document[new_idx] in [' ', '\\n', '\\t']:\n break\n 
new_idx -= 1\n return new_idx\n",
"step-2": "<mask token>\n\n\nclass TextSplitterPayload(BaseModel):\n phrase: str\n chunk_id: int\n chunk_length: int\n start_index: int\n end_index: int\n document_id: str\n text_length: int\n total_chunks: Optional[int]\n\n\nclass TextSplitterConfig(BaseTextProcessorConfig):\n max_split_length: int = 512\n split_stride: int = 0\n document_id_key: Optional[str]\n\n\nclass TextSplitter(BaseTextPreprocessor):\n\n def preprocess_input(self, input_list: List[TextPayload], config:\n TextSplitterConfig, **kwargs) ->List[TextPayload]:\n text_splits: List[TextPayload] = []\n for idx, input_data in enumerate(input_list):\n if (config.document_id_key and input_data.meta and config.\n document_id_key in input_data.meta):\n document_id = str(input_data.meta.get(config.document_id_key))\n else:\n document_id = uuid.uuid4().hex\n start_idx = 0\n split_id = 0\n document_splits: List[TextSplitterPayload] = []\n document_length = len(input_data.processed_text)\n while start_idx < document_length:\n if config.split_stride > 0 and start_idx > 0:\n start_idx = self._valid_index(input_data.processed_text,\n start_idx - config.split_stride) + 1\n end_idx = self._valid_index(input_data.processed_text, min(\n start_idx + config.max_split_length, document_length))\n phrase = input_data.processed_text[start_idx:end_idx]\n document_splits.append(TextSplitterPayload(phrase=phrase,\n chunk_id=split_id, chunk_length=len(phrase),\n start_index=start_idx, end_index=end_idx, document_id=\n document_id, text_length=document_length))\n start_idx = end_idx + 1\n split_id += 1\n total_splits = len(document_splits)\n for split in document_splits:\n split.total_chunks = total_splits\n payload = TextPayload(processed_text=split.phrase,\n source_name=input_data.source_name, segmented_data=\n input_data.segmented_data, meta={**input_data.meta, **{\n 'splitter': split}} if input_data.meta else {'splitter':\n split})\n text_splits.append(payload)\n return text_splits\n\n @staticmethod\n def 
_valid_index(document: str, idx: int):\n if idx <= 0:\n return 0\n if idx >= len(document):\n return len(document)\n new_idx = idx\n while new_idx > 0:\n if document[new_idx] in [' ', '\\n', '\\t']:\n break\n new_idx -= 1\n return new_idx\n",
"step-3": "<mask token>\nlogger = logging.getLogger(__name__)\n\n\nclass TextSplitterPayload(BaseModel):\n phrase: str\n chunk_id: int\n chunk_length: int\n start_index: int\n end_index: int\n document_id: str\n text_length: int\n total_chunks: Optional[int]\n\n\nclass TextSplitterConfig(BaseTextProcessorConfig):\n max_split_length: int = 512\n split_stride: int = 0\n document_id_key: Optional[str]\n\n\nclass TextSplitter(BaseTextPreprocessor):\n\n def preprocess_input(self, input_list: List[TextPayload], config:\n TextSplitterConfig, **kwargs) ->List[TextPayload]:\n text_splits: List[TextPayload] = []\n for idx, input_data in enumerate(input_list):\n if (config.document_id_key and input_data.meta and config.\n document_id_key in input_data.meta):\n document_id = str(input_data.meta.get(config.document_id_key))\n else:\n document_id = uuid.uuid4().hex\n start_idx = 0\n split_id = 0\n document_splits: List[TextSplitterPayload] = []\n document_length = len(input_data.processed_text)\n while start_idx < document_length:\n if config.split_stride > 0 and start_idx > 0:\n start_idx = self._valid_index(input_data.processed_text,\n start_idx - config.split_stride) + 1\n end_idx = self._valid_index(input_data.processed_text, min(\n start_idx + config.max_split_length, document_length))\n phrase = input_data.processed_text[start_idx:end_idx]\n document_splits.append(TextSplitterPayload(phrase=phrase,\n chunk_id=split_id, chunk_length=len(phrase),\n start_index=start_idx, end_index=end_idx, document_id=\n document_id, text_length=document_length))\n start_idx = end_idx + 1\n split_id += 1\n total_splits = len(document_splits)\n for split in document_splits:\n split.total_chunks = total_splits\n payload = TextPayload(processed_text=split.phrase,\n source_name=input_data.source_name, segmented_data=\n input_data.segmented_data, meta={**input_data.meta, **{\n 'splitter': split}} if input_data.meta else {'splitter':\n split})\n text_splits.append(payload)\n return text_splits\n\n 
@staticmethod\n def _valid_index(document: str, idx: int):\n if idx <= 0:\n return 0\n if idx >= len(document):\n return len(document)\n new_idx = idx\n while new_idx > 0:\n if document[new_idx] in [' ', '\\n', '\\t']:\n break\n new_idx -= 1\n return new_idx\n",
"step-4": "import logging\nfrom typing import List, Optional\nimport uuid\nfrom pydantic import BaseModel\nfrom obsei.payload import TextPayload\nfrom obsei.preprocessor.base_preprocessor import BaseTextPreprocessor, BaseTextProcessorConfig\nlogger = logging.getLogger(__name__)\n\n\nclass TextSplitterPayload(BaseModel):\n phrase: str\n chunk_id: int\n chunk_length: int\n start_index: int\n end_index: int\n document_id: str\n text_length: int\n total_chunks: Optional[int]\n\n\nclass TextSplitterConfig(BaseTextProcessorConfig):\n max_split_length: int = 512\n split_stride: int = 0\n document_id_key: Optional[str]\n\n\nclass TextSplitter(BaseTextPreprocessor):\n\n def preprocess_input(self, input_list: List[TextPayload], config:\n TextSplitterConfig, **kwargs) ->List[TextPayload]:\n text_splits: List[TextPayload] = []\n for idx, input_data in enumerate(input_list):\n if (config.document_id_key and input_data.meta and config.\n document_id_key in input_data.meta):\n document_id = str(input_data.meta.get(config.document_id_key))\n else:\n document_id = uuid.uuid4().hex\n start_idx = 0\n split_id = 0\n document_splits: List[TextSplitterPayload] = []\n document_length = len(input_data.processed_text)\n while start_idx < document_length:\n if config.split_stride > 0 and start_idx > 0:\n start_idx = self._valid_index(input_data.processed_text,\n start_idx - config.split_stride) + 1\n end_idx = self._valid_index(input_data.processed_text, min(\n start_idx + config.max_split_length, document_length))\n phrase = input_data.processed_text[start_idx:end_idx]\n document_splits.append(TextSplitterPayload(phrase=phrase,\n chunk_id=split_id, chunk_length=len(phrase),\n start_index=start_idx, end_index=end_idx, document_id=\n document_id, text_length=document_length))\n start_idx = end_idx + 1\n split_id += 1\n total_splits = len(document_splits)\n for split in document_splits:\n split.total_chunks = total_splits\n payload = TextPayload(processed_text=split.phrase,\n 
source_name=input_data.source_name, segmented_data=\n input_data.segmented_data, meta={**input_data.meta, **{\n 'splitter': split}} if input_data.meta else {'splitter':\n split})\n text_splits.append(payload)\n return text_splits\n\n @staticmethod\n def _valid_index(document: str, idx: int):\n if idx <= 0:\n return 0\n if idx >= len(document):\n return len(document)\n new_idx = idx\n while new_idx > 0:\n if document[new_idx] in [' ', '\\n', '\\t']:\n break\n new_idx -= 1\n return new_idx\n",
"step-5": "import logging\nfrom typing import List, Optional\nimport uuid\n\nfrom pydantic import BaseModel\n\nfrom obsei.payload import TextPayload\nfrom obsei.preprocessor.base_preprocessor import (\n BaseTextPreprocessor,\n BaseTextProcessorConfig,\n)\n\nlogger = logging.getLogger(__name__)\n\n\nclass TextSplitterPayload(BaseModel):\n phrase: str\n chunk_id: int\n chunk_length: int\n start_index: int\n end_index: int\n document_id: str\n text_length: int\n total_chunks: Optional[int]\n\n\nclass TextSplitterConfig(BaseTextProcessorConfig):\n max_split_length: int = 512\n split_stride: int = 0 # overlap length\n document_id_key: Optional[str] # document_id in meta\n\n\nclass TextSplitter(BaseTextPreprocessor):\n def preprocess_input( # type: ignore[override]\n self, input_list: List[TextPayload], config: TextSplitterConfig, **kwargs\n ) -> List[TextPayload]:\n text_splits: List[TextPayload] = []\n for idx, input_data in enumerate(input_list):\n if (\n config.document_id_key\n and input_data.meta\n and config.document_id_key in input_data.meta\n ):\n document_id = str(input_data.meta.get(config.document_id_key))\n else:\n document_id = uuid.uuid4().hex\n start_idx = 0\n split_id = 0\n document_splits: List[TextSplitterPayload] = []\n document_length = len(input_data.processed_text)\n while start_idx < document_length:\n if config.split_stride > 0 and start_idx > 0:\n start_idx = (\n self._valid_index(\n input_data.processed_text, start_idx - config.split_stride\n )\n + 1\n )\n end_idx = self._valid_index(\n input_data.processed_text,\n min(start_idx + config.max_split_length, document_length),\n )\n\n phrase = input_data.processed_text[start_idx:end_idx]\n document_splits.append(\n TextSplitterPayload(\n phrase=phrase,\n chunk_id=split_id,\n chunk_length=len(phrase),\n start_index=start_idx,\n end_index=end_idx,\n document_id=document_id,\n text_length=document_length,\n )\n )\n start_idx = end_idx + 1\n split_id += 1\n\n total_splits = len(document_splits)\n for 
split in document_splits:\n split.total_chunks = total_splits\n payload = TextPayload(\n processed_text=split.phrase,\n source_name=input_data.source_name,\n segmented_data=input_data.segmented_data,\n meta={**input_data.meta, **{\"splitter\": split}}\n if input_data.meta\n else {\"splitter\": split},\n )\n text_splits.append(payload)\n\n return text_splits\n\n @staticmethod\n def _valid_index(document: str, idx: int):\n if idx <= 0:\n return 0\n if idx >= len(document):\n return len(document)\n new_idx = idx\n while new_idx > 0:\n if document[new_idx] in [\" \", \"\\n\", \"\\t\"]:\n break\n new_idx -= 1\n return new_idx\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
<|reserved_special_token_0|>
@click.command()
@click.option('--fastq', help='FASTQ file to subset, can be compressed')
@click.option('--subset', metavar='<INT>', help=
'Take every N reads (default:10)', default=10)
@click.option('--log_file', metavar='<FILE>', default='./subset_fastq.log',
help='File to write log to. (default:./subset_fastq.log)')
@click.option('--log_level', default='INFO', help=
'Log level: DEBUG, INFO, WARNING, ERROR, CRITICAL (default:INFO)')
def main(fastq, subset, log_file, log_level):
"""Subset FASTQ Files.
cat input*.fastq | subset_fastq.py
or
subset_fastq.py --fastq input.fastq
"""
log_level = getattr(logging, log_level.upper(), logging.INFO)
msg_format = '%(asctime)s|%(name)s|[%(levelname)s]: %(message)s'
logging.basicConfig(format=msg_format, datefmt='%m-%d %H:%M', level=
log_level)
log_handler = logging.FileHandler(log_file, mode='w')
formatter = logging.Formatter(msg_format)
log_handler.setFormatter(formatter)
logger = logging.getLogger('subset_fastq')
logger.addHandler(log_handler)
if fastq:
fastq = os.path.abspath(fastq)
logger.info(subset_fastq(fastq, subset))
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def subset_fastq(fastq, subset):
"""Subset FASTQ file. Pick 1/subset reads.
If reverse, fasta <= length
"""
seqio_in = sys.stdin
fh = ''
count = 0
total = 0
if not fastq:
for record in get_seqio_fastq_record(seqio_in):
count += 1
if count == subset:
count = 0
total += 1
sys.stdout.write(record.format('fastq'))
sys.stdout.flush()
else:
fh = return_filehandle(fastq)
for record in get_seqio_fastq_record(fh):
count += 1
if count == subset:
count = 0
total += 1
sys.stdout.write(record.format('fastq'))
sys.stdout.flush()
return 'Output {} reads'.format(total)
@click.command()
@click.option('--fastq', help='FASTQ file to subset, can be compressed')
@click.option('--subset', metavar='<INT>', help=
'Take every N reads (default:10)', default=10)
@click.option('--log_file', metavar='<FILE>', default='./subset_fastq.log',
help='File to write log to. (default:./subset_fastq.log)')
@click.option('--log_level', default='INFO', help=
'Log level: DEBUG, INFO, WARNING, ERROR, CRITICAL (default:INFO)')
def main(fastq, subset, log_file, log_level):
"""Subset FASTQ Files.
cat input*.fastq | subset_fastq.py
or
subset_fastq.py --fastq input.fastq
"""
log_level = getattr(logging, log_level.upper(), logging.INFO)
msg_format = '%(asctime)s|%(name)s|[%(levelname)s]: %(message)s'
logging.basicConfig(format=msg_format, datefmt='%m-%d %H:%M', level=
log_level)
log_handler = logging.FileHandler(log_file, mode='w')
formatter = logging.Formatter(msg_format)
log_handler.setFormatter(formatter)
logger = logging.getLogger('subset_fastq')
logger.addHandler(log_handler)
if fastq:
fastq = os.path.abspath(fastq)
logger.info(subset_fastq(fastq, subset))
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
signal(SIGPIPE, SIG_DFL)
def subset_fastq(fastq, subset):
"""Subset FASTQ file. Pick 1/subset reads.
If reverse, fasta <= length
"""
seqio_in = sys.stdin
fh = ''
count = 0
total = 0
if not fastq:
for record in get_seqio_fastq_record(seqio_in):
count += 1
if count == subset:
count = 0
total += 1
sys.stdout.write(record.format('fastq'))
sys.stdout.flush()
else:
fh = return_filehandle(fastq)
for record in get_seqio_fastq_record(fh):
count += 1
if count == subset:
count = 0
total += 1
sys.stdout.write(record.format('fastq'))
sys.stdout.flush()
return 'Output {} reads'.format(total)
@click.command()
@click.option('--fastq', help='FASTQ file to subset, can be compressed')
@click.option('--subset', metavar='<INT>', help=
              'Take every N reads (default:10)', default=10)
@click.option('--log_file', metavar='<FILE>', default='./subset_fastq.log',
              help='File to write log to. (default:./subset_fastq.log)')
@click.option('--log_level', default='INFO', help=
              'Log level: DEBUG, INFO, WARNING, ERROR, CRITICAL (default:INFO)')
def main(fastq, subset, log_file, log_level):
    """Subset FASTQ Files.

    cat input*.fastq | subset_fastq.py

    or

    subset_fastq.py --fastq input.fastq
    """
    # Resolve the requested level name; unknown names fall back to INFO.
    log_level = getattr(logging, log_level.upper(), logging.INFO)
    msg_format = '%(asctime)s|%(name)s|[%(levelname)s]: %(message)s'
    logging.basicConfig(format=msg_format, datefmt='%m-%d %H:%M', level=
        log_level)
    # Mirror log output into a file, truncated on every run (mode='w').
    log_handler = logging.FileHandler(log_file, mode='w')
    formatter = logging.Formatter(msg_format)
    log_handler.setFormatter(formatter)
    logger = logging.getLogger('subset_fastq')
    logger.addHandler(log_handler)
    if fastq:
        # Normalize to an absolute path; a falsy --fastq means read stdin.
        fastq = os.path.abspath(fastq)
    # subset_fastq returns a summary string ("Output N reads"); log it.
    logger.info(subset_fastq(fastq, subset))
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
import os
import sys
import click
import logging
from signal import signal, SIGPIPE, SIG_DFL
from ..helpers.file_helpers import return_filehandle
from ..helpers.sequence_helpers import get_seqio_fastq_record
signal(SIGPIPE, SIG_DFL)
def subset_fastq(fastq, subset):
"""Subset FASTQ file. Pick 1/subset reads.
If reverse, fasta <= length
"""
seqio_in = sys.stdin
fh = ''
count = 0
total = 0
if not fastq:
for record in get_seqio_fastq_record(seqio_in):
count += 1
if count == subset:
count = 0
total += 1
sys.stdout.write(record.format('fastq'))
sys.stdout.flush()
else:
fh = return_filehandle(fastq)
for record in get_seqio_fastq_record(fh):
count += 1
if count == subset:
count = 0
total += 1
sys.stdout.write(record.format('fastq'))
sys.stdout.flush()
return 'Output {} reads'.format(total)
@click.command()
@click.option('--fastq', help='FASTQ file to subset, can be compressed')
@click.option('--subset', metavar='<INT>', help=
'Take every N reads (default:10)', default=10)
@click.option('--log_file', metavar='<FILE>', default='./subset_fastq.log',
help='File to write log to. (default:./subset_fastq.log)')
@click.option('--log_level', default='INFO', help=
'Log level: DEBUG, INFO, WARNING, ERROR, CRITICAL (default:INFO)')
def main(fastq, subset, log_file, log_level):
"""Subset FASTQ Files.
cat input*.fastq | subset_fastq.py
or
subset_fastq.py --fastq input.fastq
"""
log_level = getattr(logging, log_level.upper(), logging.INFO)
msg_format = '%(asctime)s|%(name)s|[%(levelname)s]: %(message)s'
logging.basicConfig(format=msg_format, datefmt='%m-%d %H:%M', level=
log_level)
log_handler = logging.FileHandler(log_file, mode='w')
formatter = logging.Formatter(msg_format)
log_handler.setFormatter(formatter)
logger = logging.getLogger('subset_fastq')
logger.addHandler(log_handler)
if fastq:
fastq = os.path.abspath(fastq)
logger.info(subset_fastq(fastq, subset))
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
#!/usr/bin/env python
import os
import sys
import click
import logging
from signal import signal, SIGPIPE, SIG_DFL
from ..helpers.file_helpers import return_filehandle
from ..helpers.sequence_helpers import get_seqio_fastq_record
signal(SIGPIPE, SIG_DFL)
def subset_fastq(fastq, subset):
'''Subset FASTQ file. Pick 1/subset reads.
If reverse, fasta <= length
'''
seqio_in = sys.stdin
fh = ''
count = 0
total = 0
if not fastq: # Check STDIN
for record in get_seqio_fastq_record(seqio_in): # get SeqIO record
count += 1
if count == subset:
count = 0
total += 1
sys.stdout.write(record.format('fastq'))
sys.stdout.flush()
else: # Check FASTA
fh = return_filehandle(fastq)
for record in get_seqio_fastq_record(fh): # Get SeqIO record
count += 1
if count == subset:
count = 0
total += 1
sys.stdout.write(record.format('fastq'))
sys.stdout.flush()
return 'Output {} reads'.format(total)
@click.command()
@click.option('--fastq',
help='''FASTQ file to subset, can be compressed''')
@click.option('--subset', metavar = '<INT>',
help='''Take every N reads (default:10)''', default=10)
@click.option('--log_file', metavar = '<FILE>', default='./subset_fastq.log',
help='''File to write log to. (default:./subset_fastq.log)''')
@click.option('--log_level', default='INFO',
help='''Log level: DEBUG, INFO, WARNING, ERROR, CRITICAL (default:INFO)''')
def main(fastq, subset, log_file, log_level):
'''Subset FASTQ Files.
cat input*.fastq | subset_fastq.py
or
subset_fastq.py --fastq input.fastq
'''
log_level = getattr(logging, log_level.upper(), logging.INFO)
msg_format = '%(asctime)s|%(name)s|[%(levelname)s]: %(message)s'
logging.basicConfig(format=msg_format, datefmt='%m-%d %H:%M',
level=log_level)
log_handler = logging.FileHandler(log_file, mode='w')
formatter = logging.Formatter(msg_format)
log_handler.setFormatter(formatter)
logger = logging.getLogger('subset_fastq')
logger.addHandler(log_handler)
if fastq:
fastq = os.path.abspath(fastq)
logger.info(subset_fastq(fastq, subset))
if __name__ == '__main__':
main()
|
flexible
|
{
"blob_id": "873a53983e3aeb66bd290450fb9c15a552bd163c",
"index": 4017,
"step-1": "<mask token>\n\n\n@click.command()\n@click.option('--fastq', help='FASTQ file to subset, can be compressed')\n@click.option('--subset', metavar='<INT>', help=\n 'Take every N reads (default:10)', default=10)\n@click.option('--log_file', metavar='<FILE>', default='./subset_fastq.log',\n help='File to write log to. (default:./subset_fastq.log)')\n@click.option('--log_level', default='INFO', help=\n 'Log level: DEBUG, INFO, WARNING, ERROR, CRITICAL (default:INFO)')\ndef main(fastq, subset, log_file, log_level):\n \"\"\"Subset FASTQ Files.\n\n cat input*.fastq | subset_fastq.py\n\n or\n\n subset_fastq.py --fastq input.fastq\n \"\"\"\n log_level = getattr(logging, log_level.upper(), logging.INFO)\n msg_format = '%(asctime)s|%(name)s|[%(levelname)s]: %(message)s'\n logging.basicConfig(format=msg_format, datefmt='%m-%d %H:%M', level=\n log_level)\n log_handler = logging.FileHandler(log_file, mode='w')\n formatter = logging.Formatter(msg_format)\n log_handler.setFormatter(formatter)\n logger = logging.getLogger('subset_fastq')\n logger.addHandler(log_handler)\n if fastq:\n fastq = os.path.abspath(fastq)\n logger.info(subset_fastq(fastq, subset))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef subset_fastq(fastq, subset):\n \"\"\"Subset FASTQ file. Pick 1/subset reads.\n\n If reverse, fasta <= length\n \"\"\"\n seqio_in = sys.stdin\n fh = ''\n count = 0\n total = 0\n if not fastq:\n for record in get_seqio_fastq_record(seqio_in):\n count += 1\n if count == subset:\n count = 0\n total += 1\n sys.stdout.write(record.format('fastq'))\n sys.stdout.flush()\n else:\n fh = return_filehandle(fastq)\n for record in get_seqio_fastq_record(fh):\n count += 1\n if count == subset:\n count = 0\n total += 1\n sys.stdout.write(record.format('fastq'))\n sys.stdout.flush()\n return 'Output {} reads'.format(total)\n\n\n@click.command()\n@click.option('--fastq', help='FASTQ file to subset, can be compressed')\n@click.option('--subset', metavar='<INT>', help=\n 'Take every N reads (default:10)', default=10)\n@click.option('--log_file', metavar='<FILE>', default='./subset_fastq.log',\n help='File to write log to. (default:./subset_fastq.log)')\n@click.option('--log_level', default='INFO', help=\n 'Log level: DEBUG, INFO, WARNING, ERROR, CRITICAL (default:INFO)')\ndef main(fastq, subset, log_file, log_level):\n \"\"\"Subset FASTQ Files.\n\n cat input*.fastq | subset_fastq.py\n\n or\n\n subset_fastq.py --fastq input.fastq\n \"\"\"\n log_level = getattr(logging, log_level.upper(), logging.INFO)\n msg_format = '%(asctime)s|%(name)s|[%(levelname)s]: %(message)s'\n logging.basicConfig(format=msg_format, datefmt='%m-%d %H:%M', level=\n log_level)\n log_handler = logging.FileHandler(log_file, mode='w')\n formatter = logging.Formatter(msg_format)\n log_handler.setFormatter(formatter)\n logger = logging.getLogger('subset_fastq')\n logger.addHandler(log_handler)\n if fastq:\n fastq = os.path.abspath(fastq)\n logger.info(subset_fastq(fastq, subset))\n\n\n<mask token>\n",
"step-3": "<mask token>\nsignal(SIGPIPE, SIG_DFL)\n\n\ndef subset_fastq(fastq, subset):\n \"\"\"Subset FASTQ file. Pick 1/subset reads.\n\n If reverse, fasta <= length\n \"\"\"\n seqio_in = sys.stdin\n fh = ''\n count = 0\n total = 0\n if not fastq:\n for record in get_seqio_fastq_record(seqio_in):\n count += 1\n if count == subset:\n count = 0\n total += 1\n sys.stdout.write(record.format('fastq'))\n sys.stdout.flush()\n else:\n fh = return_filehandle(fastq)\n for record in get_seqio_fastq_record(fh):\n count += 1\n if count == subset:\n count = 0\n total += 1\n sys.stdout.write(record.format('fastq'))\n sys.stdout.flush()\n return 'Output {} reads'.format(total)\n\n\n@click.command()\n@click.option('--fastq', help='FASTQ file to subset, can be compressed')\n@click.option('--subset', metavar='<INT>', help=\n 'Take every N reads (default:10)', default=10)\n@click.option('--log_file', metavar='<FILE>', default='./subset_fastq.log',\n help='File to write log to. (default:./subset_fastq.log)')\n@click.option('--log_level', default='INFO', help=\n 'Log level: DEBUG, INFO, WARNING, ERROR, CRITICAL (default:INFO)')\ndef main(fastq, subset, log_file, log_level):\n \"\"\"Subset FASTQ Files.\n\n cat input*.fastq | subset_fastq.py\n\n or\n\n subset_fastq.py --fastq input.fastq\n \"\"\"\n log_level = getattr(logging, log_level.upper(), logging.INFO)\n msg_format = '%(asctime)s|%(name)s|[%(levelname)s]: %(message)s'\n logging.basicConfig(format=msg_format, datefmt='%m-%d %H:%M', level=\n log_level)\n log_handler = logging.FileHandler(log_file, mode='w')\n formatter = logging.Formatter(msg_format)\n log_handler.setFormatter(formatter)\n logger = logging.getLogger('subset_fastq')\n logger.addHandler(log_handler)\n if fastq:\n fastq = os.path.abspath(fastq)\n logger.info(subset_fastq(fastq, subset))\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "import os\nimport sys\nimport click\nimport logging\nfrom signal import signal, SIGPIPE, SIG_DFL\nfrom ..helpers.file_helpers import return_filehandle\nfrom ..helpers.sequence_helpers import get_seqio_fastq_record\nsignal(SIGPIPE, SIG_DFL)\n\n\ndef subset_fastq(fastq, subset):\n \"\"\"Subset FASTQ file. Pick 1/subset reads.\n\n If reverse, fasta <= length\n \"\"\"\n seqio_in = sys.stdin\n fh = ''\n count = 0\n total = 0\n if not fastq:\n for record in get_seqio_fastq_record(seqio_in):\n count += 1\n if count == subset:\n count = 0\n total += 1\n sys.stdout.write(record.format('fastq'))\n sys.stdout.flush()\n else:\n fh = return_filehandle(fastq)\n for record in get_seqio_fastq_record(fh):\n count += 1\n if count == subset:\n count = 0\n total += 1\n sys.stdout.write(record.format('fastq'))\n sys.stdout.flush()\n return 'Output {} reads'.format(total)\n\n\n@click.command()\n@click.option('--fastq', help='FASTQ file to subset, can be compressed')\n@click.option('--subset', metavar='<INT>', help=\n 'Take every N reads (default:10)', default=10)\n@click.option('--log_file', metavar='<FILE>', default='./subset_fastq.log',\n help='File to write log to. 
(default:./subset_fastq.log)')\n@click.option('--log_level', default='INFO', help=\n 'Log level: DEBUG, INFO, WARNING, ERROR, CRITICAL (default:INFO)')\ndef main(fastq, subset, log_file, log_level):\n \"\"\"Subset FASTQ Files.\n\n cat input*.fastq | subset_fastq.py\n\n or\n\n subset_fastq.py --fastq input.fastq\n \"\"\"\n log_level = getattr(logging, log_level.upper(), logging.INFO)\n msg_format = '%(asctime)s|%(name)s|[%(levelname)s]: %(message)s'\n logging.basicConfig(format=msg_format, datefmt='%m-%d %H:%M', level=\n log_level)\n log_handler = logging.FileHandler(log_file, mode='w')\n formatter = logging.Formatter(msg_format)\n log_handler.setFormatter(formatter)\n logger = logging.getLogger('subset_fastq')\n logger.addHandler(log_handler)\n if fastq:\n fastq = os.path.abspath(fastq)\n logger.info(subset_fastq(fastq, subset))\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "#!/usr/bin/env python\n\nimport os\nimport sys\nimport click\nimport logging\nfrom signal import signal, SIGPIPE, SIG_DFL\nfrom ..helpers.file_helpers import return_filehandle\nfrom ..helpers.sequence_helpers import get_seqio_fastq_record\n\nsignal(SIGPIPE, SIG_DFL)\n\n\ndef subset_fastq(fastq, subset):\n '''Subset FASTQ file. Pick 1/subset reads.\n\n If reverse, fasta <= length\n '''\n seqio_in = sys.stdin\n fh = ''\n count = 0\n total = 0\n if not fastq: # Check STDIN\n for record in get_seqio_fastq_record(seqio_in): # get SeqIO record\n count += 1\n if count == subset:\n count = 0\n total += 1\n sys.stdout.write(record.format('fastq'))\n sys.stdout.flush()\n else: # Check FASTA\n fh = return_filehandle(fastq)\n for record in get_seqio_fastq_record(fh): # Get SeqIO record\n count += 1\n if count == subset:\n count = 0\n total += 1\n sys.stdout.write(record.format('fastq'))\n sys.stdout.flush()\n return 'Output {} reads'.format(total)\n\n\n@click.command() \n@click.option('--fastq',\n help='''FASTQ file to subset, can be compressed''')\n@click.option('--subset', metavar = '<INT>',\n help='''Take every N reads (default:10)''', default=10)\n@click.option('--log_file', metavar = '<FILE>', default='./subset_fastq.log',\n help='''File to write log to. 
(default:./subset_fastq.log)''')\n@click.option('--log_level', default='INFO',\n help='''Log level: DEBUG, INFO, WARNING, ERROR, CRITICAL (default:INFO)''')\ndef main(fastq, subset, log_file, log_level):\n '''Subset FASTQ Files.\n\n cat input*.fastq | subset_fastq.py\n\n or\n\n subset_fastq.py --fastq input.fastq\n '''\n log_level = getattr(logging, log_level.upper(), logging.INFO)\n msg_format = '%(asctime)s|%(name)s|[%(levelname)s]: %(message)s'\n logging.basicConfig(format=msg_format, datefmt='%m-%d %H:%M',\n level=log_level)\n log_handler = logging.FileHandler(log_file, mode='w')\n formatter = logging.Formatter(msg_format)\n log_handler.setFormatter(formatter)\n logger = logging.getLogger('subset_fastq')\n logger.addHandler(log_handler)\n if fastq:\n fastq = os.path.abspath(fastq)\n logger.info(subset_fastq(fastq, subset))\n\n\nif __name__ == '__main__':\n main()\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
import logging
class ConsoleLogger:
handlers = [
(logging.StreamHandler,
dict(),
"[%(name)s]\t %(asctime)s [%(levelname)s] %(message)s ",
logging.DEBUG)
]
def set_level(self, level):
self.logger.setLevel(level)
def debug(self, message):
self.logger.debug(message)
def info(self, message):
self.logger.info(message)
def warning(self, message):
self.logger.warning(message)
def error(self, message):
self.logger.error(message)
def exception(self, message):
self.logger.exception(message)
def __init__(self, name=__name__, default_level=logging.DEBUG):
self.logger = logging.Logger(name)
if not self.logger.handlers or len(self.logger.handlers) < 1:
for handler_class, params, formatted, level in self.handlers:
handler = handler_class(**params)
handler.setFormatter(logging.Formatter(formatted))
handler.setLevel(level if not default_level else default_level)
self.logger.addHandler(handler)
|
normal
|
{
"blob_id": "5299f2c66fd287be667ecbe11b8470263eafab5c",
"index": 702,
"step-1": "<mask token>\n\n\nclass ConsoleLogger:\n <mask token>\n\n def set_level(self, level):\n self.logger.setLevel(level)\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __init__(self, name=__name__, default_level=logging.DEBUG):\n self.logger = logging.Logger(name)\n if not self.logger.handlers or len(self.logger.handlers) < 1:\n for handler_class, params, formatted, level in self.handlers:\n handler = handler_class(**params)\n handler.setFormatter(logging.Formatter(formatted))\n handler.setLevel(level if not default_level else default_level)\n self.logger.addHandler(handler)\n",
"step-2": "<mask token>\n\n\nclass ConsoleLogger:\n <mask token>\n\n def set_level(self, level):\n self.logger.setLevel(level)\n\n def debug(self, message):\n self.logger.debug(message)\n\n def info(self, message):\n self.logger.info(message)\n <mask token>\n <mask token>\n\n def exception(self, message):\n self.logger.exception(message)\n\n def __init__(self, name=__name__, default_level=logging.DEBUG):\n self.logger = logging.Logger(name)\n if not self.logger.handlers or len(self.logger.handlers) < 1:\n for handler_class, params, formatted, level in self.handlers:\n handler = handler_class(**params)\n handler.setFormatter(logging.Formatter(formatted))\n handler.setLevel(level if not default_level else default_level)\n self.logger.addHandler(handler)\n",
"step-3": "<mask token>\n\n\nclass ConsoleLogger:\n <mask token>\n\n def set_level(self, level):\n self.logger.setLevel(level)\n\n def debug(self, message):\n self.logger.debug(message)\n\n def info(self, message):\n self.logger.info(message)\n\n def warning(self, message):\n self.logger.warning(message)\n <mask token>\n\n def exception(self, message):\n self.logger.exception(message)\n\n def __init__(self, name=__name__, default_level=logging.DEBUG):\n self.logger = logging.Logger(name)\n if not self.logger.handlers or len(self.logger.handlers) < 1:\n for handler_class, params, formatted, level in self.handlers:\n handler = handler_class(**params)\n handler.setFormatter(logging.Formatter(formatted))\n handler.setLevel(level if not default_level else default_level)\n self.logger.addHandler(handler)\n",
"step-4": "<mask token>\n\n\nclass ConsoleLogger:\n <mask token>\n\n def set_level(self, level):\n self.logger.setLevel(level)\n\n def debug(self, message):\n self.logger.debug(message)\n\n def info(self, message):\n self.logger.info(message)\n\n def warning(self, message):\n self.logger.warning(message)\n\n def error(self, message):\n self.logger.error(message)\n\n def exception(self, message):\n self.logger.exception(message)\n\n def __init__(self, name=__name__, default_level=logging.DEBUG):\n self.logger = logging.Logger(name)\n if not self.logger.handlers or len(self.logger.handlers) < 1:\n for handler_class, params, formatted, level in self.handlers:\n handler = handler_class(**params)\n handler.setFormatter(logging.Formatter(formatted))\n handler.setLevel(level if not default_level else default_level)\n self.logger.addHandler(handler)\n",
"step-5": "import logging\n\n\nclass ConsoleLogger:\n\n handlers = [\n (logging.StreamHandler,\n dict(),\n \"[%(name)s]\\t %(asctime)s [%(levelname)s] %(message)s \",\n logging.DEBUG)\n ]\n\n def set_level(self, level):\n self.logger.setLevel(level)\n\n def debug(self, message):\n self.logger.debug(message)\n\n def info(self, message):\n self.logger.info(message)\n\n def warning(self, message):\n self.logger.warning(message)\n\n def error(self, message):\n self.logger.error(message)\n\n def exception(self, message):\n self.logger.exception(message)\n\n def __init__(self, name=__name__, default_level=logging.DEBUG):\n self.logger = logging.Logger(name)\n if not self.logger.handlers or len(self.logger.handlers) < 1:\n for handler_class, params, formatted, level in self.handlers:\n handler = handler_class(**params)\n handler.setFormatter(logging.Formatter(formatted))\n handler.setLevel(level if not default_level else default_level)\n\n self.logger.addHandler(handler)\n",
"step-ids": [
3,
6,
7,
8,
11
]
}
|
[
3,
6,
7,
8,
11
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
name = 'flask_gunicorn'
workers = multiprocessing.cpu_count() * 2 + 1
loglevel = 'debug'
bind = f'0.0.0.0:18080'
<|reserved_special_token_1|>
import multiprocessing
name = 'flask_gunicorn'
workers = multiprocessing.cpu_count() * 2 + 1
loglevel = 'debug'
bind = f'0.0.0.0:18080'
<|reserved_special_token_1|>
import multiprocessing
name = "flask_gunicorn"
workers = multiprocessing.cpu_count() * 2 + 1
loglevel = "debug"
bind = f"0.0.0.0:18080"
|
flexible
|
{
"blob_id": "2ad326f739b42b9c7c252078b8c28e90da17b95d",
"index": 1802,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nname = 'flask_gunicorn'\nworkers = multiprocessing.cpu_count() * 2 + 1\nloglevel = 'debug'\nbind = f'0.0.0.0:18080'\n",
"step-3": "import multiprocessing\nname = 'flask_gunicorn'\nworkers = multiprocessing.cpu_count() * 2 + 1\nloglevel = 'debug'\nbind = f'0.0.0.0:18080'\n",
"step-4": "import multiprocessing\n\nname = \"flask_gunicorn\"\nworkers = multiprocessing.cpu_count() * 2 + 1\nloglevel = \"debug\"\nbind = f\"0.0.0.0:18080\"\n\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
urlpatterns = [url('^$', jsonre_data), url('^serialize/$', SerializeView.
as_view()), url('^serialize/$', SerializeView.as_view()), url('^all/$',
Serializeall.as_view()), url('^cbv1/$', jsonView.as_view()), url(
'^cbv2/$', JsonView2.as_view()), url('^api/updates/', include(
'updateapp.api.urls')), path('admin/', admin.site.urls)]
<|reserved_special_token_1|>
<|reserved_special_token_0|>
from django.contrib import admin
from django.urls import path
from updateapp.views import jsonre_data, jsonView, JsonView2, SerializeView, Serializeall
from updateapp.api import views, urls
from django.conf.urls import url, include
urlpatterns = [url('^$', jsonre_data), url('^serialize/$', SerializeView.
as_view()), url('^serialize/$', SerializeView.as_view()), url('^all/$',
Serializeall.as_view()), url('^cbv1/$', jsonView.as_view()), url(
'^cbv2/$', JsonView2.as_view()), url('^api/updates/', include(
'updateapp.api.urls')), path('admin/', admin.site.urls)]
<|reserved_special_token_1|>
"""updateimage URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
from updateapp.views import jsonre_data,jsonView,JsonView2,SerializeView,Serializeall
from updateapp.api import views,urls
from django.conf.urls import url,include
urlpatterns = [
url(r'^$',jsonre_data),
url(r'^serialize/$',SerializeView.as_view()),
url(r'^serialize/$',SerializeView.as_view()),
url(r'^all/$',Serializeall.as_view()),
url(r'^cbv1/$',jsonView.as_view()),
url(r'^cbv2/$',JsonView2.as_view()),
url(r'^api/updates/',include('updateapp.api.urls')),
path('admin/', admin.site.urls),
]
|
flexible
|
{
"blob_id": "b3b5f7eeb81e10a51eb0322bc5278d33ee5f8e97",
"index": 9222,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nurlpatterns = [url('^$', jsonre_data), url('^serialize/$', SerializeView.\n as_view()), url('^serialize/$', SerializeView.as_view()), url('^all/$',\n Serializeall.as_view()), url('^cbv1/$', jsonView.as_view()), url(\n '^cbv2/$', JsonView2.as_view()), url('^api/updates/', include(\n 'updateapp.api.urls')), path('admin/', admin.site.urls)]\n",
"step-3": "<mask token>\nfrom django.contrib import admin\nfrom django.urls import path\nfrom updateapp.views import jsonre_data, jsonView, JsonView2, SerializeView, Serializeall\nfrom updateapp.api import views, urls\nfrom django.conf.urls import url, include\nurlpatterns = [url('^$', jsonre_data), url('^serialize/$', SerializeView.\n as_view()), url('^serialize/$', SerializeView.as_view()), url('^all/$',\n Serializeall.as_view()), url('^cbv1/$', jsonView.as_view()), url(\n '^cbv2/$', JsonView2.as_view()), url('^api/updates/', include(\n 'updateapp.api.urls')), path('admin/', admin.site.urls)]\n",
"step-4": "\"\"\"updateimage URL Configuration\n\nThe `urlpatterns` list routes URLs to views. For more information please see:\n https://docs.djangoproject.com/en/2.1/topics/http/urls/\nExamples:\nFunction views\n 1. Add an import: from my_app import views\n 2. Add a URL to urlpatterns: path('', views.home, name='home')\nClass-based views\n 1. Add an import: from other_app.views import Home\n 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')\nIncluding another URLconf\n 1. Import the include() function: from django.urls import include, path\n 2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))\n\"\"\"\nfrom django.contrib import admin\nfrom django.urls import path\nfrom updateapp.views import jsonre_data,jsonView,JsonView2,SerializeView,Serializeall\nfrom updateapp.api import views,urls\nfrom django.conf.urls import url,include\n\nurlpatterns = [\n url(r'^$',jsonre_data),\n \n url(r'^serialize/$',SerializeView.as_view()),\n \n url(r'^serialize/$',SerializeView.as_view()),\n \n url(r'^all/$',Serializeall.as_view()),\n url(r'^cbv1/$',jsonView.as_view()),\n url(r'^cbv2/$',JsonView2.as_view()),\n url(r'^api/updates/',include('updateapp.api.urls')),\n\n path('admin/', admin.site.urls),\n\n]\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from editor.editor import Editor
e = Editor()
e.showWindow()
|
normal
|
{
"blob_id": "46d6771fd9f589e2498cd019ba72232cbda06e5a",
"index": 3108,
"step-1": "<mask token>\n",
"step-2": "<mask token>\ne.showWindow()\n",
"step-3": "<mask token>\ne = Editor()\ne.showWindow()\n",
"step-4": "from editor.editor import Editor\ne = Editor()\ne.showWindow()\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
#!/usr/bin/env python3
import optparse
from bs4 import BeautifulSoup
import re
import jieba
import pickle
import requests
import asyncio
if __name__ == '__main__':
# 读取10000个关键词
fs = open("./src/keywords.txt", "rb")
keywords = fs.read().decode("utf-8").split(",")
fs.close()
# 找出特征
def find_features(doc):
words = set(doc)
features = {}
for word in keywords:
features["contains %s" % word] = (word in words)
return features
# 读取预先做好的nltk分词器
fs = open('./src/my_classifier.pickle', 'rb')
classifier = pickle.load(fs)
# 匹配中文字符
regex = re.compile("[\u4e00-\u9fa5]")
p = optparse.OptionParser(usage="usage: %prog [options] arg1 arg2", version="%prog 0.1", prog="url-tagger")
p.add_option("--url", "-u", help="Your url")
p.add_option("--file", "-f", help="Your url file. One line one url")
(options, arguments) = p.parse_args()
url_list = []
for key, value in options.__dict__.items():
if value is not None:
print("%s: %s" % (key, value))
if key is "url":
url_list.append(value)
else:
url_file = open(value, "rb+")
for line in url_file.readlines():
url_list.append(str(line, encoding="utf-8").strip())
# 异步发起http请求
@asyncio.coroutine
def get_docs(url):
response = requests.get(url=url, headers={'Accept-Encoding': ''})
# print(response.apparent_encoding)
html = str(response.content, encoding=response.apparent_encoding, errors="ignore")
soup = BeautifulSoup(html, "lxml")
for script in soup(["script", "style"]):
script.extract()
text = soup.get_text()
lines = (line.strip() for line in text.splitlines())
chunks = (phrase.strip() for line in lines for phrase in line.split(" "))
text = "".join(chunk for chunk in chunks if chunk)
# print(text)
return url, text
loop = asyncio.get_event_loop()
tasks = list(map(lambda url: asyncio.ensure_future(get_docs(url)), url_list))
data_list = list(loop.run_until_complete(asyncio.gather(*tasks)))
loop.close()
# 分类器进行分类
results = [(url, classifier.classify(find_features(jieba.lcut("".join(regex.findall(data)))))) for (url, data)
in data_list]
# 打印结果
for (url, category) in results:
print("%s: %s" % (url, category))
|
normal
|
{
"blob_id": "88590aef975f7e473ef964ee0c4004cff7e24b07",
"index": 1049,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif __name__ == '__main__':\n fs = open('./src/keywords.txt', 'rb')\n keywords = fs.read().decode('utf-8').split(',')\n fs.close()\n\n def find_features(doc):\n words = set(doc)\n features = {}\n for word in keywords:\n features['contains %s' % word] = word in words\n return features\n fs = open('./src/my_classifier.pickle', 'rb')\n classifier = pickle.load(fs)\n regex = re.compile('[一-龥]')\n p = optparse.OptionParser(usage='usage: %prog [options] arg1 arg2',\n version='%prog 0.1', prog='url-tagger')\n p.add_option('--url', '-u', help='Your url')\n p.add_option('--file', '-f', help='Your url file. One line one url')\n options, arguments = p.parse_args()\n url_list = []\n for key, value in options.__dict__.items():\n if value is not None:\n print('%s: %s' % (key, value))\n if key is 'url':\n url_list.append(value)\n else:\n url_file = open(value, 'rb+')\n for line in url_file.readlines():\n url_list.append(str(line, encoding='utf-8').strip())\n\n @asyncio.coroutine\n def get_docs(url):\n response = requests.get(url=url, headers={'Accept-Encoding': ''})\n html = str(response.content, encoding=response.apparent_encoding,\n errors='ignore')\n soup = BeautifulSoup(html, 'lxml')\n for script in soup(['script', 'style']):\n script.extract()\n text = soup.get_text()\n lines = (line.strip() for line in text.splitlines())\n chunks = (phrase.strip() for line in lines for phrase in line.split\n (' '))\n text = ''.join(chunk for chunk in chunks if chunk)\n return url, text\n loop = asyncio.get_event_loop()\n tasks = list(map(lambda url: asyncio.ensure_future(get_docs(url)),\n url_list))\n data_list = list(loop.run_until_complete(asyncio.gather(*tasks)))\n loop.close()\n results = [(url, classifier.classify(find_features(jieba.lcut(''.join(\n regex.findall(data)))))) for url, data in data_list]\n for url, category in results:\n print('%s: %s' % (url, category))\n",
"step-3": "import optparse\nfrom bs4 import BeautifulSoup\nimport re\nimport jieba\nimport pickle\nimport requests\nimport asyncio\nif __name__ == '__main__':\n fs = open('./src/keywords.txt', 'rb')\n keywords = fs.read().decode('utf-8').split(',')\n fs.close()\n\n def find_features(doc):\n words = set(doc)\n features = {}\n for word in keywords:\n features['contains %s' % word] = word in words\n return features\n fs = open('./src/my_classifier.pickle', 'rb')\n classifier = pickle.load(fs)\n regex = re.compile('[一-龥]')\n p = optparse.OptionParser(usage='usage: %prog [options] arg1 arg2',\n version='%prog 0.1', prog='url-tagger')\n p.add_option('--url', '-u', help='Your url')\n p.add_option('--file', '-f', help='Your url file. One line one url')\n options, arguments = p.parse_args()\n url_list = []\n for key, value in options.__dict__.items():\n if value is not None:\n print('%s: %s' % (key, value))\n if key is 'url':\n url_list.append(value)\n else:\n url_file = open(value, 'rb+')\n for line in url_file.readlines():\n url_list.append(str(line, encoding='utf-8').strip())\n\n @asyncio.coroutine\n def get_docs(url):\n response = requests.get(url=url, headers={'Accept-Encoding': ''})\n html = str(response.content, encoding=response.apparent_encoding,\n errors='ignore')\n soup = BeautifulSoup(html, 'lxml')\n for script in soup(['script', 'style']):\n script.extract()\n text = soup.get_text()\n lines = (line.strip() for line in text.splitlines())\n chunks = (phrase.strip() for line in lines for phrase in line.split\n (' '))\n text = ''.join(chunk for chunk in chunks if chunk)\n return url, text\n loop = asyncio.get_event_loop()\n tasks = list(map(lambda url: asyncio.ensure_future(get_docs(url)),\n url_list))\n data_list = list(loop.run_until_complete(asyncio.gather(*tasks)))\n loop.close()\n results = [(url, classifier.classify(find_features(jieba.lcut(''.join(\n regex.findall(data)))))) for url, data in data_list]\n for url, category in results:\n print('%s: %s' % (url, 
category))\n",
"step-4": "#!/usr/bin/env python3\n\nimport optparse\nfrom bs4 import BeautifulSoup\nimport re\nimport jieba\nimport pickle\nimport requests\nimport asyncio\n\nif __name__ == '__main__':\n\n # 读取10000个关键词\n fs = open(\"./src/keywords.txt\", \"rb\")\n keywords = fs.read().decode(\"utf-8\").split(\",\")\n fs.close()\n\n # 找出特征\n def find_features(doc):\n words = set(doc)\n features = {}\n for word in keywords:\n features[\"contains %s\" % word] = (word in words)\n return features\n\n # 读取预先做好的nltk分词器\n fs = open('./src/my_classifier.pickle', 'rb')\n classifier = pickle.load(fs)\n\n # 匹配中文字符\n regex = re.compile(\"[\\u4e00-\\u9fa5]\")\n\n p = optparse.OptionParser(usage=\"usage: %prog [options] arg1 arg2\", version=\"%prog 0.1\", prog=\"url-tagger\")\n p.add_option(\"--url\", \"-u\", help=\"Your url\")\n p.add_option(\"--file\", \"-f\", help=\"Your url file. One line one url\")\n (options, arguments) = p.parse_args()\n\n url_list = []\n for key, value in options.__dict__.items():\n if value is not None:\n print(\"%s: %s\" % (key, value))\n if key is \"url\":\n url_list.append(value)\n else:\n url_file = open(value, \"rb+\")\n for line in url_file.readlines():\n url_list.append(str(line, encoding=\"utf-8\").strip())\n\n\n # 异步发起http请求\n @asyncio.coroutine\n def get_docs(url):\n response = requests.get(url=url, headers={'Accept-Encoding': ''})\n # print(response.apparent_encoding)\n html = str(response.content, encoding=response.apparent_encoding, errors=\"ignore\")\n soup = BeautifulSoup(html, \"lxml\")\n for script in soup([\"script\", \"style\"]):\n script.extract()\n text = soup.get_text()\n lines = (line.strip() for line in text.splitlines())\n chunks = (phrase.strip() for line in lines for phrase in line.split(\" \"))\n text = \"\".join(chunk for chunk in chunks if chunk)\n # print(text)\n return url, text\n\n loop = asyncio.get_event_loop()\n tasks = list(map(lambda url: asyncio.ensure_future(get_docs(url)), url_list))\n data_list = 
list(loop.run_until_complete(asyncio.gather(*tasks)))\n loop.close()\n\n # 分类器进行分类\n results = [(url, classifier.classify(find_features(jieba.lcut(\"\".join(regex.findall(data)))))) for (url, data)\n in data_list]\n\n # 打印结果\n for (url, category) in results:\n print(\"%s: %s\" % (url, category))\n\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# -*- coding: utf-8 -*-
"""Part of speech mapping constants and functions for NLPIR/ICTCLAS.
This module is used by :mod:`pynlpir` to format segmented words for output.
"""
import logging
logger = logging.getLogger("pynlpir.pos_map")
#: A dictionary that maps part of speech codes returned by NLPIR to
#: human-readable names (English and Chinese).
POS_MAP = {
"n": (
"名词",
"noun",
{
"nr": (
"人名",
"personal name",
{
"nr1": ("汉语姓氏", "Chinese surname"),
"nr2": ("汉语名字", "Chinese given name"),
"nrj": ("日语人名", "Japanese personal name"),
"nrf": ("音译人名", "transcribed personal name"),
},
),
"ns": (
"地名",
"toponym",
{
"nsf": ("音译地名", "transcribed toponym"),
},
),
"nt": ("机构团体名", "organization/group name"),
"nz": ("其它专名", "other proper noun"),
"nl": ("名词性惯用语", "noun phrase"),
"ng": ("名词性语素", "noun morpheme"),
},
),
"t": (
"时间词",
"time word",
{
"tg": ("时间词性语素", "time morpheme"),
},
),
"s": ("处所词", "locative word"),
"f": ("方位词", "noun of locality"),
"v": (
"动词",
"verb",
{
"vd": ("副动词", "auxiliary verb"),
"vn": ("名动词", "noun-verb"),
"vshi": ('动词"是"', "verb 是"),
"vyou": ('动词"有"', "verb 有"),
"vf": ("趋向动词", "directional verb"),
"vx": ("行事动词", "performative verb"),
"vi": ("不及物动词", "intransitive verb"),
"vl": ("动词性惯用语", "verb phrase"),
"vg": ("动词性语素", "verb morpheme"),
},
),
"a": (
"形容词",
"adjective",
{
"ad": ("副形词", "auxiliary adjective"),
"an": ("名形词", "noun-adjective"),
"ag": ("形容词性语素", "adjective morpheme"),
"al": ("形容词性惯用语", "adjective phrase"),
},
),
"b": (
"区别词",
"distinguishing word",
{
"bl": ("区别词性惯用语", "distinguishing phrase"),
},
),
"z": ("状态词", "status word"),
"r": (
"代词",
"pronoun",
{
"rr": ("人称代词", "personal pronoun"),
"rz": (
"指示代词",
"demonstrative pronoun",
{
"rzt": ("时间指示代词", "temporal demonstrative pronoun"),
"rzs": ("处所指示代词", "locative demonstrative pronoun"),
"rzv": ("谓词性指示代词", "predicate demonstrative pronoun"),
},
),
"ry": (
"疑问代词",
"interrogative pronoun",
{
"ryt": ("时间疑问代词", "temporal interrogative pronoun"),
"rys": ("处所疑问代词", "locative interrogative pronoun"),
"ryv": ("谓词性疑问代词", "predicate interrogative pronoun"),
},
),
"rg": ("代词性语素", "pronoun morpheme"),
},
),
"m": (
"数词",
"numeral",
{
"mq": ("数量词", "numeral-plus-classifier compound"),
"mg": ("干支", "zodiac"),
},
),
"q": (
"量词",
"classifier",
{
"qv": ("动量词", "verbal classifier"),
"qt": ("时量词", "temporal classifier"),
},
),
"d": ("副词", "adverb"),
"p": (
"介词",
"preposition",
{
"pba": ("介词“把”", "preposition 把"),
"pbei": ("介词“被”", "preposition 被"),
},
),
"c": (
"连词",
"conjunction",
{
"cc": ("并列连词", "coordinating conjunction"),
},
),
"u": (
"助词",
"particle",
{
"uzhe": ("着", "particle 着"),
"ule": ("了/喽", "particle 了/喽"),
"uguo": ("过", "particle 过"),
"ude1": ("的/底", "particle 的/底"),
"ude2": ("地", "particle 地"),
"ude3": ("得", "particle 得"),
"usuo": ("所", "particle 所"),
"udeng": ("等/等等/云云", "particle 等/等等/云云"),
"uyy": ("一样/一般/似的/般", "particle 一样/一般/似的/般"),
"udh": ("的话", "particle 的话"),
"uls": ("来讲/来说/而言/说来", "particle 来讲/来说/而言/说来"),
"uzhi": ("之", "particle 之"),
"ulian": ("连", "particle 连"),
},
),
"e": ("叹词", "interjection"),
"y": ("语气词", "modal particle"),
"o": ("拟声词", "onomatopoeia"),
"h": ("前缀", "prefix"),
"k": ("后缀", "suffix"),
"x": (
"字符串",
"string",
{
"xe": ("Email字符串", "email address"),
"xs": ("微博会话分隔符", "hashtag"),
"xm": ("表情符合", "emoticon"),
"xu": ("网址URL", "URL"),
"xx": ("非语素字", "non-morpheme character"),
},
),
"w": (
"标点符号",
"punctuation mark",
{
"wkz": ("左括号", "left parenthesis/bracket"),
"wky": ("右括号", "right parenthesis/bracket"),
"wyz": ("左引号", "left quotation mark"),
"wyy": ("右引号", "right quotation mark"),
"wj": ("句号", "period"),
"ww": ("问号", "question mark"),
"wt": ("叹号", "exclamation mark"),
"wd": ("逗号", "comma"),
"wf": ("分号", "semicolon"),
"wn": ("顿号", "enumeration comma"),
"wm": ("冒号", "colon"),
"ws": ("省略号", "ellipsis"),
"wp": ("破折号", "dash"),
"wb": ("百分号千分号", "percent/per mille sign"),
"wh": ("单位符号", "unit of measure sign"),
},
),
"g": ("复合语", "multiword expression"),
"j": ("略语", "abbreviation"),
}
def _get_pos_name(pos_code, names="parent", english=True, pos_map=POS_MAP):
"""Gets the part of speech name for *pos_code*."""
if names not in ("parent", "child", "all", "raw"):
raise ValueError(
"names must be one of 'parent', 'child', 'all', or "
"'raw'; not '{0}'".format(names)
)
logger.debug(
"Getting {0} POS name for '{1}' formatted as '{2}'.".format(
"English" if english else "Chinese", pos_code, names
)
)
if names == "raw":
return pos_code
pos_code = pos_code.lower() # Issue #10
for i in range(1, len(pos_code) + 1):
try:
pos_key = pos_code[0:i]
pos_entry = pos_map[pos_key]
break
except KeyError:
if i == len(pos_code):
logger.warning("part of speech not recognized: '{0}'".format(pos_code))
return None # Issue #20
pos = (pos_entry[1 if english else 0],)
if names == "parent":
logger.debug("Part of speech name found: '{0}'".format(pos[0]))
return pos[0]
if len(pos_entry) == 3 and pos_key != pos_code:
sub_map = pos_entry[2]
logger.debug(
"Found parent part of speech name '{0}'. Descending to "
"look for child name for '{1}'".format(pos_entry[1], pos_code)
)
sub_pos = _get_pos_name(pos_code, names, english, sub_map)
if names == "all":
# sub_pos can be None sometimes (e.g. for a word '甲')
pos = pos + sub_pos if sub_pos else pos
else:
pos = (sub_pos,)
name = pos if names == "all" else pos[-1]
logger.debug("Part of speech name found: '{0}'".format(name))
return name
def get_pos_name(code, name="parent", english=True, pos_tags=POS_MAP):
"""Gets the part of speech name for *code*.
:param str code: The part of speech code to lookup, e.g. ``'nsf'``.
:param str name: Which part of speech name to include in the output. Must
be one of ``'parent'``, ``'child'``, ``'all'``, or ``'raw'``.
Defaults to ``'parent'``. ``'parent'`` indicates that only the most
generic name should be used, e.g. ``'noun'`` for ``'nsf'``.
``'child'`` indicates that the most specific name should be used, e.g.
``'transcribed toponym'`` for ``'nsf'``. ``'all'`` indicates that all
names should be used, e.g. ``('noun', 'toponym',
'transcribed toponym')`` for ``'nsf'``. ``'raw'`` indicates that the
part of speech code is not transformed at all.
:param bool english: Whether to return an English or Chinese name.
:param dict pos_tags: Custom part of speech tags to use.
:returns: ``str`` if *name* is ``'parent'`` or ``'child'``.
``tuple`` if *name* is ``'all'``.
"""
return _get_pos_name(code, name, english, pos_tags)
|
normal
|
{
"blob_id": "093b2afef7cdfb7070eb5e94e84624afe495db66",
"index": 1948,
"step-1": "<mask token>\n\n\ndef get_pos_name(code, name='parent', english=True, pos_tags=POS_MAP):\n \"\"\"Gets the part of speech name for *code*.\n\n :param str code: The part of speech code to lookup, e.g. ``'nsf'``.\n :param str name: Which part of speech name to include in the output. Must\n be one of ``'parent'``, ``'child'``, ``'all'``, or ``'raw'``.\n Defaults to ``'parent'``. ``'parent'`` indicates that only the most\n generic name should be used, e.g. ``'noun'`` for ``'nsf'``.\n ``'child'`` indicates that the most specific name should be used, e.g.\n ``'transcribed toponym'`` for ``'nsf'``. ``'all'`` indicates that all\n names should be used, e.g. ``('noun', 'toponym',\n 'transcribed toponym')`` for ``'nsf'``. ``'raw'`` indicates that the\n part of speech code is not transformed at all.\n :param bool english: Whether to return an English or Chinese name.\n :param dict pos_tags: Custom part of speech tags to use.\n :returns: ``str`` if *name* is ``'parent'`` or ``'child'``.\n ``tuple`` if *name* is ``'all'``.\n\n \"\"\"\n return _get_pos_name(code, name, english, pos_tags)\n",
"step-2": "<mask token>\n\n\ndef _get_pos_name(pos_code, names='parent', english=True, pos_map=POS_MAP):\n \"\"\"Gets the part of speech name for *pos_code*.\"\"\"\n if names not in ('parent', 'child', 'all', 'raw'):\n raise ValueError(\n \"names must be one of 'parent', 'child', 'all', or 'raw'; not '{0}'\"\n .format(names))\n logger.debug(\"Getting {0} POS name for '{1}' formatted as '{2}'.\".\n format('English' if english else 'Chinese', pos_code, names))\n if names == 'raw':\n return pos_code\n pos_code = pos_code.lower()\n for i in range(1, len(pos_code) + 1):\n try:\n pos_key = pos_code[0:i]\n pos_entry = pos_map[pos_key]\n break\n except KeyError:\n if i == len(pos_code):\n logger.warning(\"part of speech not recognized: '{0}'\".\n format(pos_code))\n return None\n pos = pos_entry[1 if english else 0],\n if names == 'parent':\n logger.debug(\"Part of speech name found: '{0}'\".format(pos[0]))\n return pos[0]\n if len(pos_entry) == 3 and pos_key != pos_code:\n sub_map = pos_entry[2]\n logger.debug(\n \"Found parent part of speech name '{0}'. Descending to look for child name for '{1}'\"\n .format(pos_entry[1], pos_code))\n sub_pos = _get_pos_name(pos_code, names, english, sub_map)\n if names == 'all':\n pos = pos + sub_pos if sub_pos else pos\n else:\n pos = sub_pos,\n name = pos if names == 'all' else pos[-1]\n logger.debug(\"Part of speech name found: '{0}'\".format(name))\n return name\n\n\ndef get_pos_name(code, name='parent', english=True, pos_tags=POS_MAP):\n \"\"\"Gets the part of speech name for *code*.\n\n :param str code: The part of speech code to lookup, e.g. ``'nsf'``.\n :param str name: Which part of speech name to include in the output. Must\n be one of ``'parent'``, ``'child'``, ``'all'``, or ``'raw'``.\n Defaults to ``'parent'``. ``'parent'`` indicates that only the most\n generic name should be used, e.g. 
``'noun'`` for ``'nsf'``.\n ``'child'`` indicates that the most specific name should be used, e.g.\n ``'transcribed toponym'`` for ``'nsf'``. ``'all'`` indicates that all\n names should be used, e.g. ``('noun', 'toponym',\n 'transcribed toponym')`` for ``'nsf'``. ``'raw'`` indicates that the\n part of speech code is not transformed at all.\n :param bool english: Whether to return an English or Chinese name.\n :param dict pos_tags: Custom part of speech tags to use.\n :returns: ``str`` if *name* is ``'parent'`` or ``'child'``.\n ``tuple`` if *name* is ``'all'``.\n\n \"\"\"\n return _get_pos_name(code, name, english, pos_tags)\n",
"step-3": "<mask token>\nlogger = logging.getLogger('pynlpir.pos_map')\nPOS_MAP = {'n': ('名词', 'noun', {'nr': ('人名', 'personal name', {'nr1': (\n '汉语姓氏', 'Chinese surname'), 'nr2': ('汉语名字', 'Chinese given name'),\n 'nrj': ('日语人名', 'Japanese personal name'), 'nrf': ('音译人名',\n 'transcribed personal name')}), 'ns': ('地名', 'toponym', {'nsf': ('音译地名',\n 'transcribed toponym')}), 'nt': ('机构团体名', 'organization/group name'),\n 'nz': ('其它专名', 'other proper noun'), 'nl': ('名词性惯用语', 'noun phrase'),\n 'ng': ('名词性语素', 'noun morpheme')}), 't': ('时间词', 'time word', {'tg': (\n '时间词性语素', 'time morpheme')}), 's': ('处所词', 'locative word'), 'f': (\n '方位词', 'noun of locality'), 'v': ('动词', 'verb', {'vd': ('副动词',\n 'auxiliary verb'), 'vn': ('名动词', 'noun-verb'), 'vshi': ('动词\"是\"',\n 'verb 是'), 'vyou': ('动词\"有\"', 'verb 有'), 'vf': ('趋向动词',\n 'directional verb'), 'vx': ('行事动词', 'performative verb'), 'vi': (\n '不及物动词', 'intransitive verb'), 'vl': ('动词性惯用语', 'verb phrase'), 'vg': (\n '动词性语素', 'verb morpheme')}), 'a': ('形容词', 'adjective', {'ad': ('副形词',\n 'auxiliary adjective'), 'an': ('名形词', 'noun-adjective'), 'ag': (\n '形容词性语素', 'adjective morpheme'), 'al': ('形容词性惯用语', 'adjective phrase')}\n ), 'b': ('区别词', 'distinguishing word', {'bl': ('区别词性惯用语',\n 'distinguishing phrase')}), 'z': ('状态词', 'status word'), 'r': ('代词',\n 'pronoun', {'rr': ('人称代词', 'personal pronoun'), 'rz': ('指示代词',\n 'demonstrative pronoun', {'rzt': ('时间指示代词',\n 'temporal demonstrative pronoun'), 'rzs': ('处所指示代词',\n 'locative demonstrative pronoun'), 'rzv': ('谓词性指示代词',\n 'predicate demonstrative pronoun')}), 'ry': ('疑问代词',\n 'interrogative pronoun', {'ryt': ('时间疑问代词',\n 'temporal interrogative pronoun'), 'rys': ('处所疑问代词',\n 'locative interrogative pronoun'), 'ryv': ('谓词性疑问代词',\n 'predicate interrogative pronoun')}), 'rg': ('代词性语素',\n 'pronoun morpheme')}), 'm': ('数词', 'numeral', {'mq': ('数量词',\n 'numeral-plus-classifier compound'), 'mg': ('干支', 'zodiac')}), 'q': (\n '量词', 'classifier', {'qv': ('动量词', 'verbal classifier'), 
'qt': ('时量词',\n 'temporal classifier')}), 'd': ('副词', 'adverb'), 'p': ('介词',\n 'preposition', {'pba': ('介词“把”', 'preposition 把'), 'pbei': ('介词“被”',\n 'preposition 被')}), 'c': ('连词', 'conjunction', {'cc': ('并列连词',\n 'coordinating conjunction')}), 'u': ('助词', 'particle', {'uzhe': ('着',\n 'particle 着'), 'ule': ('了/喽', 'particle 了/喽'), 'uguo': ('过',\n 'particle 过'), 'ude1': ('的/底', 'particle 的/底'), 'ude2': ('地',\n 'particle 地'), 'ude3': ('得', 'particle 得'), 'usuo': ('所', 'particle 所'),\n 'udeng': ('等/等等/云云', 'particle 等/等等/云云'), 'uyy': ('一样/一般/似的/般',\n 'particle 一样/一般/似的/般'), 'udh': ('的话', 'particle 的话'), 'uls': (\n '来讲/来说/而言/说来', 'particle 来讲/来说/而言/说来'), 'uzhi': ('之', 'particle 之'),\n 'ulian': ('连', 'particle 连')}), 'e': ('叹词', 'interjection'), 'y': (\n '语气词', 'modal particle'), 'o': ('拟声词', 'onomatopoeia'), 'h': ('前缀',\n 'prefix'), 'k': ('后缀', 'suffix'), 'x': ('字符串', 'string', {'xe': (\n 'Email字符串', 'email address'), 'xs': ('微博会话分隔符', 'hashtag'), 'xm': (\n '表情符合', 'emoticon'), 'xu': ('网址URL', 'URL'), 'xx': ('非语素字',\n 'non-morpheme character')}), 'w': ('标点符号', 'punctuation mark', {'wkz':\n ('左括号', 'left parenthesis/bracket'), 'wky': ('右括号',\n 'right parenthesis/bracket'), 'wyz': ('左引号', 'left quotation mark'),\n 'wyy': ('右引号', 'right quotation mark'), 'wj': ('句号', 'period'), 'ww': (\n '问号', 'question mark'), 'wt': ('叹号', 'exclamation mark'), 'wd': ('逗号',\n 'comma'), 'wf': ('分号', 'semicolon'), 'wn': ('顿号', 'enumeration comma'),\n 'wm': ('冒号', 'colon'), 'ws': ('省略号', 'ellipsis'), 'wp': ('破折号', 'dash'),\n 'wb': ('百分号千分号', 'percent/per mille sign'), 'wh': ('单位符号',\n 'unit of measure sign')}), 'g': ('复合语', 'multiword expression'), 'j': (\n '略语', 'abbreviation')}\n\n\ndef _get_pos_name(pos_code, names='parent', english=True, pos_map=POS_MAP):\n \"\"\"Gets the part of speech name for *pos_code*.\"\"\"\n if names not in ('parent', 'child', 'all', 'raw'):\n raise ValueError(\n \"names must be one of 'parent', 'child', 'all', or 'raw'; not '{0}'\"\n .format(names))\n 
logger.debug(\"Getting {0} POS name for '{1}' formatted as '{2}'.\".\n format('English' if english else 'Chinese', pos_code, names))\n if names == 'raw':\n return pos_code\n pos_code = pos_code.lower()\n for i in range(1, len(pos_code) + 1):\n try:\n pos_key = pos_code[0:i]\n pos_entry = pos_map[pos_key]\n break\n except KeyError:\n if i == len(pos_code):\n logger.warning(\"part of speech not recognized: '{0}'\".\n format(pos_code))\n return None\n pos = pos_entry[1 if english else 0],\n if names == 'parent':\n logger.debug(\"Part of speech name found: '{0}'\".format(pos[0]))\n return pos[0]\n if len(pos_entry) == 3 and pos_key != pos_code:\n sub_map = pos_entry[2]\n logger.debug(\n \"Found parent part of speech name '{0}'. Descending to look for child name for '{1}'\"\n .format(pos_entry[1], pos_code))\n sub_pos = _get_pos_name(pos_code, names, english, sub_map)\n if names == 'all':\n pos = pos + sub_pos if sub_pos else pos\n else:\n pos = sub_pos,\n name = pos if names == 'all' else pos[-1]\n logger.debug(\"Part of speech name found: '{0}'\".format(name))\n return name\n\n\ndef get_pos_name(code, name='parent', english=True, pos_tags=POS_MAP):\n \"\"\"Gets the part of speech name for *code*.\n\n :param str code: The part of speech code to lookup, e.g. ``'nsf'``.\n :param str name: Which part of speech name to include in the output. Must\n be one of ``'parent'``, ``'child'``, ``'all'``, or ``'raw'``.\n Defaults to ``'parent'``. ``'parent'`` indicates that only the most\n generic name should be used, e.g. ``'noun'`` for ``'nsf'``.\n ``'child'`` indicates that the most specific name should be used, e.g.\n ``'transcribed toponym'`` for ``'nsf'``. ``'all'`` indicates that all\n names should be used, e.g. ``('noun', 'toponym',\n 'transcribed toponym')`` for ``'nsf'``. 
``'raw'`` indicates that the\n part of speech code is not transformed at all.\n :param bool english: Whether to return an English or Chinese name.\n :param dict pos_tags: Custom part of speech tags to use.\n :returns: ``str`` if *name* is ``'parent'`` or ``'child'``.\n ``tuple`` if *name* is ``'all'``.\n\n \"\"\"\n return _get_pos_name(code, name, english, pos_tags)\n",
"step-4": "<mask token>\nimport logging\nlogger = logging.getLogger('pynlpir.pos_map')\nPOS_MAP = {'n': ('名词', 'noun', {'nr': ('人名', 'personal name', {'nr1': (\n '汉语姓氏', 'Chinese surname'), 'nr2': ('汉语名字', 'Chinese given name'),\n 'nrj': ('日语人名', 'Japanese personal name'), 'nrf': ('音译人名',\n 'transcribed personal name')}), 'ns': ('地名', 'toponym', {'nsf': ('音译地名',\n 'transcribed toponym')}), 'nt': ('机构团体名', 'organization/group name'),\n 'nz': ('其它专名', 'other proper noun'), 'nl': ('名词性惯用语', 'noun phrase'),\n 'ng': ('名词性语素', 'noun morpheme')}), 't': ('时间词', 'time word', {'tg': (\n '时间词性语素', 'time morpheme')}), 's': ('处所词', 'locative word'), 'f': (\n '方位词', 'noun of locality'), 'v': ('动词', 'verb', {'vd': ('副动词',\n 'auxiliary verb'), 'vn': ('名动词', 'noun-verb'), 'vshi': ('动词\"是\"',\n 'verb 是'), 'vyou': ('动词\"有\"', 'verb 有'), 'vf': ('趋向动词',\n 'directional verb'), 'vx': ('行事动词', 'performative verb'), 'vi': (\n '不及物动词', 'intransitive verb'), 'vl': ('动词性惯用语', 'verb phrase'), 'vg': (\n '动词性语素', 'verb morpheme')}), 'a': ('形容词', 'adjective', {'ad': ('副形词',\n 'auxiliary adjective'), 'an': ('名形词', 'noun-adjective'), 'ag': (\n '形容词性语素', 'adjective morpheme'), 'al': ('形容词性惯用语', 'adjective phrase')}\n ), 'b': ('区别词', 'distinguishing word', {'bl': ('区别词性惯用语',\n 'distinguishing phrase')}), 'z': ('状态词', 'status word'), 'r': ('代词',\n 'pronoun', {'rr': ('人称代词', 'personal pronoun'), 'rz': ('指示代词',\n 'demonstrative pronoun', {'rzt': ('时间指示代词',\n 'temporal demonstrative pronoun'), 'rzs': ('处所指示代词',\n 'locative demonstrative pronoun'), 'rzv': ('谓词性指示代词',\n 'predicate demonstrative pronoun')}), 'ry': ('疑问代词',\n 'interrogative pronoun', {'ryt': ('时间疑问代词',\n 'temporal interrogative pronoun'), 'rys': ('处所疑问代词',\n 'locative interrogative pronoun'), 'ryv': ('谓词性疑问代词',\n 'predicate interrogative pronoun')}), 'rg': ('代词性语素',\n 'pronoun morpheme')}), 'm': ('数词', 'numeral', {'mq': ('数量词',\n 'numeral-plus-classifier compound'), 'mg': ('干支', 'zodiac')}), 'q': (\n '量词', 'classifier', {'qv': ('动量词', 
'verbal classifier'), 'qt': ('时量词',\n 'temporal classifier')}), 'd': ('副词', 'adverb'), 'p': ('介词',\n 'preposition', {'pba': ('介词“把”', 'preposition 把'), 'pbei': ('介词“被”',\n 'preposition 被')}), 'c': ('连词', 'conjunction', {'cc': ('并列连词',\n 'coordinating conjunction')}), 'u': ('助词', 'particle', {'uzhe': ('着',\n 'particle 着'), 'ule': ('了/喽', 'particle 了/喽'), 'uguo': ('过',\n 'particle 过'), 'ude1': ('的/底', 'particle 的/底'), 'ude2': ('地',\n 'particle 地'), 'ude3': ('得', 'particle 得'), 'usuo': ('所', 'particle 所'),\n 'udeng': ('等/等等/云云', 'particle 等/等等/云云'), 'uyy': ('一样/一般/似的/般',\n 'particle 一样/一般/似的/般'), 'udh': ('的话', 'particle 的话'), 'uls': (\n '来讲/来说/而言/说来', 'particle 来讲/来说/而言/说来'), 'uzhi': ('之', 'particle 之'),\n 'ulian': ('连', 'particle 连')}), 'e': ('叹词', 'interjection'), 'y': (\n '语气词', 'modal particle'), 'o': ('拟声词', 'onomatopoeia'), 'h': ('前缀',\n 'prefix'), 'k': ('后缀', 'suffix'), 'x': ('字符串', 'string', {'xe': (\n 'Email字符串', 'email address'), 'xs': ('微博会话分隔符', 'hashtag'), 'xm': (\n '表情符合', 'emoticon'), 'xu': ('网址URL', 'URL'), 'xx': ('非语素字',\n 'non-morpheme character')}), 'w': ('标点符号', 'punctuation mark', {'wkz':\n ('左括号', 'left parenthesis/bracket'), 'wky': ('右括号',\n 'right parenthesis/bracket'), 'wyz': ('左引号', 'left quotation mark'),\n 'wyy': ('右引号', 'right quotation mark'), 'wj': ('句号', 'period'), 'ww': (\n '问号', 'question mark'), 'wt': ('叹号', 'exclamation mark'), 'wd': ('逗号',\n 'comma'), 'wf': ('分号', 'semicolon'), 'wn': ('顿号', 'enumeration comma'),\n 'wm': ('冒号', 'colon'), 'ws': ('省略号', 'ellipsis'), 'wp': ('破折号', 'dash'),\n 'wb': ('百分号千分号', 'percent/per mille sign'), 'wh': ('单位符号',\n 'unit of measure sign')}), 'g': ('复合语', 'multiword expression'), 'j': (\n '略语', 'abbreviation')}\n\n\ndef _get_pos_name(pos_code, names='parent', english=True, pos_map=POS_MAP):\n \"\"\"Gets the part of speech name for *pos_code*.\"\"\"\n if names not in ('parent', 'child', 'all', 'raw'):\n raise ValueError(\n \"names must be one of 'parent', 'child', 'all', or 'raw'; not '{0}'\"\n 
.format(names))\n logger.debug(\"Getting {0} POS name for '{1}' formatted as '{2}'.\".\n format('English' if english else 'Chinese', pos_code, names))\n if names == 'raw':\n return pos_code\n pos_code = pos_code.lower()\n for i in range(1, len(pos_code) + 1):\n try:\n pos_key = pos_code[0:i]\n pos_entry = pos_map[pos_key]\n break\n except KeyError:\n if i == len(pos_code):\n logger.warning(\"part of speech not recognized: '{0}'\".\n format(pos_code))\n return None\n pos = pos_entry[1 if english else 0],\n if names == 'parent':\n logger.debug(\"Part of speech name found: '{0}'\".format(pos[0]))\n return pos[0]\n if len(pos_entry) == 3 and pos_key != pos_code:\n sub_map = pos_entry[2]\n logger.debug(\n \"Found parent part of speech name '{0}'. Descending to look for child name for '{1}'\"\n .format(pos_entry[1], pos_code))\n sub_pos = _get_pos_name(pos_code, names, english, sub_map)\n if names == 'all':\n pos = pos + sub_pos if sub_pos else pos\n else:\n pos = sub_pos,\n name = pos if names == 'all' else pos[-1]\n logger.debug(\"Part of speech name found: '{0}'\".format(name))\n return name\n\n\ndef get_pos_name(code, name='parent', english=True, pos_tags=POS_MAP):\n \"\"\"Gets the part of speech name for *code*.\n\n :param str code: The part of speech code to lookup, e.g. ``'nsf'``.\n :param str name: Which part of speech name to include in the output. Must\n be one of ``'parent'``, ``'child'``, ``'all'``, or ``'raw'``.\n Defaults to ``'parent'``. ``'parent'`` indicates that only the most\n generic name should be used, e.g. ``'noun'`` for ``'nsf'``.\n ``'child'`` indicates that the most specific name should be used, e.g.\n ``'transcribed toponym'`` for ``'nsf'``. ``'all'`` indicates that all\n names should be used, e.g. ``('noun', 'toponym',\n 'transcribed toponym')`` for ``'nsf'``. 
``'raw'`` indicates that the\n part of speech code is not transformed at all.\n :param bool english: Whether to return an English or Chinese name.\n :param dict pos_tags: Custom part of speech tags to use.\n :returns: ``str`` if *name* is ``'parent'`` or ``'child'``.\n ``tuple`` if *name* is ``'all'``.\n\n \"\"\"\n return _get_pos_name(code, name, english, pos_tags)\n",
"step-5": "# -*- coding: utf-8 -*-\n\"\"\"Part of speech mapping constants and functions for NLPIR/ICTCLAS.\n\nThis module is used by :mod:`pynlpir` to format segmented words for output.\n\n\"\"\"\nimport logging\n\n\nlogger = logging.getLogger(\"pynlpir.pos_map\")\n\n#: A dictionary that maps part of speech codes returned by NLPIR to\n#: human-readable names (English and Chinese).\nPOS_MAP = {\n \"n\": (\n \"名词\",\n \"noun\",\n {\n \"nr\": (\n \"人名\",\n \"personal name\",\n {\n \"nr1\": (\"汉语姓氏\", \"Chinese surname\"),\n \"nr2\": (\"汉语名字\", \"Chinese given name\"),\n \"nrj\": (\"日语人名\", \"Japanese personal name\"),\n \"nrf\": (\"音译人名\", \"transcribed personal name\"),\n },\n ),\n \"ns\": (\n \"地名\",\n \"toponym\",\n {\n \"nsf\": (\"音译地名\", \"transcribed toponym\"),\n },\n ),\n \"nt\": (\"机构团体名\", \"organization/group name\"),\n \"nz\": (\"其它专名\", \"other proper noun\"),\n \"nl\": (\"名词性惯用语\", \"noun phrase\"),\n \"ng\": (\"名词性语素\", \"noun morpheme\"),\n },\n ),\n \"t\": (\n \"时间词\",\n \"time word\",\n {\n \"tg\": (\"时间词性语素\", \"time morpheme\"),\n },\n ),\n \"s\": (\"处所词\", \"locative word\"),\n \"f\": (\"方位词\", \"noun of locality\"),\n \"v\": (\n \"动词\",\n \"verb\",\n {\n \"vd\": (\"副动词\", \"auxiliary verb\"),\n \"vn\": (\"名动词\", \"noun-verb\"),\n \"vshi\": ('动词\"是\"', \"verb 是\"),\n \"vyou\": ('动词\"有\"', \"verb 有\"),\n \"vf\": (\"趋向动词\", \"directional verb\"),\n \"vx\": (\"行事动词\", \"performative verb\"),\n \"vi\": (\"不及物动词\", \"intransitive verb\"),\n \"vl\": (\"动词性惯用语\", \"verb phrase\"),\n \"vg\": (\"动词性语素\", \"verb morpheme\"),\n },\n ),\n \"a\": (\n \"形容词\",\n \"adjective\",\n {\n \"ad\": (\"副形词\", \"auxiliary adjective\"),\n \"an\": (\"名形词\", \"noun-adjective\"),\n \"ag\": (\"形容词性语素\", \"adjective morpheme\"),\n \"al\": (\"形容词性惯用语\", \"adjective phrase\"),\n },\n ),\n \"b\": (\n \"区别词\",\n \"distinguishing word\",\n {\n \"bl\": (\"区别词性惯用语\", \"distinguishing phrase\"),\n },\n ),\n \"z\": (\"状态词\", \"status word\"),\n \"r\": (\n \"代词\",\n \"pronoun\",\n {\n 
\"rr\": (\"人称代词\", \"personal pronoun\"),\n \"rz\": (\n \"指示代词\",\n \"demonstrative pronoun\",\n {\n \"rzt\": (\"时间指示代词\", \"temporal demonstrative pronoun\"),\n \"rzs\": (\"处所指示代词\", \"locative demonstrative pronoun\"),\n \"rzv\": (\"谓词性指示代词\", \"predicate demonstrative pronoun\"),\n },\n ),\n \"ry\": (\n \"疑问代词\",\n \"interrogative pronoun\",\n {\n \"ryt\": (\"时间疑问代词\", \"temporal interrogative pronoun\"),\n \"rys\": (\"处所疑问代词\", \"locative interrogative pronoun\"),\n \"ryv\": (\"谓词性疑问代词\", \"predicate interrogative pronoun\"),\n },\n ),\n \"rg\": (\"代词性语素\", \"pronoun morpheme\"),\n },\n ),\n \"m\": (\n \"数词\",\n \"numeral\",\n {\n \"mq\": (\"数量词\", \"numeral-plus-classifier compound\"),\n \"mg\": (\"干支\", \"zodiac\"),\n },\n ),\n \"q\": (\n \"量词\",\n \"classifier\",\n {\n \"qv\": (\"动量词\", \"verbal classifier\"),\n \"qt\": (\"时量词\", \"temporal classifier\"),\n },\n ),\n \"d\": (\"副词\", \"adverb\"),\n \"p\": (\n \"介词\",\n \"preposition\",\n {\n \"pba\": (\"介词“把”\", \"preposition 把\"),\n \"pbei\": (\"介词“被”\", \"preposition 被\"),\n },\n ),\n \"c\": (\n \"连词\",\n \"conjunction\",\n {\n \"cc\": (\"并列连词\", \"coordinating conjunction\"),\n },\n ),\n \"u\": (\n \"助词\",\n \"particle\",\n {\n \"uzhe\": (\"着\", \"particle 着\"),\n \"ule\": (\"了/喽\", \"particle 了/喽\"),\n \"uguo\": (\"过\", \"particle 过\"),\n \"ude1\": (\"的/底\", \"particle 的/底\"),\n \"ude2\": (\"地\", \"particle 地\"),\n \"ude3\": (\"得\", \"particle 得\"),\n \"usuo\": (\"所\", \"particle 所\"),\n \"udeng\": (\"等/等等/云云\", \"particle 等/等等/云云\"),\n \"uyy\": (\"一样/一般/似的/般\", \"particle 一样/一般/似的/般\"),\n \"udh\": (\"的话\", \"particle 的话\"),\n \"uls\": (\"来讲/来说/而言/说来\", \"particle 来讲/来说/而言/说来\"),\n \"uzhi\": (\"之\", \"particle 之\"),\n \"ulian\": (\"连\", \"particle 连\"),\n },\n ),\n \"e\": (\"叹词\", \"interjection\"),\n \"y\": (\"语气词\", \"modal particle\"),\n \"o\": (\"拟声词\", \"onomatopoeia\"),\n \"h\": (\"前缀\", \"prefix\"),\n \"k\": (\"后缀\", \"suffix\"),\n \"x\": (\n \"字符串\",\n \"string\",\n {\n \"xe\": (\"Email字符串\", 
\"email address\"),\n \"xs\": (\"微博会话分隔符\", \"hashtag\"),\n \"xm\": (\"表情符合\", \"emoticon\"),\n \"xu\": (\"网址URL\", \"URL\"),\n \"xx\": (\"非语素字\", \"non-morpheme character\"),\n },\n ),\n \"w\": (\n \"标点符号\",\n \"punctuation mark\",\n {\n \"wkz\": (\"左括号\", \"left parenthesis/bracket\"),\n \"wky\": (\"右括号\", \"right parenthesis/bracket\"),\n \"wyz\": (\"左引号\", \"left quotation mark\"),\n \"wyy\": (\"右引号\", \"right quotation mark\"),\n \"wj\": (\"句号\", \"period\"),\n \"ww\": (\"问号\", \"question mark\"),\n \"wt\": (\"叹号\", \"exclamation mark\"),\n \"wd\": (\"逗号\", \"comma\"),\n \"wf\": (\"分号\", \"semicolon\"),\n \"wn\": (\"顿号\", \"enumeration comma\"),\n \"wm\": (\"冒号\", \"colon\"),\n \"ws\": (\"省略号\", \"ellipsis\"),\n \"wp\": (\"破折号\", \"dash\"),\n \"wb\": (\"百分号千分号\", \"percent/per mille sign\"),\n \"wh\": (\"单位符号\", \"unit of measure sign\"),\n },\n ),\n \"g\": (\"复合语\", \"multiword expression\"),\n \"j\": (\"略语\", \"abbreviation\"),\n}\n\n\ndef _get_pos_name(pos_code, names=\"parent\", english=True, pos_map=POS_MAP):\n \"\"\"Gets the part of speech name for *pos_code*.\"\"\"\n if names not in (\"parent\", \"child\", \"all\", \"raw\"):\n raise ValueError(\n \"names must be one of 'parent', 'child', 'all', or \"\n \"'raw'; not '{0}'\".format(names)\n )\n logger.debug(\n \"Getting {0} POS name for '{1}' formatted as '{2}'.\".format(\n \"English\" if english else \"Chinese\", pos_code, names\n )\n )\n if names == \"raw\":\n return pos_code\n pos_code = pos_code.lower() # Issue #10\n for i in range(1, len(pos_code) + 1):\n try:\n pos_key = pos_code[0:i]\n pos_entry = pos_map[pos_key]\n break\n except KeyError:\n if i == len(pos_code):\n logger.warning(\"part of speech not recognized: '{0}'\".format(pos_code))\n return None # Issue #20\n pos = (pos_entry[1 if english else 0],)\n if names == \"parent\":\n logger.debug(\"Part of speech name found: '{0}'\".format(pos[0]))\n return pos[0]\n if len(pos_entry) == 3 and pos_key != pos_code:\n sub_map = pos_entry[2]\n 
logger.debug(\n \"Found parent part of speech name '{0}'. Descending to \"\n \"look for child name for '{1}'\".format(pos_entry[1], pos_code)\n )\n sub_pos = _get_pos_name(pos_code, names, english, sub_map)\n\n if names == \"all\":\n # sub_pos can be None sometimes (e.g. for a word '甲')\n pos = pos + sub_pos if sub_pos else pos\n else:\n pos = (sub_pos,)\n\n name = pos if names == \"all\" else pos[-1]\n logger.debug(\"Part of speech name found: '{0}'\".format(name))\n return name\n\n\ndef get_pos_name(code, name=\"parent\", english=True, pos_tags=POS_MAP):\n \"\"\"Gets the part of speech name for *code*.\n\n :param str code: The part of speech code to lookup, e.g. ``'nsf'``.\n :param str name: Which part of speech name to include in the output. Must\n be one of ``'parent'``, ``'child'``, ``'all'``, or ``'raw'``.\n Defaults to ``'parent'``. ``'parent'`` indicates that only the most\n generic name should be used, e.g. ``'noun'`` for ``'nsf'``.\n ``'child'`` indicates that the most specific name should be used, e.g.\n ``'transcribed toponym'`` for ``'nsf'``. ``'all'`` indicates that all\n names should be used, e.g. ``('noun', 'toponym',\n 'transcribed toponym')`` for ``'nsf'``. ``'raw'`` indicates that the\n part of speech code is not transformed at all.\n :param bool english: Whether to return an English or Chinese name.\n :param dict pos_tags: Custom part of speech tags to use.\n :returns: ``str`` if *name* is ``'parent'`` or ``'child'``.\n ``tuple`` if *name* is ``'all'``.\n\n \"\"\"\n return _get_pos_name(code, name, english, pos_tags)\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
sense.set_pixels(prenume)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
sense = SenseHat()
b = 0, 0, 204
w = 255, 255, 255
e = 0, 0, 0
y = 255, 255, 0
r = 255, 0, 0
prenume = [e, e, e, e, e, e, e, e, e, e, e, e, e, e, e, e, e, b, e, y, y, e,
r, e, b, e, b, y, e, y, r, e, b, b, b, y, e, y, r, e, b, e, b, y, e, y,
r, e, b, e, b, y, y, e, r, e, e, e, e, e, e, e, e, e]
sense.set_pixels(prenume)
<|reserved_special_token_1|>
from sense_hat import SenseHat
import time
sense = SenseHat()
b = 0, 0, 204
w = 255, 255, 255
e = 0, 0, 0
y = 255, 255, 0
r = 255, 0, 0
prenume = [e, e, e, e, e, e, e, e, e, e, e, e, e, e, e, e, e, b, e, y, y, e,
r, e, b, e, b, y, e, y, r, e, b, b, b, y, e, y, r, e, b, e, b, y, e, y,
r, e, b, e, b, y, y, e, r, e, e, e, e, e, e, e, e, e]
sense.set_pixels(prenume)
<|reserved_special_token_1|>
from sense_hat import SenseHat
import time
sense = SenseHat()
b = (0, 0, 204) #Blue
w = (255, 255, 255) #White
e = (0, 0, 0) #Empty
y = (255, 255, 0) #Yellow
r = (255, 0, 0) #red
prenume = [
e, e, e, e, e, e, e, e,
e, e, e, e, e, e, e, e,
e, b, e, y, y, e, r, e,
b, e, b, y, e, y, r, e,
b, b, b, y, e, y, r, e,
b, e, b, y, e, y, r, e,
b, e, b, y, y, e, r, e,
e, e, e, e, e, e, e, e,
]
sense.set_pixels(prenume)
|
flexible
|
{
"blob_id": "b9eeccbed63aa42afa09fe7ef782066f300255a1",
"index": 2173,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nsense.set_pixels(prenume)\n",
"step-3": "<mask token>\nsense = SenseHat()\nb = 0, 0, 204\nw = 255, 255, 255\ne = 0, 0, 0\ny = 255, 255, 0\nr = 255, 0, 0\nprenume = [e, e, e, e, e, e, e, e, e, e, e, e, e, e, e, e, e, b, e, y, y, e,\n r, e, b, e, b, y, e, y, r, e, b, b, b, y, e, y, r, e, b, e, b, y, e, y,\n r, e, b, e, b, y, y, e, r, e, e, e, e, e, e, e, e, e]\nsense.set_pixels(prenume)\n",
"step-4": "from sense_hat import SenseHat\nimport time\nsense = SenseHat()\nb = 0, 0, 204\nw = 255, 255, 255\ne = 0, 0, 0\ny = 255, 255, 0\nr = 255, 0, 0\nprenume = [e, e, e, e, e, e, e, e, e, e, e, e, e, e, e, e, e, b, e, y, y, e,\n r, e, b, e, b, y, e, y, r, e, b, b, b, y, e, y, r, e, b, e, b, y, e, y,\n r, e, b, e, b, y, y, e, r, e, e, e, e, e, e, e, e, e]\nsense.set_pixels(prenume)\n",
"step-5": "from sense_hat import SenseHat\nimport time\n\nsense = SenseHat()\nb = (0, 0, 204) #Blue\nw = (255, 255, 255) #White\ne = (0, 0, 0) #Empty\ny = (255, 255, 0) #Yellow\nr = (255, 0, 0) #red\n\nprenume = [\n e, e, e, e, e, e, e, e,\n e, e, e, e, e, e, e, e,\n e, b, e, y, y, e, r, e,\n b, e, b, y, e, y, r, e,\n b, b, b, y, e, y, r, e,\n b, e, b, y, e, y, r, e,\n b, e, b, y, y, e, r, e,\n e, e, e, e, e, e, e, e,\n ]\n\nsense.set_pixels(prenume)\n \n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
print(max(d1.values()))
print(min(d1.values()))
<|reserved_special_token_1|>
d1 = {(6): 10, (2): 20, (5): 30, (4): 40, (1): 50, (3): 60}
print(max(d1.values()))
print(min(d1.values()))
<|reserved_special_token_1|>
#Write a Python program to get the maximum and minimum value in a dictionary.
d1={6: 10, 2: 20, 5: 30, 4: 40, 1: 50, 3: 60}
print(max(d1.values()))
print(min(d1.values()))
|
flexible
|
{
"blob_id": "53e397068fcf88bbbce4dcc1bf1b441a2fbbee48",
"index": 2261,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(max(d1.values()))\nprint(min(d1.values()))\n",
"step-3": "d1 = {(6): 10, (2): 20, (5): 30, (4): 40, (1): 50, (3): 60}\nprint(max(d1.values()))\nprint(min(d1.values()))\n",
"step-4": "#Write a Python program to get the maximum and minimum value in a dictionary.\n\nd1={6: 10, 2: 20, 5: 30, 4: 40, 1: 50, 3: 60}\n\nprint(max(d1.values()))\nprint(min(d1.values()))\n\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
#!/usr/bin/env python3.4
from flask import Flask, render_template, request, jsonify
from time import time
# Module-level Flask application object; all route decorators below attach to it.
application = Flask(__name__)
@application.route("/chutesnladders")
@application.route("/cnl")
@application.route("/snakesnladders")
@application.route("/snl")
def chutesnladders():
    """Serve the Chutes-and-Ladders page under all four route aliases."""
    page = render_template('chutesnladders.min.html')
    return application.make_response(page)
@application.route("/report", methods=['GET', 'POST'])
def reportBug():
    """Accept a bug/feedback report via POST form data and return JSON status.

    Expected form fields: email, kind, title, details, reportingEnabled.
    Any exception during processing yields {"result": "Error in receiving"};
    a non-POST request yields {"result": "Bad request"}.
    """
    d = {}
    if request.method == 'POST':
        try:
            email = request.form.get("email", type=str).lower()
            kind = request.form.get("kind", type=str).lower()
            # Subject line of the form "KIND: title".
            title = kind.upper() + ": " + request.form.get("title", type=str)
            details = request.form.get("details", type=str)
            # Microsecond-resolution timestamp used as a report id.
            ts = int(time()*(10**6))
            report = request.form.get("reportingEnabled", type=bool)
            if report:
                # NOTE(review): `sendmail` is never imported in this file (only
                # flask and time are), so this call raises NameError, which the
                # broad except below swallows as "Error in receiving" — confirm
                # where sendmail is meant to come from.
                sendmail.sendMeResponse({
                    'ts': ts,
                    'feed_email': email,
                    'feed_name': email.split('@')[0],
                    'feed_message': details,
                    'feed_subject': title,
                })
            d = {"result": "Received"}
        except Exception:
            d = {"result": "Error in receiving"}
    else:
        d = {"result": "Bad request"}
    response = application.make_response(jsonify(**d))
    return response
# EVERY FUNCTION FOR CHUTES n LADDERS END HERE
# Run the built-in development server when executed directly
# (0.0.0.0 listens on all interfaces).
if __name__ == "__main__":
    application.run(
        host='0.0.0.0',
        debug=False
    )
|
normal
|
{
"blob_id": "a2c62091b14929942b49853c4a30b851ede0004b",
"index": 4563,
"step-1": "<mask token>\n\n\n@application.route('/chutesnladders')\n@application.route('/cnl')\n@application.route('/snakesnladders')\n@application.route('/snl')\ndef chutesnladders():\n response = application.make_response(render_template(\n 'chutesnladders.min.html'))\n return response\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\n@application.route('/chutesnladders')\n@application.route('/cnl')\n@application.route('/snakesnladders')\n@application.route('/snl')\ndef chutesnladders():\n response = application.make_response(render_template(\n 'chutesnladders.min.html'))\n return response\n\n\n@application.route('/report', methods=['GET', 'POST'])\ndef reportBug():\n d = {}\n if request.method == 'POST':\n try:\n email = request.form.get('email', type=str).lower()\n kind = request.form.get('kind', type=str).lower()\n title = kind.upper() + ': ' + request.form.get('title', type=str)\n details = request.form.get('details', type=str)\n ts = int(time() * 10 ** 6)\n report = request.form.get('reportingEnabled', type=bool)\n if report:\n sendmail.sendMeResponse({'ts': ts, 'feed_email': email,\n 'feed_name': email.split('@')[0], 'feed_message':\n details, 'feed_subject': title})\n d = {'result': 'Received'}\n except Exception:\n d = {'result': 'Error in receiving'}\n else:\n d = {'result': 'Bad request'}\n response = application.make_response(jsonify(**d))\n return response\n\n\nif __name__ == '__main__':\n application.run(host='0.0.0.0', debug=False)\n",
"step-3": "<mask token>\napplication = Flask(__name__)\n\n\n@application.route('/chutesnladders')\n@application.route('/cnl')\n@application.route('/snakesnladders')\n@application.route('/snl')\ndef chutesnladders():\n response = application.make_response(render_template(\n 'chutesnladders.min.html'))\n return response\n\n\n@application.route('/report', methods=['GET', 'POST'])\ndef reportBug():\n d = {}\n if request.method == 'POST':\n try:\n email = request.form.get('email', type=str).lower()\n kind = request.form.get('kind', type=str).lower()\n title = kind.upper() + ': ' + request.form.get('title', type=str)\n details = request.form.get('details', type=str)\n ts = int(time() * 10 ** 6)\n report = request.form.get('reportingEnabled', type=bool)\n if report:\n sendmail.sendMeResponse({'ts': ts, 'feed_email': email,\n 'feed_name': email.split('@')[0], 'feed_message':\n details, 'feed_subject': title})\n d = {'result': 'Received'}\n except Exception:\n d = {'result': 'Error in receiving'}\n else:\n d = {'result': 'Bad request'}\n response = application.make_response(jsonify(**d))\n return response\n\n\nif __name__ == '__main__':\n application.run(host='0.0.0.0', debug=False)\n",
"step-4": "from flask import Flask, render_template, request, jsonify\nfrom time import time\napplication = Flask(__name__)\n\n\n@application.route('/chutesnladders')\n@application.route('/cnl')\n@application.route('/snakesnladders')\n@application.route('/snl')\ndef chutesnladders():\n response = application.make_response(render_template(\n 'chutesnladders.min.html'))\n return response\n\n\n@application.route('/report', methods=['GET', 'POST'])\ndef reportBug():\n d = {}\n if request.method == 'POST':\n try:\n email = request.form.get('email', type=str).lower()\n kind = request.form.get('kind', type=str).lower()\n title = kind.upper() + ': ' + request.form.get('title', type=str)\n details = request.form.get('details', type=str)\n ts = int(time() * 10 ** 6)\n report = request.form.get('reportingEnabled', type=bool)\n if report:\n sendmail.sendMeResponse({'ts': ts, 'feed_email': email,\n 'feed_name': email.split('@')[0], 'feed_message':\n details, 'feed_subject': title})\n d = {'result': 'Received'}\n except Exception:\n d = {'result': 'Error in receiving'}\n else:\n d = {'result': 'Bad request'}\n response = application.make_response(jsonify(**d))\n return response\n\n\nif __name__ == '__main__':\n application.run(host='0.0.0.0', debug=False)\n",
"step-5": "#!/usr/bin/env python3.4\n\nfrom flask import Flask, render_template, request, jsonify\nfrom time import time\n\napplication = Flask(__name__)\n\n\n@application.route(\"/chutesnladders\")\n@application.route(\"/cnl\")\n@application.route(\"/snakesnladders\")\n@application.route(\"/snl\")\ndef chutesnladders():\n response = application.make_response(\n render_template(\n 'chutesnladders.min.html'\n )\n )\n return response\n\n\n@application.route(\"/report\", methods=['GET', 'POST'])\ndef reportBug():\n d = {}\n if request.method == 'POST':\n try:\n email = request.form.get(\"email\", type=str).lower()\n kind = request.form.get(\"kind\", type=str).lower()\n title = kind.upper() + \": \" + request.form.get(\"title\", type=str)\n details = request.form.get(\"details\", type=str)\n ts = int(time()*(10**6))\n report = request.form.get(\"reportingEnabled\", type=bool)\n if report:\n sendmail.sendMeResponse({\n 'ts': ts,\n 'feed_email': email,\n 'feed_name': email.split('@')[0],\n 'feed_message': details,\n 'feed_subject': title,\n })\n d = {\"result\": \"Received\"}\n except Exception:\n d = {\"result\": \"Error in receiving\"}\n else:\n d = {\"result\": \"Bad request\"}\n response = application.make_response(jsonify(**d))\n return response\n# EVERY FUNCTION FOR CHUTES n LADDERS END HERE\n\nif __name__ == \"__main__\":\n application.run(\n host='0.0.0.0',\n debug=False\n )\n",
"step-ids": [
1,
3,
4,
5,
6
]
}
|
[
1,
3,
4,
5,
6
] |
#!/usr/bin/python
#import Bio
def findLCS(read, cassette, rIndex, cIndex, cassettes):
    """Return the longest common run of characters starting at read[rIndex]
    and cassette[cIndex].

    Scanning stops at the first mismatch or when either sequence is
    exhausted.  BUG FIX: the original indexed read[rIndex]/cassette[cIndex]
    without bounds checks and raised IndexError whenever a match ran to the
    end of either sequence.  `cassettes` is unused but kept so the signature
    stays compatible with existing callers.
    """
    LCS = ''
    # Bounds checks come first so matching a full suffix no longer crashes.
    while (rIndex < len(read) and cIndex < len(cassette)
           and read[rIndex] == cassette[cIndex]):
        LCS += read[rIndex]
        rIndex += 1
        cIndex += 1
    return LCS
def findMaxLCS(read, cassettes, rIndex, cIndex):
    """Pick the longest common stretch among all cassettes starting at the
    current read/cassette positions, then advance both indices past it.

    Returns (longest_match, new_read_index, new_cassette_index).
    """
    best = ''
    for cassette in cassettes:
        candidate = findLCS(read, cassette, rIndex, cIndex, cassettes)
        if len(candidate) > len(best):
            best = candidate
    return best, rIndex + len(best), cIndex + len(best)
def findConsensus(cassettes, cIndex):
    """Return the consensus symbol for column `cIndex` of the cassette set:
    a lower-case base, '-' for a gap column, or 'n' when ambiguous.

    A base wins a column when it appears in exactly 16 or exactly 14 of the
    cassettes (checked in the order G, A, C, T); a gap wins with 10 or more
    '-' characters.  The last 26 columns are excluded from consideration,
    matching the original bounds.
    """
    width = len(cassettes[1]) - 26
    columns = [[row[pos] for row in cassettes] for pos in range(width)]
    consensus = []
    for column in columns:
        for base in 'GACT':
            if column.count(base) in (16, 14):
                consensus.append(base.lower())
                break
        else:
            consensus.append('-' if column.count('-') >= 10 else 'n')
    return consensus[cIndex]
def checkGap(LCS, cassettes, cIndex):
    """Absorb a consensus gap: when the consensus at `cIndex` is '-', append
    '-' to the match and step past that column.

    Returns the (possibly extended) LCS and the (possibly advanced) cIndex.
    """
    if findConsensus(cassettes, cIndex) == '-':
        return LCS + '-', cIndex + 1
    return LCS, cIndex
#print(rIndex)
#elif findConsens
#elif (findConsensus(cassettes, cIndex)).isalpha():
def deletenuc(read, cassettes, rIndex, cIndex):
    """Return True when skipping one read base (a deletion relative to the
    consensus) re-establishes a match of at least 3 bases.

    BUG FIX: findMaxLCS returns a 3-tuple, so the original compared
    len(<tuple>) >= 3, which is always True; measure the matched string.
    """
    maxLCS, _, _ = findMaxLCS(read, cassettes, rIndex + 1, cIndex)
    return len(maxLCS) >= 3
def insertnuc(LCS, read, cassettes, rIndex, cIndex):
    """Return True when skipping one cassette column (an insertion relative
    to the read) re-establishes a match of at least 3 bases.

    BUG FIX: the original took len() of findMaxLCS's 3-tuple return value,
    which is always 3, making this predicate unconditionally True; measure
    the matched string instead.
    """
    maxLCS, _, _ = findMaxLCS(read, cassettes, rIndex, cIndex + 1)
    return len(maxLCS) >= 3
#def subsnuc(
#def checkgaps(
def align(read, cassettes):
    """Greedily align `read` against the cassette set: repeatedly take the
    longest common stretch, absorb consensus gap columns, and insert a
    consensus base when the match is short and skipping a cassette column
    helps.  Returns the aligned read (also printed progressively).

    NOTE(review): the loop condition `rIndex <= len(read)` lets rIndex reach
    len(read), and findLCS indexes read[rIndex] without a bounds check, so
    termination currently relies on an IndexError at the end of the read —
    confirm intended behavior.
    """
    #print(read)
    #print('hi')
    #print(cassettes)
    rIndex=0
    cIndex=0
    alignedRead=''
    LCS=''
    delrec=[]
    insertrec=[]
    substrec=[]
    #print(read)
    while rIndex<= len(read):
        #print(read)
        #print(len(read))
        #print(rIndex)
        # Longest match from the current positions; advances both indices.
        LCS, rIndex, cIndex= findMaxLCS(read, cassettes,rIndex, cIndex)
        #print(rIndex)
        #print(cIndex)
        #print(LCS)
        LCS, cIndex= checkGap(LCS, cassettes,cIndex)
        #print(rIndex,cIndex)
        #print(LCS)
        #if deletenuc(read, cassettes, rIndex,cIndex)==True:
        #delrec.append(rIndex)
        #rIndex= rIndex+1
        # Short matches (<= 6) suggest an indel; try inserting consensus.
        if len(LCS)<=6 :
            #print (LCS, rIndex)
            #print('enter')
            if insertnuc(LCS, read, cassettes, rIndex, cIndex)==True:
                #print(True, LCS)
                insertrec.append(rIndex)
                nuc= findConsensus(cassettes, cIndex)
                cIndex=cIndex+1
                LCS= LCS+nuc
            else:
                LCS, cIndex= checkGap(LCS, cassettes,cIndex)
        #elif subsnuc(LCS, read, cassettes, rIndex, cIndex)==True:
        #else:
        # LCS, cIndex= checkLCS(LCS, cassettes,cIndex)
        # nuc= findConsensus(cassettes, cIndex)
        # LCS= LCS+nuc
        # cIndex=cIndex+1
        # rIndex=rIndex+1
        alignedRead= alignedRead+ str(LCS)
        print(alignedRead)
    return alignedRead
def main():
    """Prompt for a FASTA reads file and a reference file, then align every
    read against the reference cassettes, collecting the aligned strings.
    """
    FASTA = input('Enter FASTA file:')
    reference = input('Enter reference file:')
    read_lines = [raw.strip() for raw in open(FASTA, 'r')]
    ref_lines = [raw.strip() for raw in open(reference, 'r')]
    readnames = read_lines[::2]   # FASTA headers (every other line)
    reads = read_lines[1::2]      # sequences only
    refnames = ref_lines[::2]
    cassettes = ref_lines[1::2]
    aligned_reads = [align(seq, cassettes) for seq in reads]
|
normal
|
{
"blob_id": "5cec9e82aa994d07e25d8356a8218fc461bb8b4e",
"index": 4728,
"step-1": "def findLCS(read, cassette, rIndex, cIndex, cassettes):\n LCS = ''\n while True:\n if read[rIndex] == cassette[cIndex]:\n LCS += read[rIndex]\n rIndex = rIndex + 1\n cIndex = cIndex + 1\n else:\n break\n return LCS\n\n\n<mask token>\n\n\ndef checkGap(LCS, cassettes, cIndex):\n if findConsensus(cassettes, cIndex) == '-':\n LCS = LCS + '-'\n cIndex = cIndex + 1\n return LCS, cIndex\n else:\n return LCS, cIndex\n\n\ndef deletenuc(read, cassettes, rIndex, cIndex):\n if len(findMaxLCS(read, cassettes, rIndex + 1, cIndex)) >= 3:\n return True\n else:\n return False\n\n\n<mask token>\n\n\ndef main():\n FASTA = input('Enter FASTA file:')\n reference = input('Enter reference file:')\n in_file = open(FASTA, 'r')\n in_file1 = open(reference, 'r')\n line_list = []\n line_list1 = []\n for line in in_file:\n line = line.strip()\n line_list.append(line)\n readnames = line_list[::2]\n reads = line_list[1::2]\n for line1 in in_file1:\n line1 = line1.strip()\n line_list1.append(line1)\n cassettes = line_list1[1::2]\n refnames = line_list1[::2]\n A = []\n for i in reads:\n alignedRead = align(i, cassettes)\n A.append(alignedRead)\n",
"step-2": "def findLCS(read, cassette, rIndex, cIndex, cassettes):\n LCS = ''\n while True:\n if read[rIndex] == cassette[cIndex]:\n LCS += read[rIndex]\n rIndex = rIndex + 1\n cIndex = cIndex + 1\n else:\n break\n return LCS\n\n\n<mask token>\n\n\ndef checkGap(LCS, cassettes, cIndex):\n if findConsensus(cassettes, cIndex) == '-':\n LCS = LCS + '-'\n cIndex = cIndex + 1\n return LCS, cIndex\n else:\n return LCS, cIndex\n\n\ndef deletenuc(read, cassettes, rIndex, cIndex):\n if len(findMaxLCS(read, cassettes, rIndex + 1, cIndex)) >= 3:\n return True\n else:\n return False\n\n\ndef insertnuc(LCS, read, cassettes, rIndex, cIndex):\n if len(findMaxLCS(read, cassettes, rIndex, cIndex + 1)) >= 3:\n return True\n else:\n return False\n\n\n<mask token>\n\n\ndef main():\n FASTA = input('Enter FASTA file:')\n reference = input('Enter reference file:')\n in_file = open(FASTA, 'r')\n in_file1 = open(reference, 'r')\n line_list = []\n line_list1 = []\n for line in in_file:\n line = line.strip()\n line_list.append(line)\n readnames = line_list[::2]\n reads = line_list[1::2]\n for line1 in in_file1:\n line1 = line1.strip()\n line_list1.append(line1)\n cassettes = line_list1[1::2]\n refnames = line_list1[::2]\n A = []\n for i in reads:\n alignedRead = align(i, cassettes)\n A.append(alignedRead)\n",
"step-3": "def findLCS(read, cassette, rIndex, cIndex, cassettes):\n LCS = ''\n while True:\n if read[rIndex] == cassette[cIndex]:\n LCS += read[rIndex]\n rIndex = rIndex + 1\n cIndex = cIndex + 1\n else:\n break\n return LCS\n\n\ndef findMaxLCS(read, cassettes, rIndex, cIndex):\n maxLCS = ''\n for i in range(0, len(cassettes)):\n LCS = findLCS(read, cassettes[i], rIndex, cIndex, cassettes)\n if len(LCS) > len(maxLCS):\n maxLCS = LCS\n rIndex = rIndex + len(maxLCS)\n cIndex = cIndex + len(maxLCS)\n return maxLCS, rIndex, cIndex\n\n\n<mask token>\n\n\ndef checkGap(LCS, cassettes, cIndex):\n if findConsensus(cassettes, cIndex) == '-':\n LCS = LCS + '-'\n cIndex = cIndex + 1\n return LCS, cIndex\n else:\n return LCS, cIndex\n\n\ndef deletenuc(read, cassettes, rIndex, cIndex):\n if len(findMaxLCS(read, cassettes, rIndex + 1, cIndex)) >= 3:\n return True\n else:\n return False\n\n\ndef insertnuc(LCS, read, cassettes, rIndex, cIndex):\n if len(findMaxLCS(read, cassettes, rIndex, cIndex + 1)) >= 3:\n return True\n else:\n return False\n\n\n<mask token>\n\n\ndef main():\n FASTA = input('Enter FASTA file:')\n reference = input('Enter reference file:')\n in_file = open(FASTA, 'r')\n in_file1 = open(reference, 'r')\n line_list = []\n line_list1 = []\n for line in in_file:\n line = line.strip()\n line_list.append(line)\n readnames = line_list[::2]\n reads = line_list[1::2]\n for line1 in in_file1:\n line1 = line1.strip()\n line_list1.append(line1)\n cassettes = line_list1[1::2]\n refnames = line_list1[::2]\n A = []\n for i in reads:\n alignedRead = align(i, cassettes)\n A.append(alignedRead)\n",
"step-4": "def findLCS(read, cassette, rIndex, cIndex, cassettes):\n LCS = ''\n while True:\n if read[rIndex] == cassette[cIndex]:\n LCS += read[rIndex]\n rIndex = rIndex + 1\n cIndex = cIndex + 1\n else:\n break\n return LCS\n\n\ndef findMaxLCS(read, cassettes, rIndex, cIndex):\n maxLCS = ''\n for i in range(0, len(cassettes)):\n LCS = findLCS(read, cassettes[i], rIndex, cIndex, cassettes)\n if len(LCS) > len(maxLCS):\n maxLCS = LCS\n rIndex = rIndex + len(maxLCS)\n cIndex = cIndex + len(maxLCS)\n return maxLCS, rIndex, cIndex\n\n\n<mask token>\n\n\ndef checkGap(LCS, cassettes, cIndex):\n if findConsensus(cassettes, cIndex) == '-':\n LCS = LCS + '-'\n cIndex = cIndex + 1\n return LCS, cIndex\n else:\n return LCS, cIndex\n\n\ndef deletenuc(read, cassettes, rIndex, cIndex):\n if len(findMaxLCS(read, cassettes, rIndex + 1, cIndex)) >= 3:\n return True\n else:\n return False\n\n\ndef insertnuc(LCS, read, cassettes, rIndex, cIndex):\n if len(findMaxLCS(read, cassettes, rIndex, cIndex + 1)) >= 3:\n return True\n else:\n return False\n\n\ndef align(read, cassettes):\n rIndex = 0\n cIndex = 0\n alignedRead = ''\n LCS = ''\n delrec = []\n insertrec = []\n substrec = []\n while rIndex <= len(read):\n LCS, rIndex, cIndex = findMaxLCS(read, cassettes, rIndex, cIndex)\n LCS, cIndex = checkGap(LCS, cassettes, cIndex)\n if len(LCS) <= 6:\n if insertnuc(LCS, read, cassettes, rIndex, cIndex) == True:\n insertrec.append(rIndex)\n nuc = findConsensus(cassettes, cIndex)\n cIndex = cIndex + 1\n LCS = LCS + nuc\n else:\n LCS, cIndex = checkGap(LCS, cassettes, cIndex)\n alignedRead = alignedRead + str(LCS)\n print(alignedRead)\n return alignedRead\n\n\ndef main():\n FASTA = input('Enter FASTA file:')\n reference = input('Enter reference file:')\n in_file = open(FASTA, 'r')\n in_file1 = open(reference, 'r')\n line_list = []\n line_list1 = []\n for line in in_file:\n line = line.strip()\n line_list.append(line)\n readnames = line_list[::2]\n reads = line_list[1::2]\n for line1 in 
in_file1:\n line1 = line1.strip()\n line_list1.append(line1)\n cassettes = line_list1[1::2]\n refnames = line_list1[::2]\n A = []\n for i in reads:\n alignedRead = align(i, cassettes)\n A.append(alignedRead)\n",
"step-5": "#!/usr/bin/python\n#import Bio\n\n \n\ndef findLCS(read, cassette, rIndex, cIndex,cassettes):\n \n LCS=''\n while True:\n if read[rIndex] == cassette[cIndex]:\n LCS+= read[rIndex]\n rIndex= rIndex +1\n cIndex= cIndex +1\n #elif checkLCS(cIndex,cassettes)==True:\n else:\n break\n\n #print(LCS)\n \n return LCS\n\ndef findMaxLCS(read, cassettes, rIndex, cIndex):\n #print(read)\n maxLCS=''\n #print(len(cassettes))\n for i in range (0,len(cassettes)):\n LCS=findLCS(read, cassettes[i],rIndex, cIndex,cassettes)\n \n if len(LCS) > len(maxLCS):\n \n maxLCS=LCS\n \n \n \n rIndex= rIndex+len(maxLCS)\n cIndex= cIndex+len(maxLCS)\n return maxLCS ,rIndex ,cIndex\n\ndef findConsensus(cassettes, cIndex):\n #print (cassettes)\n con=[]\n for i in range(0,len(cassettes[1])-26):\n holder=[]\n for j in range(0,len(cassettes)):\n holder.append(cassettes[j][i])\n con.append(holder)\n con2=[]\n for k in range (0,len(con)):\n if con[k].count('G')==16 or (con[k].count('G')==14) :\n con2.append('g')\n elif con[k].count('A')==16 or (con[k].count('A')==14): #con[k][1]=='-'\n con2.append('a')\n elif con[k].count('C')==16 or (con[k].count('C')==14):\n con2.append('c')\n elif con[k].count('T')==16 or (con[k].count('T')==14):\n con2.append('t')\n elif con[k].count('-')>=10:\n con2.append('-')\n else:\n con2.append('n')\n #print(con) \n #print(con2)\n\n return con2[cIndex]\n\ndef checkGap(LCS, cassettes, cIndex):\n \n #print(rIndex)\n #print(cIndex)\n\n #nuc= findConsensus(cassettes, cIndex)\n #LCS=LCS+ str(nuc)\n #cIndex=cIndex+1\n \n if findConsensus(cassettes, cIndex)== '-':\n LCS=LCS+'-'\n cIndex=cIndex+1\n return LCS, cIndex\n else:\n return LCS, cIndex\n #print(rIndex)\n #elif findConsens\n \n \n #elif (findConsensus(cassettes, cIndex)).isalpha():\n \n \n \n\ndef deletenuc(read, cassettes, rIndex, cIndex):\n\n if len(findMaxLCS(read, cassettes, rIndex+1, cIndex))>=3:\n \n return True\n else:\n return False\n \ndef insertnuc(LCS, read, cassettes, rIndex, cIndex):\n\n if 
len(findMaxLCS(read, cassettes, rIndex, cIndex+1))>=3:\n return True\n else:\n return False\n\n#def subsnuc(\n \n\n#def checkgaps(\n\n\ndef align(read, cassettes):\n #print(read)\n #print('hi')\n #print(cassettes)\n rIndex=0\n cIndex=0\n alignedRead=''\n LCS=''\n delrec=[]\n insertrec=[]\n substrec=[]\n \n #print(read)\n while rIndex<= len(read):\n #print(read)\n \n #print(len(read))\n #print(rIndex)\n LCS, rIndex, cIndex= findMaxLCS(read, cassettes,rIndex, cIndex)\n #print(rIndex)\n #print(cIndex)\n #print(LCS)\n LCS, cIndex= checkGap(LCS, cassettes,cIndex)\n \n #print(rIndex,cIndex)\n #print(LCS) \n \n #if deletenuc(read, cassettes, rIndex,cIndex)==True:\n #delrec.append(rIndex)\n #rIndex= rIndex+1\n if len(LCS)<=6 :\n #print (LCS, rIndex)\n #print('enter')\n if insertnuc(LCS, read, cassettes, rIndex, cIndex)==True:\n #print(True, LCS)\n insertrec.append(rIndex)\n nuc= findConsensus(cassettes, cIndex)\n cIndex=cIndex+1\n LCS= LCS+nuc\n else:\n LCS, cIndex= checkGap(LCS, cassettes,cIndex)\n \n #elif subsnuc(LCS, read, cassettes, rIndex, cIndex)==True:\n \n\n \n #else:\n # LCS, cIndex= checkLCS(LCS, cassettes,cIndex)\n\n \n \n\n \n # nuc= findConsensus(cassettes, cIndex)\n # LCS= LCS+nuc\n # cIndex=cIndex+1\n # rIndex=rIndex+1\n \n alignedRead= alignedRead+ str(LCS)\n print(alignedRead)\n \n return alignedRead\n\ndef main():\n FASTA=input('Enter FASTA file:')\n reference=input('Enter reference file:')\n in_file=open(FASTA, 'r')\n in_file1=open(reference,'r')\n\n\n line_list=[] \n line_list1=[]\n\n\n\n for line in in_file:\n line=line.strip()\n line_list.append(line)\n readnames=line_list[::2] #list of the read headers\n reads=line_list[1::2] #list of sequences only\n\n for line1 in in_file1:\n line1=line1.strip()\n line_list1.append(line1) \n cassettes=line_list1[1::2]\n refnames=line_list1[::2]\n\n #for i in cassettes:\n # print(len(i))\n #print(cassettes)\n #print(reads)\n A=[]\n for i in reads:\n #print(i[0])\n alignedRead=align(i,cassettes)\n 
A.append(alignedRead)\n #print(align(i,cassettes))\n #out = open(\"out.txt\", \"w\")\n #out.write(align(i, cassettes)\n #out.close()\n \n #print(A)\n #con=findConsensus(0,cassettes)\n #print(con)\n",
"step-ids": [
4,
5,
6,
7,
9
]
}
|
[
4,
5,
6,
7,
9
] |
# -*- encoding: utf-8 -*-
from openerp.tests.common import TransactionCase
from openerp.exceptions import ValidationError
class GlobalTestOpenAcademySession(TransactionCase):
    '''
    Global test for the openacademy.session model: create sessions,
    trigger the instructor/attendee constraint, and drive the workflow.
    '''

    # Pseudo-constructor methods
    def setUp(self):
        # Shared fixtures: the session model plus partner/course records
        # resolved by XML id (presumably demo data — confirm module data).
        super(GlobalTestOpenAcademySession, self).setUp()
        self.session = self.env['openacademy.session']
        self.partner_vauxoo = self.env.ref('base.res_partner_23')
        self.course_id = self.env.ref('openacademy.course3')
        self.partner_attende = self.env.ref('base.res_partner_5')

    # Generic Methods

    # Test Methods
    def test_05_instructor_is_attendee(self):
        '''
        Check raise: "A session's instructor can't be an attendee"
        '''
        # (6, 0, ids) replaces the attendee set; instructor == attendee
        # must trigger the model's ValidationError constraint.
        with self.assertRaisesRegexp(
                ValidationError,
                "A session's instructor can't be an attendee"):
            self.session.create({
                'name': 'Session Test 1',
                'seats': 1,
                'user_id': self.partner_vauxoo.id,
                'attendee_ids': [(6, 0, [self.partner_vauxoo.id])],
                'course_id': self.course_id.id
            })

    def test_10_wkf_done(self):
        '''
        Check that the workflow transitions draft -> confirmed -> done.
        '''
        session_test = self.session.create({
            'name': 'Session Test 2',
            'seats': 2,
            'user_id': self.partner_vauxoo.id,
            'attendee_ids': [(6, 0, [self.partner_attende.id])],
            'course_id': self.course_id.id
        })
        # Check initial state
        self.assertEqual(session_test.state, 'draft', 'Initial state should '
                         'be in draft')
        # Fire the confirm signal and check the resulting state
        session_test.signal_workflow('button_confirm')
        self.assertEqual(session_test.state, 'confirmed', "Signal Confirm "
                         "don't work")
        # Fire the done signal and check the resulting state
        session_test.signal_workflow('button_done')
        self.assertEqual(session_test.state, 'done', "Signal Done don't work")
        # self.env.cr.commit() Only for test data generated for test.
        # Please don't use
|
normal
|
{
"blob_id": "7edd833103e1de92e57559c8a75379c26266963b",
"index": 7835,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass GlobalTestOpenAcademySession(TransactionCase):\n <mask token>\n\n def setUp(self):\n super(GlobalTestOpenAcademySession, self).setUp()\n self.session = self.env['openacademy.session']\n self.partner_vauxoo = self.env.ref('base.res_partner_23')\n self.course_id = self.env.ref('openacademy.course3')\n self.partner_attende = self.env.ref('base.res_partner_5')\n\n def test_05_instructor_is_attendee(self):\n \"\"\"\n Check raise: \"A session's instructor can't be an attendee\"\n \"\"\"\n with self.assertRaisesRegexp(ValidationError,\n \"A session's instructor can't be an attendee\"):\n self.session.create({'name': 'Session Test 1', 'seats': 1,\n 'user_id': self.partner_vauxoo.id, 'attendee_ids': [(6, 0,\n [self.partner_vauxoo.id])], 'course_id': self.course_id.id})\n\n def test_10_wkf_done(self):\n \"\"\"\n Check that workflow work fine!\n \"\"\"\n session_test = self.session.create({'name': 'Session Test 2',\n 'seats': 2, 'user_id': self.partner_vauxoo.id, 'attendee_ids':\n [(6, 0, [self.partner_attende.id])], 'course_id': self.\n course_id.id})\n self.assertEqual(session_test.state, 'draft',\n 'Initial state should be in draft')\n session_test.signal_workflow('button_confirm')\n self.assertEqual(session_test.state, 'confirmed',\n \"Signal Confirm don't work\")\n session_test.signal_workflow('button_done')\n self.assertEqual(session_test.state, 'done', \"Signal Done don't work\")\n",
"step-3": "<mask token>\n\n\nclass GlobalTestOpenAcademySession(TransactionCase):\n \"\"\"\n Global Test to openacademy session model.\n Test create session and trigger constraint\n \"\"\"\n\n def setUp(self):\n super(GlobalTestOpenAcademySession, self).setUp()\n self.session = self.env['openacademy.session']\n self.partner_vauxoo = self.env.ref('base.res_partner_23')\n self.course_id = self.env.ref('openacademy.course3')\n self.partner_attende = self.env.ref('base.res_partner_5')\n\n def test_05_instructor_is_attendee(self):\n \"\"\"\n Check raise: \"A session's instructor can't be an attendee\"\n \"\"\"\n with self.assertRaisesRegexp(ValidationError,\n \"A session's instructor can't be an attendee\"):\n self.session.create({'name': 'Session Test 1', 'seats': 1,\n 'user_id': self.partner_vauxoo.id, 'attendee_ids': [(6, 0,\n [self.partner_vauxoo.id])], 'course_id': self.course_id.id})\n\n def test_10_wkf_done(self):\n \"\"\"\n Check that workflow work fine!\n \"\"\"\n session_test = self.session.create({'name': 'Session Test 2',\n 'seats': 2, 'user_id': self.partner_vauxoo.id, 'attendee_ids':\n [(6, 0, [self.partner_attende.id])], 'course_id': self.\n course_id.id})\n self.assertEqual(session_test.state, 'draft',\n 'Initial state should be in draft')\n session_test.signal_workflow('button_confirm')\n self.assertEqual(session_test.state, 'confirmed',\n \"Signal Confirm don't work\")\n session_test.signal_workflow('button_done')\n self.assertEqual(session_test.state, 'done', \"Signal Done don't work\")\n",
"step-4": "from openerp.tests.common import TransactionCase\nfrom openerp.exceptions import ValidationError\n\n\nclass GlobalTestOpenAcademySession(TransactionCase):\n \"\"\"\n Global Test to openacademy session model.\n Test create session and trigger constraint\n \"\"\"\n\n def setUp(self):\n super(GlobalTestOpenAcademySession, self).setUp()\n self.session = self.env['openacademy.session']\n self.partner_vauxoo = self.env.ref('base.res_partner_23')\n self.course_id = self.env.ref('openacademy.course3')\n self.partner_attende = self.env.ref('base.res_partner_5')\n\n def test_05_instructor_is_attendee(self):\n \"\"\"\n Check raise: \"A session's instructor can't be an attendee\"\n \"\"\"\n with self.assertRaisesRegexp(ValidationError,\n \"A session's instructor can't be an attendee\"):\n self.session.create({'name': 'Session Test 1', 'seats': 1,\n 'user_id': self.partner_vauxoo.id, 'attendee_ids': [(6, 0,\n [self.partner_vauxoo.id])], 'course_id': self.course_id.id})\n\n def test_10_wkf_done(self):\n \"\"\"\n Check that workflow work fine!\n \"\"\"\n session_test = self.session.create({'name': 'Session Test 2',\n 'seats': 2, 'user_id': self.partner_vauxoo.id, 'attendee_ids':\n [(6, 0, [self.partner_attende.id])], 'course_id': self.\n course_id.id})\n self.assertEqual(session_test.state, 'draft',\n 'Initial state should be in draft')\n session_test.signal_workflow('button_confirm')\n self.assertEqual(session_test.state, 'confirmed',\n \"Signal Confirm don't work\")\n session_test.signal_workflow('button_done')\n self.assertEqual(session_test.state, 'done', \"Signal Done don't work\")\n",
"step-5": "# -*- encoding: utf-8 -*-\n\nfrom openerp.tests.common import TransactionCase\nfrom openerp.exceptions import ValidationError\n\n\nclass GlobalTestOpenAcademySession(TransactionCase):\n '''\n Global Test to openacademy session model.\n Test create session and trigger constraint\n '''\n\n # Pseudo-constructor methods\n def setUp(self):\n # Define Global Variable to tests methods\n super(GlobalTestOpenAcademySession, self).setUp()\n self.session = self.env['openacademy.session']\n self.partner_vauxoo = self.env.ref('base.res_partner_23')\n self.course_id = self.env.ref('openacademy.course3')\n self.partner_attende = self.env.ref('base.res_partner_5')\n\n # Generic Methods\n\n # Test Methods\n def test_05_instructor_is_attendee(self):\n '''\n Check raise: \"A session's instructor can't be an attendee\"\n '''\n with self.assertRaisesRegexp(\n ValidationError,\n \"A session's instructor can't be an attendee\"):\n self.session.create({\n 'name': 'Session Test 1',\n 'seats': 1,\n 'user_id': self.partner_vauxoo.id,\n 'attendee_ids': [(6, 0, [self.partner_vauxoo.id])],\n 'course_id': self.course_id.id\n })\n\n def test_10_wkf_done(self):\n '''\n Check that workflow work fine!\n '''\n session_test = self.session.create({\n 'name': 'Session Test 2',\n 'seats': 2,\n 'user_id': self.partner_vauxoo.id,\n 'attendee_ids': [(6, 0, [self.partner_attende.id])],\n 'course_id': self.course_id.id\n })\n # Check Initial State\n self.assertEqual(session_test.state, 'draft', 'Initial state should '\n 'be in draft')\n # Check next state an check it\n session_test.signal_workflow('button_confirm')\n self.assertEqual(session_test.state, 'confirmed', \"Signal Confirm \"\n \"don't work\")\n # Check next state an check it\n session_test.signal_workflow('button_done')\n self.assertEqual(session_test.state, 'done', \"Signal Done don't work\")\n # self.env.cr.commit() Only for test data generated for test.\n # Please don't use\n",
"step-ids": [
0,
4,
5,
6,
7
]
}
|
[
0,
4,
5,
6,
7
] |
"""
Given a random set of numbers, Print them in sorted order.
Example 1:
Input:
N = 4
arr[] = {1, 5, 3, 2}
Output: {1, 2, 3, 5}
Explanation: After sorting array will
be like {1, 2, 3, 5}.
"""
def sortarray(arr):
    """Sort `arr` in place with insertion sort and return it.

    Worst-case time is O(n^2) (the original comment's "n*log n" claim was
    wrong for insertion sort); best case is O(n) on already-sorted input.
    """
    for pos in range(1, len(arr)):
        current = arr[pos]
        slot = pos
        # Shift larger elements right until current's slot is found.
        while slot > 0 and arr[slot - 1] > current:
            arr[slot] = arr[slot - 1]
            slot -= 1
        arr[slot] = current
    return arr

print(sortarray([1, 5, 2, 3]))
|
normal
|
{
"blob_id": "70cef88f3fe93d370e5d21a2b00b761ce530a099",
"index": 6366,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef sortarray(arr):\n for i in range(1, len(arr)):\n key = arr[i]\n j = i - 1\n while j >= 0 and arr[j] > key:\n arr[j + 1] = arr[j]\n j -= 1\n arr[j + 1] = key\n return arr\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef sortarray(arr):\n for i in range(1, len(arr)):\n key = arr[i]\n j = i - 1\n while j >= 0 and arr[j] > key:\n arr[j + 1] = arr[j]\n j -= 1\n arr[j + 1] = key\n return arr\n\n\nprint(sortarray([1, 5, 2, 3]))\n",
"step-4": "\"\"\"\nGiven a random set of numbers, Print them in sorted order.\n\nExample 1:\n\nInput:\nN = 4\narr[] = {1, 5, 3, 2}\nOutput: {1, 2, 3, 5}\nExplanation: After sorting array will \nbe like {1, 2, 3, 5}.\n\"\"\"\n#complexity--> n*log n\ndef sortarray(arr):\n for i in range(1,len(arr)):\n key=arr[i]\n j=i-1\n while(j>=0 and arr[j]>key):\n arr[j+1]=arr[j]\n j-=1\n arr[j+1]=key\n return arr\n\nprint(sortarray([1,5,2,3]))",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
class HashTable:
    """Minimal associative container: each (chave, valor) pair is stored as
    its own single-entry dict appended to `dados`.

    NOTE: despite the name there is no bucketing yet — `hash` only converts
    the key, and `__put` appends unconditionally, ignoring its index.
    """

    def __init__(self):
        # Backing store: a list of one-entry {chave: valor} dicts.
        self.dados = []

    def hash(self, chave):
        """Hash a key by converting it to int (ValueError for non-numeric)."""
        return int(chave)

    def __put(self, indice, chave, valor):
        # `indice` (renamed from `int`, which shadowed the builtin) is
        # currently unused; kept for the planned bucketed implementation.
        self.dados.append({chave: valor})
"""
backup = dados
dados = novo_array(t * 2)
for elemento in backup:
hash = hash(elemento.chave)
__put(h % (t * 2), elemento.chave, elemento.valor)
"""
|
normal
|
{
"blob_id": "f14d46bedd5f6e0081a982251ad45e95860ef310",
"index": 209,
"step-1": "class HashTable:\n <mask token>\n <mask token>\n <mask token>\n\n\n<mask token>\n",
"step-2": "class HashTable:\n <mask token>\n\n def hash(self, chave):\n return int(chave)\n <mask token>\n\n\n<mask token>\n",
"step-3": "class HashTable:\n <mask token>\n\n def hash(self, chave):\n return int(chave)\n\n def __put(self, int, chave, valor):\n self.dados.append({chave: valor})\n\n\n<mask token>\n",
"step-4": "class HashTable:\n\n def __init__(self):\n self.dados = []\n\n def hash(self, chave):\n return int(chave)\n\n def __put(self, int, chave, valor):\n self.dados.append({chave: valor})\n\n\n<mask token>\n",
"step-5": "class HashTable:\n def __init__(self):\n self.dados = []\n\n def hash(self, chave):\n return int(chave)\n \n def __put(self, int, chave, valor):\n self.dados.append({chave: valor})\n\n\n\"\"\"\nbackup = dados\ndados = novo_array(t * 2)\n\nfor elemento in backup:\n hash = hash(elemento.chave)\n __put(h % (t * 2), elemento.chave, elemento.valor)\n\"\"\"",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
from django.shortcuts import render
from django.views.generic import TemplateView
from django.conf import settings
import os, csv
class InflationView(TemplateView):
template_name = 'inflation.html'
def get(self, request, *args, **kwargs):
# чтение csv-файла и заполнение контекста
context = {}
file_path = os.path.join(settings.BASE_DIR, 'inflation_russia.csv')
with open(file_path, newline='', encoding='utf-8') as csvfile:
reader = csv.reader(csvfile, delimiter=';')
context['head'] = next(reader)
context['data'] = []
for row in reader:
context['data'].append(row)
return render(request, self.template_name, context)
|
normal
|
{
"blob_id": "6645887b25d75f4657fb231b80d8ebdec2bac7c9",
"index": 8718,
"step-1": "<mask token>\n\n\nclass InflationView(TemplateView):\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass InflationView(TemplateView):\n <mask token>\n\n def get(self, request, *args, **kwargs):\n context = {}\n file_path = os.path.join(settings.BASE_DIR, 'inflation_russia.csv')\n with open(file_path, newline='', encoding='utf-8') as csvfile:\n reader = csv.reader(csvfile, delimiter=';')\n context['head'] = next(reader)\n context['data'] = []\n for row in reader:\n context['data'].append(row)\n return render(request, self.template_name, context)\n",
"step-3": "<mask token>\n\n\nclass InflationView(TemplateView):\n template_name = 'inflation.html'\n\n def get(self, request, *args, **kwargs):\n context = {}\n file_path = os.path.join(settings.BASE_DIR, 'inflation_russia.csv')\n with open(file_path, newline='', encoding='utf-8') as csvfile:\n reader = csv.reader(csvfile, delimiter=';')\n context['head'] = next(reader)\n context['data'] = []\n for row in reader:\n context['data'].append(row)\n return render(request, self.template_name, context)\n",
"step-4": "from django.shortcuts import render\nfrom django.views.generic import TemplateView\nfrom django.conf import settings\nimport os, csv\n\n\nclass InflationView(TemplateView):\n template_name = 'inflation.html'\n\n def get(self, request, *args, **kwargs):\n context = {}\n file_path = os.path.join(settings.BASE_DIR, 'inflation_russia.csv')\n with open(file_path, newline='', encoding='utf-8') as csvfile:\n reader = csv.reader(csvfile, delimiter=';')\n context['head'] = next(reader)\n context['data'] = []\n for row in reader:\n context['data'].append(row)\n return render(request, self.template_name, context)\n",
"step-5": "from django.shortcuts import render\nfrom django.views.generic import TemplateView\nfrom django.conf import settings\nimport os, csv\n\n\nclass InflationView(TemplateView):\n template_name = 'inflation.html'\n\n def get(self, request, *args, **kwargs):\n # чтение csv-файла и заполнение контекста\n context = {}\n file_path = os.path.join(settings.BASE_DIR, 'inflation_russia.csv')\n with open(file_path, newline='', encoding='utf-8') as csvfile:\n reader = csv.reader(csvfile, delimiter=';')\n context['head'] = next(reader)\n context['data'] = []\n for row in reader:\n context['data'].append(row)\n return render(request, self.template_name, context)\n\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
from django import forms
from .models import Diagnosis, TODOItem
class DiagnosisForm(forms.ModelForm):
class Meta:
model = Diagnosis
fields = ['name', 'Rostered_physician', 'condition', 'details',
'date_of_diagnosis', 'content']
class TODOItemForm(forms.ModelForm):
class Meta:
model = TODOItem
fields = ['job', 'due_date', 'medication_details', 'completed']
|
normal
|
{
"blob_id": "aa6464c53176be9d89c6c06997001da2b3ee1e5c",
"index": 6583,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass TODOItemForm(forms.ModelForm):\n\n\n class Meta:\n model = TODOItem\n fields = ['job', 'due_date', 'medication_details', 'completed']\n",
"step-3": "<mask token>\n\n\nclass DiagnosisForm(forms.ModelForm):\n\n\n class Meta:\n model = Diagnosis\n fields = ['name', 'Rostered_physician', 'condition', 'details',\n 'date_of_diagnosis', 'content']\n\n\nclass TODOItemForm(forms.ModelForm):\n\n\n class Meta:\n model = TODOItem\n fields = ['job', 'due_date', 'medication_details', 'completed']\n",
"step-4": "from django import forms\nfrom .models import Diagnosis, TODOItem\n\n\nclass DiagnosisForm(forms.ModelForm):\n\n\n class Meta:\n model = Diagnosis\n fields = ['name', 'Rostered_physician', 'condition', 'details',\n 'date_of_diagnosis', 'content']\n\n\nclass TODOItemForm(forms.ModelForm):\n\n\n class Meta:\n model = TODOItem\n fields = ['job', 'due_date', 'medication_details', 'completed']\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
string="Rutuja MaluSare"
print(string.casefold())
print(len(string))
"""string1=input("enter string 1")
print("string1")
print(len(string1))
string2=input("enter string 2")
print("string2")
print(len(string2))
string3=string1+string2
print(len(string3))"""
#lower case
print(string.lower())
#upper case
print(string.upper())
#strip =removes white spaces from start and end
a=" hello "
print(a)
print(a.strip())
#isdigit
b= 12
print(b)
|
normal
|
{
"blob_id": "024bc95f7255bb8be5c3c4ade9d212c9555a4f01",
"index": 3034,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(string.casefold())\nprint(len(string))\n<mask token>\nprint(string.lower())\nprint(string.upper())\n<mask token>\nprint(a)\nprint(a.strip())\n<mask token>\nprint(b)\n",
"step-3": "string = 'Rutuja MaluSare'\nprint(string.casefold())\nprint(len(string))\n<mask token>\nprint(string.lower())\nprint(string.upper())\na = ' hello '\nprint(a)\nprint(a.strip())\nb = 12\nprint(b)\n",
"step-4": "string=\"Rutuja MaluSare\"\nprint(string.casefold())\nprint(len(string))\n\n\"\"\"string1=input(\"enter string 1\")\nprint(\"string1\")\nprint(len(string1))\n\nstring2=input(\"enter string 2\")\nprint(\"string2\")\nprint(len(string2))\n\n\nstring3=string1+string2\nprint(len(string3))\"\"\"\n\n#lower case\nprint(string.lower())\n#upper case\nprint(string.upper())\n\n#strip =removes white spaces from start and end\na=\" hello \"\nprint(a)\nprint(a.strip())\n\n#isdigit\nb= 12\nprint(b)\n\n\n\n\n\n\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
class RDt(RPackage):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
version('0.23', sha256=
'360ae2fcb1141125a1b16448570fc37d14c4dd3f78a872c26df4fda1787cdc70')
version('0.20', sha256=
'c66d7f49ec101fdbb91c6d26c06fb1373f9ebdefe29fe99f2ae1a641220aba9f')
version('0.17', sha256=
'e3430292421dcc2b6ad5f2deda729f0603da4eb31f86d071833e6e11abf3fb56')
version('0.13', sha256=
'79a073fe96980ce150d790ab76133c9e80bd463270c34d149c03934a622d63b5')
version('0.8', sha256=
'90195054148806cf31c7db5c41f72d5389c75adc0b1183606a9babd2c6ae8e21')
version('0.7', sha256=
'1de3f170deccd9e3aaefc057dd87c498e3b3f7f88eff645cf165ac34ffe3de2c')
version('0.6', sha256=
'2ed68e9d161559171fa74b6105eee87b98acf755eae072b38ada60a83d427916')
version('0.4', sha256=
'3daa96b819ca54e5fbc2c7d78cb3637982a2d44be58cea0683663b71cfc7fa19')
version('0.3', sha256=
'ef42b24c9ea6cfa1ce089687bf858d773ac495dc80756d4475234e979bd437eb')
version('0.2', sha256=
'a1b7f9e5c31a241fdf78ac582499f346e915ff948554980bbc2262c924b806bd')
version('0.1', sha256=
'129bdafededbdcc3279d63b16f00c885b215f23cab2edfe33c9cbe177c8c4756')
depends_on('r-htmltools@0.3.6:', type=('build', 'run'))
depends_on('r-htmlwidgets@1.3:', type=('build', 'run'))
depends_on('r-jsonlite@0.9.16:', type=('build', 'run'), when='@0.8:')
depends_on('r-magrittr', type=('build', 'run'))
depends_on('r-crosstalk', type=('build', 'run'))
depends_on('r-jquerylib', type=('build', 'run'), when='@0.19:')
depends_on('r-promises', type=('build', 'run'), when='@0.5:')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class RDt(RPackage):
<|reserved_special_token_0|>
cran = 'DT'
version('0.23', sha256=
'360ae2fcb1141125a1b16448570fc37d14c4dd3f78a872c26df4fda1787cdc70')
version('0.20', sha256=
'c66d7f49ec101fdbb91c6d26c06fb1373f9ebdefe29fe99f2ae1a641220aba9f')
version('0.17', sha256=
'e3430292421dcc2b6ad5f2deda729f0603da4eb31f86d071833e6e11abf3fb56')
version('0.13', sha256=
'79a073fe96980ce150d790ab76133c9e80bd463270c34d149c03934a622d63b5')
version('0.8', sha256=
'90195054148806cf31c7db5c41f72d5389c75adc0b1183606a9babd2c6ae8e21')
version('0.7', sha256=
'1de3f170deccd9e3aaefc057dd87c498e3b3f7f88eff645cf165ac34ffe3de2c')
version('0.6', sha256=
'2ed68e9d161559171fa74b6105eee87b98acf755eae072b38ada60a83d427916')
version('0.4', sha256=
'3daa96b819ca54e5fbc2c7d78cb3637982a2d44be58cea0683663b71cfc7fa19')
version('0.3', sha256=
'ef42b24c9ea6cfa1ce089687bf858d773ac495dc80756d4475234e979bd437eb')
version('0.2', sha256=
'a1b7f9e5c31a241fdf78ac582499f346e915ff948554980bbc2262c924b806bd')
version('0.1', sha256=
'129bdafededbdcc3279d63b16f00c885b215f23cab2edfe33c9cbe177c8c4756')
depends_on('r-htmltools@0.3.6:', type=('build', 'run'))
depends_on('r-htmlwidgets@1.3:', type=('build', 'run'))
depends_on('r-jsonlite@0.9.16:', type=('build', 'run'), when='@0.8:')
depends_on('r-magrittr', type=('build', 'run'))
depends_on('r-crosstalk', type=('build', 'run'))
depends_on('r-jquerylib', type=('build', 'run'), when='@0.19:')
depends_on('r-promises', type=('build', 'run'), when='@0.5:')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class RDt(RPackage):
"""A Wrapper of the JavaScript Library 'DataTables'.
Data objects in R can be rendered as HTML tables using the JavaScript
library 'DataTables' (typically via R Markdown or Shiny). The 'DataTables'
library has been included in this R package. The package name 'DT' is an
abbreviation of 'DataTables'."""
cran = 'DT'
version('0.23', sha256=
'360ae2fcb1141125a1b16448570fc37d14c4dd3f78a872c26df4fda1787cdc70')
version('0.20', sha256=
'c66d7f49ec101fdbb91c6d26c06fb1373f9ebdefe29fe99f2ae1a641220aba9f')
version('0.17', sha256=
'e3430292421dcc2b6ad5f2deda729f0603da4eb31f86d071833e6e11abf3fb56')
version('0.13', sha256=
'79a073fe96980ce150d790ab76133c9e80bd463270c34d149c03934a622d63b5')
version('0.8', sha256=
'90195054148806cf31c7db5c41f72d5389c75adc0b1183606a9babd2c6ae8e21')
version('0.7', sha256=
'1de3f170deccd9e3aaefc057dd87c498e3b3f7f88eff645cf165ac34ffe3de2c')
version('0.6', sha256=
'2ed68e9d161559171fa74b6105eee87b98acf755eae072b38ada60a83d427916')
version('0.4', sha256=
'3daa96b819ca54e5fbc2c7d78cb3637982a2d44be58cea0683663b71cfc7fa19')
version('0.3', sha256=
'ef42b24c9ea6cfa1ce089687bf858d773ac495dc80756d4475234e979bd437eb')
version('0.2', sha256=
'a1b7f9e5c31a241fdf78ac582499f346e915ff948554980bbc2262c924b806bd')
version('0.1', sha256=
'129bdafededbdcc3279d63b16f00c885b215f23cab2edfe33c9cbe177c8c4756')
depends_on('r-htmltools@0.3.6:', type=('build', 'run'))
depends_on('r-htmlwidgets@1.3:', type=('build', 'run'))
depends_on('r-jsonlite@0.9.16:', type=('build', 'run'), when='@0.8:')
depends_on('r-magrittr', type=('build', 'run'))
depends_on('r-crosstalk', type=('build', 'run'))
depends_on('r-jquerylib', type=('build', 'run'), when='@0.19:')
depends_on('r-promises', type=('build', 'run'), when='@0.5:')
<|reserved_special_token_1|>
from spack.package import *
class RDt(RPackage):
"""A Wrapper of the JavaScript Library 'DataTables'.
Data objects in R can be rendered as HTML tables using the JavaScript
library 'DataTables' (typically via R Markdown or Shiny). The 'DataTables'
library has been included in this R package. The package name 'DT' is an
abbreviation of 'DataTables'."""
cran = 'DT'
version('0.23', sha256=
'360ae2fcb1141125a1b16448570fc37d14c4dd3f78a872c26df4fda1787cdc70')
version('0.20', sha256=
'c66d7f49ec101fdbb91c6d26c06fb1373f9ebdefe29fe99f2ae1a641220aba9f')
version('0.17', sha256=
'e3430292421dcc2b6ad5f2deda729f0603da4eb31f86d071833e6e11abf3fb56')
version('0.13', sha256=
'79a073fe96980ce150d790ab76133c9e80bd463270c34d149c03934a622d63b5')
version('0.8', sha256=
'90195054148806cf31c7db5c41f72d5389c75adc0b1183606a9babd2c6ae8e21')
version('0.7', sha256=
'1de3f170deccd9e3aaefc057dd87c498e3b3f7f88eff645cf165ac34ffe3de2c')
version('0.6', sha256=
'2ed68e9d161559171fa74b6105eee87b98acf755eae072b38ada60a83d427916')
version('0.4', sha256=
'3daa96b819ca54e5fbc2c7d78cb3637982a2d44be58cea0683663b71cfc7fa19')
version('0.3', sha256=
'ef42b24c9ea6cfa1ce089687bf858d773ac495dc80756d4475234e979bd437eb')
version('0.2', sha256=
'a1b7f9e5c31a241fdf78ac582499f346e915ff948554980bbc2262c924b806bd')
version('0.1', sha256=
'129bdafededbdcc3279d63b16f00c885b215f23cab2edfe33c9cbe177c8c4756')
depends_on('r-htmltools@0.3.6:', type=('build', 'run'))
depends_on('r-htmlwidgets@1.3:', type=('build', 'run'))
depends_on('r-jsonlite@0.9.16:', type=('build', 'run'), when='@0.8:')
depends_on('r-magrittr', type=('build', 'run'))
depends_on('r-crosstalk', type=('build', 'run'))
depends_on('r-jquerylib', type=('build', 'run'), when='@0.19:')
depends_on('r-promises', type=('build', 'run'), when='@0.5:')
<|reserved_special_token_1|>
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *
class RDt(RPackage):
"""A Wrapper of the JavaScript Library 'DataTables'.
Data objects in R can be rendered as HTML tables using the JavaScript
library 'DataTables' (typically via R Markdown or Shiny). The 'DataTables'
library has been included in this R package. The package name 'DT' is an
abbreviation of 'DataTables'."""
cran = "DT"
version("0.23", sha256="360ae2fcb1141125a1b16448570fc37d14c4dd3f78a872c26df4fda1787cdc70")
version("0.20", sha256="c66d7f49ec101fdbb91c6d26c06fb1373f9ebdefe29fe99f2ae1a641220aba9f")
version("0.17", sha256="e3430292421dcc2b6ad5f2deda729f0603da4eb31f86d071833e6e11abf3fb56")
version("0.13", sha256="79a073fe96980ce150d790ab76133c9e80bd463270c34d149c03934a622d63b5")
version("0.8", sha256="90195054148806cf31c7db5c41f72d5389c75adc0b1183606a9babd2c6ae8e21")
version("0.7", sha256="1de3f170deccd9e3aaefc057dd87c498e3b3f7f88eff645cf165ac34ffe3de2c")
version("0.6", sha256="2ed68e9d161559171fa74b6105eee87b98acf755eae072b38ada60a83d427916")
version("0.4", sha256="3daa96b819ca54e5fbc2c7d78cb3637982a2d44be58cea0683663b71cfc7fa19")
version("0.3", sha256="ef42b24c9ea6cfa1ce089687bf858d773ac495dc80756d4475234e979bd437eb")
version("0.2", sha256="a1b7f9e5c31a241fdf78ac582499f346e915ff948554980bbc2262c924b806bd")
version("0.1", sha256="129bdafededbdcc3279d63b16f00c885b215f23cab2edfe33c9cbe177c8c4756")
depends_on("r-htmltools@0.3.6:", type=("build", "run"))
depends_on("r-htmlwidgets@1.3:", type=("build", "run"))
depends_on("r-jsonlite@0.9.16:", type=("build", "run"), when="@0.8:")
depends_on("r-magrittr", type=("build", "run"))
depends_on("r-crosstalk", type=("build", "run"))
depends_on("r-jquerylib", type=("build", "run"), when="@0.19:")
depends_on("r-promises", type=("build", "run"), when="@0.5:")
|
flexible
|
{
"blob_id": "c88e2336432f93d95b4e2285aa532b673a4a410b",
"index": 1095,
"step-1": "<mask token>\n\n\nclass RDt(RPackage):\n <mask token>\n <mask token>\n version('0.23', sha256=\n '360ae2fcb1141125a1b16448570fc37d14c4dd3f78a872c26df4fda1787cdc70')\n version('0.20', sha256=\n 'c66d7f49ec101fdbb91c6d26c06fb1373f9ebdefe29fe99f2ae1a641220aba9f')\n version('0.17', sha256=\n 'e3430292421dcc2b6ad5f2deda729f0603da4eb31f86d071833e6e11abf3fb56')\n version('0.13', sha256=\n '79a073fe96980ce150d790ab76133c9e80bd463270c34d149c03934a622d63b5')\n version('0.8', sha256=\n '90195054148806cf31c7db5c41f72d5389c75adc0b1183606a9babd2c6ae8e21')\n version('0.7', sha256=\n '1de3f170deccd9e3aaefc057dd87c498e3b3f7f88eff645cf165ac34ffe3de2c')\n version('0.6', sha256=\n '2ed68e9d161559171fa74b6105eee87b98acf755eae072b38ada60a83d427916')\n version('0.4', sha256=\n '3daa96b819ca54e5fbc2c7d78cb3637982a2d44be58cea0683663b71cfc7fa19')\n version('0.3', sha256=\n 'ef42b24c9ea6cfa1ce089687bf858d773ac495dc80756d4475234e979bd437eb')\n version('0.2', sha256=\n 'a1b7f9e5c31a241fdf78ac582499f346e915ff948554980bbc2262c924b806bd')\n version('0.1', sha256=\n '129bdafededbdcc3279d63b16f00c885b215f23cab2edfe33c9cbe177c8c4756')\n depends_on('r-htmltools@0.3.6:', type=('build', 'run'))\n depends_on('r-htmlwidgets@1.3:', type=('build', 'run'))\n depends_on('r-jsonlite@0.9.16:', type=('build', 'run'), when='@0.8:')\n depends_on('r-magrittr', type=('build', 'run'))\n depends_on('r-crosstalk', type=('build', 'run'))\n depends_on('r-jquerylib', type=('build', 'run'), when='@0.19:')\n depends_on('r-promises', type=('build', 'run'), when='@0.5:')\n",
"step-2": "<mask token>\n\n\nclass RDt(RPackage):\n <mask token>\n cran = 'DT'\n version('0.23', sha256=\n '360ae2fcb1141125a1b16448570fc37d14c4dd3f78a872c26df4fda1787cdc70')\n version('0.20', sha256=\n 'c66d7f49ec101fdbb91c6d26c06fb1373f9ebdefe29fe99f2ae1a641220aba9f')\n version('0.17', sha256=\n 'e3430292421dcc2b6ad5f2deda729f0603da4eb31f86d071833e6e11abf3fb56')\n version('0.13', sha256=\n '79a073fe96980ce150d790ab76133c9e80bd463270c34d149c03934a622d63b5')\n version('0.8', sha256=\n '90195054148806cf31c7db5c41f72d5389c75adc0b1183606a9babd2c6ae8e21')\n version('0.7', sha256=\n '1de3f170deccd9e3aaefc057dd87c498e3b3f7f88eff645cf165ac34ffe3de2c')\n version('0.6', sha256=\n '2ed68e9d161559171fa74b6105eee87b98acf755eae072b38ada60a83d427916')\n version('0.4', sha256=\n '3daa96b819ca54e5fbc2c7d78cb3637982a2d44be58cea0683663b71cfc7fa19')\n version('0.3', sha256=\n 'ef42b24c9ea6cfa1ce089687bf858d773ac495dc80756d4475234e979bd437eb')\n version('0.2', sha256=\n 'a1b7f9e5c31a241fdf78ac582499f346e915ff948554980bbc2262c924b806bd')\n version('0.1', sha256=\n '129bdafededbdcc3279d63b16f00c885b215f23cab2edfe33c9cbe177c8c4756')\n depends_on('r-htmltools@0.3.6:', type=('build', 'run'))\n depends_on('r-htmlwidgets@1.3:', type=('build', 'run'))\n depends_on('r-jsonlite@0.9.16:', type=('build', 'run'), when='@0.8:')\n depends_on('r-magrittr', type=('build', 'run'))\n depends_on('r-crosstalk', type=('build', 'run'))\n depends_on('r-jquerylib', type=('build', 'run'), when='@0.19:')\n depends_on('r-promises', type=('build', 'run'), when='@0.5:')\n",
"step-3": "<mask token>\n\n\nclass RDt(RPackage):\n \"\"\"A Wrapper of the JavaScript Library 'DataTables'.\n\n Data objects in R can be rendered as HTML tables using the JavaScript\n library 'DataTables' (typically via R Markdown or Shiny). The 'DataTables'\n library has been included in this R package. The package name 'DT' is an\n abbreviation of 'DataTables'.\"\"\"\n cran = 'DT'\n version('0.23', sha256=\n '360ae2fcb1141125a1b16448570fc37d14c4dd3f78a872c26df4fda1787cdc70')\n version('0.20', sha256=\n 'c66d7f49ec101fdbb91c6d26c06fb1373f9ebdefe29fe99f2ae1a641220aba9f')\n version('0.17', sha256=\n 'e3430292421dcc2b6ad5f2deda729f0603da4eb31f86d071833e6e11abf3fb56')\n version('0.13', sha256=\n '79a073fe96980ce150d790ab76133c9e80bd463270c34d149c03934a622d63b5')\n version('0.8', sha256=\n '90195054148806cf31c7db5c41f72d5389c75adc0b1183606a9babd2c6ae8e21')\n version('0.7', sha256=\n '1de3f170deccd9e3aaefc057dd87c498e3b3f7f88eff645cf165ac34ffe3de2c')\n version('0.6', sha256=\n '2ed68e9d161559171fa74b6105eee87b98acf755eae072b38ada60a83d427916')\n version('0.4', sha256=\n '3daa96b819ca54e5fbc2c7d78cb3637982a2d44be58cea0683663b71cfc7fa19')\n version('0.3', sha256=\n 'ef42b24c9ea6cfa1ce089687bf858d773ac495dc80756d4475234e979bd437eb')\n version('0.2', sha256=\n 'a1b7f9e5c31a241fdf78ac582499f346e915ff948554980bbc2262c924b806bd')\n version('0.1', sha256=\n '129bdafededbdcc3279d63b16f00c885b215f23cab2edfe33c9cbe177c8c4756')\n depends_on('r-htmltools@0.3.6:', type=('build', 'run'))\n depends_on('r-htmlwidgets@1.3:', type=('build', 'run'))\n depends_on('r-jsonlite@0.9.16:', type=('build', 'run'), when='@0.8:')\n depends_on('r-magrittr', type=('build', 'run'))\n depends_on('r-crosstalk', type=('build', 'run'))\n depends_on('r-jquerylib', type=('build', 'run'), when='@0.19:')\n depends_on('r-promises', type=('build', 'run'), when='@0.5:')\n",
"step-4": "from spack.package import *\n\n\nclass RDt(RPackage):\n \"\"\"A Wrapper of the JavaScript Library 'DataTables'.\n\n Data objects in R can be rendered as HTML tables using the JavaScript\n library 'DataTables' (typically via R Markdown or Shiny). The 'DataTables'\n library has been included in this R package. The package name 'DT' is an\n abbreviation of 'DataTables'.\"\"\"\n cran = 'DT'\n version('0.23', sha256=\n '360ae2fcb1141125a1b16448570fc37d14c4dd3f78a872c26df4fda1787cdc70')\n version('0.20', sha256=\n 'c66d7f49ec101fdbb91c6d26c06fb1373f9ebdefe29fe99f2ae1a641220aba9f')\n version('0.17', sha256=\n 'e3430292421dcc2b6ad5f2deda729f0603da4eb31f86d071833e6e11abf3fb56')\n version('0.13', sha256=\n '79a073fe96980ce150d790ab76133c9e80bd463270c34d149c03934a622d63b5')\n version('0.8', sha256=\n '90195054148806cf31c7db5c41f72d5389c75adc0b1183606a9babd2c6ae8e21')\n version('0.7', sha256=\n '1de3f170deccd9e3aaefc057dd87c498e3b3f7f88eff645cf165ac34ffe3de2c')\n version('0.6', sha256=\n '2ed68e9d161559171fa74b6105eee87b98acf755eae072b38ada60a83d427916')\n version('0.4', sha256=\n '3daa96b819ca54e5fbc2c7d78cb3637982a2d44be58cea0683663b71cfc7fa19')\n version('0.3', sha256=\n 'ef42b24c9ea6cfa1ce089687bf858d773ac495dc80756d4475234e979bd437eb')\n version('0.2', sha256=\n 'a1b7f9e5c31a241fdf78ac582499f346e915ff948554980bbc2262c924b806bd')\n version('0.1', sha256=\n '129bdafededbdcc3279d63b16f00c885b215f23cab2edfe33c9cbe177c8c4756')\n depends_on('r-htmltools@0.3.6:', type=('build', 'run'))\n depends_on('r-htmlwidgets@1.3:', type=('build', 'run'))\n depends_on('r-jsonlite@0.9.16:', type=('build', 'run'), when='@0.8:')\n depends_on('r-magrittr', type=('build', 'run'))\n depends_on('r-crosstalk', type=('build', 'run'))\n depends_on('r-jquerylib', type=('build', 'run'), when='@0.19:')\n depends_on('r-promises', type=('build', 'run'), when='@0.5:')\n",
"step-5": "# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other\n# Spack Project Developers. See the top-level COPYRIGHT file for details.\n#\n# SPDX-License-Identifier: (Apache-2.0 OR MIT)\n\nfrom spack.package import *\n\n\nclass RDt(RPackage):\n \"\"\"A Wrapper of the JavaScript Library 'DataTables'.\n\n Data objects in R can be rendered as HTML tables using the JavaScript\n library 'DataTables' (typically via R Markdown or Shiny). The 'DataTables'\n library has been included in this R package. The package name 'DT' is an\n abbreviation of 'DataTables'.\"\"\"\n\n cran = \"DT\"\n\n version(\"0.23\", sha256=\"360ae2fcb1141125a1b16448570fc37d14c4dd3f78a872c26df4fda1787cdc70\")\n version(\"0.20\", sha256=\"c66d7f49ec101fdbb91c6d26c06fb1373f9ebdefe29fe99f2ae1a641220aba9f\")\n version(\"0.17\", sha256=\"e3430292421dcc2b6ad5f2deda729f0603da4eb31f86d071833e6e11abf3fb56\")\n version(\"0.13\", sha256=\"79a073fe96980ce150d790ab76133c9e80bd463270c34d149c03934a622d63b5\")\n version(\"0.8\", sha256=\"90195054148806cf31c7db5c41f72d5389c75adc0b1183606a9babd2c6ae8e21\")\n version(\"0.7\", sha256=\"1de3f170deccd9e3aaefc057dd87c498e3b3f7f88eff645cf165ac34ffe3de2c\")\n version(\"0.6\", sha256=\"2ed68e9d161559171fa74b6105eee87b98acf755eae072b38ada60a83d427916\")\n version(\"0.4\", sha256=\"3daa96b819ca54e5fbc2c7d78cb3637982a2d44be58cea0683663b71cfc7fa19\")\n version(\"0.3\", sha256=\"ef42b24c9ea6cfa1ce089687bf858d773ac495dc80756d4475234e979bd437eb\")\n version(\"0.2\", sha256=\"a1b7f9e5c31a241fdf78ac582499f346e915ff948554980bbc2262c924b806bd\")\n version(\"0.1\", sha256=\"129bdafededbdcc3279d63b16f00c885b215f23cab2edfe33c9cbe177c8c4756\")\n\n depends_on(\"r-htmltools@0.3.6:\", type=(\"build\", \"run\"))\n depends_on(\"r-htmlwidgets@1.3:\", type=(\"build\", \"run\"))\n depends_on(\"r-jsonlite@0.9.16:\", type=(\"build\", \"run\"), when=\"@0.8:\")\n depends_on(\"r-magrittr\", type=(\"build\", \"run\"))\n depends_on(\"r-crosstalk\", type=(\"build\", \"run\"))\n 
depends_on(\"r-jquerylib\", type=(\"build\", \"run\"), when=\"@0.19:\")\n depends_on(\"r-promises\", type=(\"build\", \"run\"), when=\"@0.5:\")\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
class Write:
def __writeHeader(fd):
"""Write html header"""
print('<!DOCTYPE html>', '<html>', ' <head>',
' <title>Super Tableau 3000</title>',
" <meta charset='utf-8' />", ' <style>',
' table { border-collapse: collapse; }',
' td { border: solid; }', ' h4, li { font-size:10px; }',
' .empty { border: 0px; }', ' </style>', ' </head>',
' <body>', ' <table>', sep='\n', file=fd)
def __writeFooter(fd):
"""Write html footer"""
print(' </table>', ' </body>', '</html>', sep='\n', file=fd)
def __openRow(fd):
"""Write opening html table row"""
print(' <tr>', file=fd)
def __closeRow(fd):
"""Write closing html table row"""
print(' </tr>', file=fd)
def __writeElement(fd, elm):
"""Write html table cell"""
print(' <td>', ' <h4>' + elm['name'] + '</h4>', ' <ul>',
' <li>' + str(elm['number']) + '</li>', ' <li>' + elm
['small'] + '</li>', ' <li>' + str(elm['molar']) + '</li>',
' </ul>', ' </td>', sep='\n', file=fd)
def __writeEmptyElement(fd):
"""Write html empty table cell"""
print(" <td class='empty'></td>", file=fd)
def writeFile(filename):
"""Write our awesome html file"""
with open(filename, 'w') as f:
Write.__writeHeader(f)
Write.__openRow(f)
i = 0
for elm in Parse.data:
while i != elm['position']:
Write.__writeEmptyElement(f)
i += 1
Write.__writeElement(f, elm)
i += 1
if elm['position'] == 17:
i = 0
Write.__closeRow(f)
if elm['number'] != 118:
Write.__openRow(f)
Write.__writeFooter(f)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Parse:
<|reserved_special_token_0|>
def __parseLine(line):
"""Parse the given line"""
name_len = line.index(' ')
name = line[:name_len]
line = line[name_len + 3:]
elec_pos = line.index('electron') + 9
line = line[:elec_pos] + '[' + line[elec_pos:].replace(' ', ',') + ']'
line = line.replace(' ', '')
line = line.replace('small:', 'small:"').replace(',molar', '",molar')
for i in ['position', 'number', 'small', 'molar', 'electron']:
line = line.replace(i, '"' + i + '"')
return eval('{"name":"' + name + '",' + line + '}')
def parseFile(filename):
"""Parse the given file"""
Parse.data = []
with open(filename, 'r') as f:
for line in f:
Parse.data += [Parse.__parseLine(line)]
return Parse.data
class Write:
def __writeHeader(fd):
"""Write html header"""
print('<!DOCTYPE html>', '<html>', ' <head>',
' <title>Super Tableau 3000</title>',
" <meta charset='utf-8' />", ' <style>',
' table { border-collapse: collapse; }',
' td { border: solid; }', ' h4, li { font-size:10px; }',
' .empty { border: 0px; }', ' </style>', ' </head>',
' <body>', ' <table>', sep='\n', file=fd)
def __writeFooter(fd):
"""Write html footer"""
print(' </table>', ' </body>', '</html>', sep='\n', file=fd)
def __openRow(fd):
"""Write opening html table row"""
print(' <tr>', file=fd)
def __closeRow(fd):
"""Write closing html table row"""
print(' </tr>', file=fd)
def __writeElement(fd, elm):
"""Write html table cell"""
print(' <td>', ' <h4>' + elm['name'] + '</h4>', ' <ul>',
' <li>' + str(elm['number']) + '</li>', ' <li>' + elm
['small'] + '</li>', ' <li>' + str(elm['molar']) + '</li>',
' </ul>', ' </td>', sep='\n', file=fd)
def __writeEmptyElement(fd):
"""Write html empty table cell"""
print(" <td class='empty'></td>", file=fd)
def writeFile(filename):
"""Write our awesome html file"""
with open(filename, 'w') as f:
Write.__writeHeader(f)
Write.__openRow(f)
i = 0
for elm in Parse.data:
while i != elm['position']:
Write.__writeEmptyElement(f)
i += 1
Write.__writeElement(f, elm)
i += 1
if elm['position'] == 17:
i = 0
Write.__closeRow(f)
if elm['number'] != 118:
Write.__openRow(f)
Write.__writeFooter(f)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Parse:
data = []
def __parseLine(line):
"""Parse the given line"""
name_len = line.index(' ')
name = line[:name_len]
line = line[name_len + 3:]
elec_pos = line.index('electron') + 9
line = line[:elec_pos] + '[' + line[elec_pos:].replace(' ', ',') + ']'
line = line.replace(' ', '')
line = line.replace('small:', 'small:"').replace(',molar', '",molar')
for i in ['position', 'number', 'small', 'molar', 'electron']:
line = line.replace(i, '"' + i + '"')
return eval('{"name":"' + name + '",' + line + '}')
def parseFile(filename):
"""Parse the given file"""
Parse.data = []
with open(filename, 'r') as f:
for line in f:
Parse.data += [Parse.__parseLine(line)]
return Parse.data
class Write:
def __writeHeader(fd):
"""Write html header"""
print('<!DOCTYPE html>', '<html>', ' <head>',
' <title>Super Tableau 3000</title>',
" <meta charset='utf-8' />", ' <style>',
' table { border-collapse: collapse; }',
' td { border: solid; }', ' h4, li { font-size:10px; }',
' .empty { border: 0px; }', ' </style>', ' </head>',
' <body>', ' <table>', sep='\n', file=fd)
def __writeFooter(fd):
"""Write html footer"""
print(' </table>', ' </body>', '</html>', sep='\n', file=fd)
def __openRow(fd):
"""Write opening html table row"""
print(' <tr>', file=fd)
def __closeRow(fd):
"""Write closing html table row"""
print(' </tr>', file=fd)
def __writeElement(fd, elm):
"""Write html table cell"""
print(' <td>', ' <h4>' + elm['name'] + '</h4>', ' <ul>',
' <li>' + str(elm['number']) + '</li>', ' <li>' + elm
['small'] + '</li>', ' <li>' + str(elm['molar']) + '</li>',
' </ul>', ' </td>', sep='\n', file=fd)
def __writeEmptyElement(fd):
"""Write html empty table cell"""
print(" <td class='empty'></td>", file=fd)
def writeFile(filename):
"""Write our awesome html file"""
with open(filename, 'w') as f:
Write.__writeHeader(f)
Write.__openRow(f)
i = 0
for elm in Parse.data:
while i != elm['position']:
Write.__writeEmptyElement(f)
i += 1
Write.__writeElement(f, elm)
i += 1
if elm['position'] == 17:
i = 0
Write.__closeRow(f)
if elm['number'] != 118:
Write.__openRow(f)
Write.__writeFooter(f)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Parse:
data = []
def __parseLine(line):
"""Parse the given line"""
name_len = line.index(' ')
name = line[:name_len]
line = line[name_len + 3:]
elec_pos = line.index('electron') + 9
line = line[:elec_pos] + '[' + line[elec_pos:].replace(' ', ',') + ']'
line = line.replace(' ', '')
line = line.replace('small:', 'small:"').replace(',molar', '",molar')
for i in ['position', 'number', 'small', 'molar', 'electron']:
line = line.replace(i, '"' + i + '"')
return eval('{"name":"' + name + '",' + line + '}')
def parseFile(filename):
"""Parse the given file"""
Parse.data = []
with open(filename, 'r') as f:
for line in f:
Parse.data += [Parse.__parseLine(line)]
return Parse.data
class Write:
    """Render Parse.data as a static html periodic table.

    All methods take the output stream as their first argument; the
    double-underscore ones are internal helpers used by writeFile.
    """

    def __writeHeader(fd):
        """Write html header"""
        print('<!DOCTYPE html>', '<html>', ' <head>',
              ' <title>Super Tableau 3000</title>',
              " <meta charset='utf-8' />", ' <style>',
              ' table { border-collapse: collapse; }',
              ' td { border: solid; }', ' h4, li { font-size:10px; }',
              ' .empty { border: 0px; }', ' </style>', ' </head>',
              ' <body>', ' <table>', sep='\n', file=fd)

    def __writeFooter(fd):
        """Write html footer"""
        print(' </table>', ' </body>', '</html>', sep='\n', file=fd)

    def __openRow(fd):
        """Write opening html table row"""
        print(' <tr>', file=fd)

    def __closeRow(fd):
        """Write closing html table row"""
        print(' </tr>', file=fd)

    def __writeElement(fd, elm):
        """Write html table cell: name heading plus number/symbol/molar."""
        print(' <td>', ' <h4>' + elm['name'] + '</h4>', ' <ul>',
              ' <li>' + str(elm['number']) + '</li>', ' <li>' + elm
              ['small'] + '</li>', ' <li>' + str(elm['molar']) + '</li>',
              ' </ul>', ' </td>', sep='\n', file=fd)

    def __writeEmptyElement(fd):
        """Write html empty table cell"""
        print(" <td class='empty'></td>", file=fd)

    def writeFile(filename):
        """Write our awesome html file"""
        with open(filename, 'w') as f:
            Write.__writeHeader(f)
            Write.__openRow(f)
            i = 0  # current column within the open row
            for elm in Parse.data:
                # pad with empty cells up to this element's column
                while i != elm['position']:
                    Write.__writeEmptyElement(f)
                    i += 1
                Write.__writeElement(f, elm)
                i += 1
                if elm['position'] == 17:
                    # row complete: close it; open a new one unless the
                    # final element (118) was just written
                    i = 0
                    Write.__closeRow(f)
                    if elm['number'] != 118:
                        Write.__openRow(f)
            Write.__writeFooter(f)
def doTheJob(input_file):
    """Parse *input_file* and emit the matching .html next to it."""
    Parse.parseFile(input_file)
    html_name = input_file.replace('.txt', '.html')
    Write.writeFile(html_name)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
#!/usr/bin/env python3
import sys
class Parse:
    """Parse the periodic-table text file into a list of element dicts."""

    # Parsed elements, stored at class level so Write can read them.
    data = []

    def __parseLine(line):
        """Turn one 'Name = key:value, ...' line into a dict.

        The line is rewritten into a Python dict literal (the
        space-separated electron shells become a list, the 'small'
        value and every key get quoted) and then evaluated.
        """
        import ast  # local import: keeps the class self-contained
        # extract name (everything before the first space)
        name_len = line.index(" ")
        name = line[:name_len]
        line = line[name_len + 3:]
        # array-ize 'electron' val: "2 8 1" -> "[2,8,1]"
        elec_pos = line.index("electron") + 9
        line = line[:elec_pos] + '[' + line[elec_pos:].replace(' ', ',') + ']'
        # quote 'small' val
        line = line.replace(' ', '')
        line = line.replace('small:', 'small:"').replace(',molar', '",molar')
        # quote all keys
        for i in ["position", "number", "small", "molar", "electron"]:
            line = line.replace(i, '"' + i + '"')
        # literal_eval accepts only literals, unlike eval, so a crafted
        # input line cannot execute arbitrary code.
        return ast.literal_eval('{"name":"' + name + '",' + line + '}')

    def parseFile(filename):
        """Parse *filename*; store and return the element dicts."""
        Parse.data = []
        with open(filename, "r") as f:
            for line in f:
                Parse.data += [Parse.__parseLine(line)]
        return Parse.data
class Write:
    """Render Parse.data as a static html periodic table.

    All methods take the output stream as their first argument; the
    double-underscore ones are internal helpers used by writeFile.
    """

    def __writeHeader(fd):
        """Write html header"""
        print(
            "<!DOCTYPE html>",
            "<html>",
            " <head>",
            " <title>Super Tableau 3000</title>",
            " <meta charset='utf-8' />",
            " <style>",  # ty alex for css!
            " table { border-collapse: collapse; }",
            " td { border: solid; }",
            " h4, li { font-size:10px; }",
            " .empty { border: 0px; }",
            " </style>",
            " </head>",
            " <body>",
            " <table>",
            sep="\n",
            file=fd
        )

    def __writeFooter(fd):
        """Write html footer"""
        print(
            " </table>",
            " </body>",
            "</html>",
            sep="\n",
            file=fd
        )

    def __openRow(fd):
        """Write opening html table row"""
        print(" <tr>", file=fd)

    def __closeRow(fd):
        """Write closing html table row"""
        print(" </tr>", file=fd)

    def __writeElement(fd, elm):
        """Write html table cell: name heading plus number/symbol/molar."""
        print(
            " <td>",
            " <h4>" + elm["name"] + "</h4>",
            " <ul>",
            " <li>" + str(elm["number"]) + "</li>",
            " <li>" + elm["small"] + "</li>",
            " <li>" + str(elm["molar"]) + "</li>",
            " </ul>",
            " </td>",
            sep="\n",
            file=fd
        )

    def __writeEmptyElement(fd):
        """Write html empty table cell"""
        print(" <td class='empty'></td>", file=fd)

    def writeFile(filename):
        """Write our awesome html file"""
        with open(filename, "w") as f:
            Write.__writeHeader(f)

            Write.__openRow(f)
            i = 0  # current column within the open row
            for elm in Parse.data:
                # pad with empty cells up to this element's column
                while i != elm["position"]:
                    Write.__writeEmptyElement(f)
                    i += 1

                Write.__writeElement(f, elm)
                i += 1

                if elm["position"] == 17:
                    # row complete: close it; open a new one unless the
                    # final element (118) was just written
                    i = 0
                    Write.__closeRow(f)
                    if elm["number"] != 118:
                        Write.__openRow(f)

            Write.__writeFooter(f)
def doTheJob(input_file):
    """Parse *input_file* and emit the matching .html next to it."""
    Parse.parseFile(input_file)
    html_name = input_file.replace(".txt", ".html")
    Write.writeFile(html_name)
if __name__ == '__main__':
    # Use the path given on the command line, or fall back to the
    # bundled data file.
    target = sys.argv[1] if len(sys.argv) == 2 else "./ex07/periodic_table.txt"
    doTheJob(target)
|
flexible
|
{
"blob_id": "cb77696a90716acdee83a1cf6162a8f42c524e11",
"index": 7612,
"step-1": "<mask token>\n\n\nclass Write:\n\n def __writeHeader(fd):\n \"\"\"Write html header\"\"\"\n print('<!DOCTYPE html>', '<html>', ' <head>',\n ' <title>Super Tableau 3000</title>',\n \" <meta charset='utf-8' />\", ' <style>',\n ' table { border-collapse: collapse; }',\n ' td { border: solid; }', ' h4, li { font-size:10px; }',\n ' .empty { border: 0px; }', ' </style>', ' </head>',\n ' <body>', ' <table>', sep='\\n', file=fd)\n\n def __writeFooter(fd):\n \"\"\"Write html footer\"\"\"\n print(' </table>', ' </body>', '</html>', sep='\\n', file=fd)\n\n def __openRow(fd):\n \"\"\"Write opening html table row\"\"\"\n print(' <tr>', file=fd)\n\n def __closeRow(fd):\n \"\"\"Write closing html table row\"\"\"\n print(' </tr>', file=fd)\n\n def __writeElement(fd, elm):\n \"\"\"Write html table cell\"\"\"\n print(' <td>', ' <h4>' + elm['name'] + '</h4>', ' <ul>',\n ' <li>' + str(elm['number']) + '</li>', ' <li>' + elm\n ['small'] + '</li>', ' <li>' + str(elm['molar']) + '</li>',\n ' </ul>', ' </td>', sep='\\n', file=fd)\n\n def __writeEmptyElement(fd):\n \"\"\"Write html empty table cell\"\"\"\n print(\" <td class='empty'></td>\", file=fd)\n\n def writeFile(filename):\n \"\"\"Write our awesome html file\"\"\"\n with open(filename, 'w') as f:\n Write.__writeHeader(f)\n Write.__openRow(f)\n i = 0\n for elm in Parse.data:\n while i != elm['position']:\n Write.__writeEmptyElement(f)\n i += 1\n Write.__writeElement(f, elm)\n i += 1\n if elm['position'] == 17:\n i = 0\n Write.__closeRow(f)\n if elm['number'] != 118:\n Write.__openRow(f)\n Write.__writeFooter(f)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Parse:\n <mask token>\n\n def __parseLine(line):\n \"\"\"Parse the given line\"\"\"\n name_len = line.index(' ')\n name = line[:name_len]\n line = line[name_len + 3:]\n elec_pos = line.index('electron') + 9\n line = line[:elec_pos] + '[' + line[elec_pos:].replace(' ', ',') + ']'\n line = line.replace(' ', '')\n line = line.replace('small:', 'small:\"').replace(',molar', '\",molar')\n for i in ['position', 'number', 'small', 'molar', 'electron']:\n line = line.replace(i, '\"' + i + '\"')\n return eval('{\"name\":\"' + name + '\",' + line + '}')\n\n def parseFile(filename):\n \"\"\"Parse the given file\"\"\"\n Parse.data = []\n with open(filename, 'r') as f:\n for line in f:\n Parse.data += [Parse.__parseLine(line)]\n return Parse.data\n\n\nclass Write:\n\n def __writeHeader(fd):\n \"\"\"Write html header\"\"\"\n print('<!DOCTYPE html>', '<html>', ' <head>',\n ' <title>Super Tableau 3000</title>',\n \" <meta charset='utf-8' />\", ' <style>',\n ' table { border-collapse: collapse; }',\n ' td { border: solid; }', ' h4, li { font-size:10px; }',\n ' .empty { border: 0px; }', ' </style>', ' </head>',\n ' <body>', ' <table>', sep='\\n', file=fd)\n\n def __writeFooter(fd):\n \"\"\"Write html footer\"\"\"\n print(' </table>', ' </body>', '</html>', sep='\\n', file=fd)\n\n def __openRow(fd):\n \"\"\"Write opening html table row\"\"\"\n print(' <tr>', file=fd)\n\n def __closeRow(fd):\n \"\"\"Write closing html table row\"\"\"\n print(' </tr>', file=fd)\n\n def __writeElement(fd, elm):\n \"\"\"Write html table cell\"\"\"\n print(' <td>', ' <h4>' + elm['name'] + '</h4>', ' <ul>',\n ' <li>' + str(elm['number']) + '</li>', ' <li>' + elm\n ['small'] + '</li>', ' <li>' + str(elm['molar']) + '</li>',\n ' </ul>', ' </td>', sep='\\n', file=fd)\n\n def __writeEmptyElement(fd):\n \"\"\"Write html empty table cell\"\"\"\n print(\" <td class='empty'></td>\", file=fd)\n\n def writeFile(filename):\n \"\"\"Write our awesome html file\"\"\"\n with 
open(filename, 'w') as f:\n Write.__writeHeader(f)\n Write.__openRow(f)\n i = 0\n for elm in Parse.data:\n while i != elm['position']:\n Write.__writeEmptyElement(f)\n i += 1\n Write.__writeElement(f, elm)\n i += 1\n if elm['position'] == 17:\n i = 0\n Write.__closeRow(f)\n if elm['number'] != 118:\n Write.__openRow(f)\n Write.__writeFooter(f)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Parse:\n data = []\n\n def __parseLine(line):\n \"\"\"Parse the given line\"\"\"\n name_len = line.index(' ')\n name = line[:name_len]\n line = line[name_len + 3:]\n elec_pos = line.index('electron') + 9\n line = line[:elec_pos] + '[' + line[elec_pos:].replace(' ', ',') + ']'\n line = line.replace(' ', '')\n line = line.replace('small:', 'small:\"').replace(',molar', '\",molar')\n for i in ['position', 'number', 'small', 'molar', 'electron']:\n line = line.replace(i, '\"' + i + '\"')\n return eval('{\"name\":\"' + name + '\",' + line + '}')\n\n def parseFile(filename):\n \"\"\"Parse the given file\"\"\"\n Parse.data = []\n with open(filename, 'r') as f:\n for line in f:\n Parse.data += [Parse.__parseLine(line)]\n return Parse.data\n\n\nclass Write:\n\n def __writeHeader(fd):\n \"\"\"Write html header\"\"\"\n print('<!DOCTYPE html>', '<html>', ' <head>',\n ' <title>Super Tableau 3000</title>',\n \" <meta charset='utf-8' />\", ' <style>',\n ' table { border-collapse: collapse; }',\n ' td { border: solid; }', ' h4, li { font-size:10px; }',\n ' .empty { border: 0px; }', ' </style>', ' </head>',\n ' <body>', ' <table>', sep='\\n', file=fd)\n\n def __writeFooter(fd):\n \"\"\"Write html footer\"\"\"\n print(' </table>', ' </body>', '</html>', sep='\\n', file=fd)\n\n def __openRow(fd):\n \"\"\"Write opening html table row\"\"\"\n print(' <tr>', file=fd)\n\n def __closeRow(fd):\n \"\"\"Write closing html table row\"\"\"\n print(' </tr>', file=fd)\n\n def __writeElement(fd, elm):\n \"\"\"Write html table cell\"\"\"\n print(' <td>', ' <h4>' + elm['name'] + '</h4>', ' <ul>',\n ' <li>' + str(elm['number']) + '</li>', ' <li>' + elm\n ['small'] + '</li>', ' <li>' + str(elm['molar']) + '</li>',\n ' </ul>', ' </td>', sep='\\n', file=fd)\n\n def __writeEmptyElement(fd):\n \"\"\"Write html empty table cell\"\"\"\n print(\" <td class='empty'></td>\", file=fd)\n\n def writeFile(filename):\n \"\"\"Write our awesome html file\"\"\"\n with open(filename, 
'w') as f:\n Write.__writeHeader(f)\n Write.__openRow(f)\n i = 0\n for elm in Parse.data:\n while i != elm['position']:\n Write.__writeEmptyElement(f)\n i += 1\n Write.__writeElement(f, elm)\n i += 1\n if elm['position'] == 17:\n i = 0\n Write.__closeRow(f)\n if elm['number'] != 118:\n Write.__openRow(f)\n Write.__writeFooter(f)\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass Parse:\n data = []\n\n def __parseLine(line):\n \"\"\"Parse the given line\"\"\"\n name_len = line.index(' ')\n name = line[:name_len]\n line = line[name_len + 3:]\n elec_pos = line.index('electron') + 9\n line = line[:elec_pos] + '[' + line[elec_pos:].replace(' ', ',') + ']'\n line = line.replace(' ', '')\n line = line.replace('small:', 'small:\"').replace(',molar', '\",molar')\n for i in ['position', 'number', 'small', 'molar', 'electron']:\n line = line.replace(i, '\"' + i + '\"')\n return eval('{\"name\":\"' + name + '\",' + line + '}')\n\n def parseFile(filename):\n \"\"\"Parse the given file\"\"\"\n Parse.data = []\n with open(filename, 'r') as f:\n for line in f:\n Parse.data += [Parse.__parseLine(line)]\n return Parse.data\n\n\nclass Write:\n\n def __writeHeader(fd):\n \"\"\"Write html header\"\"\"\n print('<!DOCTYPE html>', '<html>', ' <head>',\n ' <title>Super Tableau 3000</title>',\n \" <meta charset='utf-8' />\", ' <style>',\n ' table { border-collapse: collapse; }',\n ' td { border: solid; }', ' h4, li { font-size:10px; }',\n ' .empty { border: 0px; }', ' </style>', ' </head>',\n ' <body>', ' <table>', sep='\\n', file=fd)\n\n def __writeFooter(fd):\n \"\"\"Write html footer\"\"\"\n print(' </table>', ' </body>', '</html>', sep='\\n', file=fd)\n\n def __openRow(fd):\n \"\"\"Write opening html table row\"\"\"\n print(' <tr>', file=fd)\n\n def __closeRow(fd):\n \"\"\"Write closing html table row\"\"\"\n print(' </tr>', file=fd)\n\n def __writeElement(fd, elm):\n \"\"\"Write html table cell\"\"\"\n print(' <td>', ' <h4>' + elm['name'] + '</h4>', ' <ul>',\n ' <li>' + str(elm['number']) + '</li>', ' <li>' + elm\n ['small'] + '</li>', ' <li>' + str(elm['molar']) + '</li>',\n ' </ul>', ' </td>', sep='\\n', file=fd)\n\n def __writeEmptyElement(fd):\n \"\"\"Write html empty table cell\"\"\"\n print(\" <td class='empty'></td>\", file=fd)\n\n def writeFile(filename):\n \"\"\"Write our awesome html file\"\"\"\n with open(filename, 
'w') as f:\n Write.__writeHeader(f)\n Write.__openRow(f)\n i = 0\n for elm in Parse.data:\n while i != elm['position']:\n Write.__writeEmptyElement(f)\n i += 1\n Write.__writeElement(f, elm)\n i += 1\n if elm['position'] == 17:\n i = 0\n Write.__closeRow(f)\n if elm['number'] != 118:\n Write.__openRow(f)\n Write.__writeFooter(f)\n\n\ndef doTheJob(input_file):\n \"\"\"Do all we need\"\"\"\n Parse.parseFile(input_file)\n Write.writeFile(input_file.replace('.txt', '.html'))\n\n\n<mask token>\n",
"step-5": "#!/usr/bin/env python3\n\nimport sys\n\n\nclass Parse:\n data = []\n\n def __parseLine(line):\n \"\"\"Parse the given line\"\"\"\n\n # extract name\n name_len = line.index(\" \")\n name = line[:name_len]\n line = line[name_len + 3:]\n\n # array-ize 'electron' val\n elec_pos = line.index(\"electron\") + 9\n line = line[:elec_pos] + '[' + line[elec_pos:].replace(' ', ',') + ']'\n\n # quote 'small' val\n line = line.replace(' ', '')\n line = line.replace('small:', 'small:\"').replace(',molar', '\",molar')\n\n # quote all keys\n for i in [\"position\", \"number\", \"small\", \"molar\", \"electron\"]:\n line = line.replace(i, '\"' + i + '\"')\n\n return eval('{\"name\":\"' + name + '\",' + line + '}')\n\n def parseFile(filename):\n \"\"\"Parse the given file\"\"\"\n\n Parse.data = []\n with open(filename, \"r\") as f:\n for line in f:\n Parse.data += [Parse.__parseLine(line)]\n return Parse.data\n\n\nclass Write:\n def __writeHeader(fd):\n \"\"\"Write html header\"\"\"\n\n print(\n \"<!DOCTYPE html>\",\n \"<html>\",\n \" <head>\",\n \" <title>Super Tableau 3000</title>\",\n \" <meta charset='utf-8' />\",\n \" <style>\", # ty alex for css!\n \" table { border-collapse: collapse; }\",\n \" td { border: solid; }\",\n \" h4, li { font-size:10px; }\",\n \" .empty { border: 0px; }\",\n \" </style>\",\n \" </head>\",\n \" <body>\",\n \" <table>\",\n sep=\"\\n\",\n file=fd\n )\n\n def __writeFooter(fd):\n \"\"\"Write html footer\"\"\"\n\n print(\n \" </table>\",\n \" </body>\",\n \"</html>\",\n sep=\"\\n\",\n file=fd\n )\n\n def __openRow(fd):\n \"\"\"Write opening html table row\"\"\"\n\n print(\" <tr>\", file=fd)\n\n def __closeRow(fd):\n \"\"\"Write closing html table row\"\"\"\n\n print(\" </tr>\", file=fd)\n\n def __writeElement(fd, elm):\n \"\"\"Write html table cell\"\"\"\n\n print(\n \" <td>\",\n \" <h4>\" + elm[\"name\"] + \"</h4>\",\n \" <ul>\",\n \" <li>\" + str(elm[\"number\"]) + \"</li>\",\n \" <li>\" + elm[\"small\"] + \"</li>\",\n \" <li>\" + 
str(elm[\"molar\"]) + \"</li>\",\n \" </ul>\",\n \" </td>\",\n sep=\"\\n\",\n file=fd\n )\n\n def __writeEmptyElement(fd):\n \"\"\"Write html empty table cell\"\"\"\n\n print(\" <td class='empty'></td>\", file=fd)\n\n def writeFile(filename):\n \"\"\"Write our awesome html file\"\"\"\n\n with open(filename, \"w\") as f:\n Write.__writeHeader(f)\n\n Write.__openRow(f)\n i = 0\n for elm in Parse.data:\n while i != elm[\"position\"]:\n Write.__writeEmptyElement(f)\n i += 1\n\n Write.__writeElement(f, elm)\n i += 1\n\n if elm[\"position\"] == 17:\n i = 0\n Write.__closeRow(f)\n if elm[\"number\"] != 118:\n Write.__openRow(f)\n\n Write.__writeFooter(f)\n\n\ndef doTheJob(input_file):\n \"\"\"Do all we need\"\"\"\n\n Parse.parseFile(input_file)\n Write.writeFile(input_file.replace(\".txt\", \".html\"))\n\n\nif __name__ == '__main__':\n if len(sys.argv) == 2:\n doTheJob(sys.argv[1])\n else:\n doTheJob(\"./ex07/periodic_table.txt\")\n",
"step-ids": [
8,
11,
12,
13,
16
]
}
|
[
8,
11,
12,
13,
16
] |
from setuptools import setup
import os.path
# Get the long description from the README file
# (read eagerly; setup.py is expected to run from the project root)
with open('README.rst') as f:
    long_description = f.read()


setup(name='logging_exceptions',
      version='0.1.8',
      # single-module distribution: ships logging_exceptions.py only
      py_modules=['logging_exceptions'],
      author="Bernhard C. Thiel",
      author_email="thiel@tbi.univie.ac.at",
      description="Self-logging exceptions: Attach log messages to exceptions and output them conditionally.",
      long_description=long_description,
      url='https://github.com/Bernhard10/logging_exceptions',
      license='MIT',
      # trove classifiers: beta status, supports both Python 2.7 and 3.5
      classifiers=[
          'Development Status :: 4 - Beta',
          'Intended Audience :: Developers',
          'Programming Language :: Python :: 2',
          'Programming Language :: Python :: 2.7',
          'Programming Language :: Python :: 3',
          'Programming Language :: Python :: 3.5'
      ],
      keywords='logging exceptions'
      )
|
normal
|
{
"blob_id": "7f7adc367e4f3b8ee721e42f5d5d0770f40828c9",
"index": 9365,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwith open('README.rst') as f:\n long_description = f.read()\nsetup(name='logging_exceptions', version='0.1.8', py_modules=[\n 'logging_exceptions'], author='Bernhard C. Thiel', author_email=\n 'thiel@tbi.univie.ac.at', description=\n 'Self-logging exceptions: Attach log messages to exceptions and output them conditionally.'\n , long_description=long_description, url=\n 'https://github.com/Bernhard10/logging_exceptions', license='MIT',\n classifiers=['Development Status :: 4 - Beta',\n 'Intended Audience :: Developers',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.5'], keywords='logging exceptions')\n",
"step-3": "from setuptools import setup\nimport os.path\nwith open('README.rst') as f:\n long_description = f.read()\nsetup(name='logging_exceptions', version='0.1.8', py_modules=[\n 'logging_exceptions'], author='Bernhard C. Thiel', author_email=\n 'thiel@tbi.univie.ac.at', description=\n 'Self-logging exceptions: Attach log messages to exceptions and output them conditionally.'\n , long_description=long_description, url=\n 'https://github.com/Bernhard10/logging_exceptions', license='MIT',\n classifiers=['Development Status :: 4 - Beta',\n 'Intended Audience :: Developers',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.5'], keywords='logging exceptions')\n",
"step-4": "from setuptools import setup\nimport os.path\n\n# Get the long description from the README file\nwith open('README.rst') as f:\n long_description = f.read()\n\n\nsetup(name='logging_exceptions',\n version='0.1.8',\n py_modules=['logging_exceptions'],\n author=\"Bernhard C. Thiel\",\n author_email=\"thiel@tbi.univie.ac.at\",\n description=\"Self-logging exceptions: Attach log messages to exceptions and output them conditionally.\",\n long_description=long_description,\n url='https://github.com/Bernhard10/logging_exceptions',\n license='MIT',\n classifiers=[\n 'Development Status :: 4 - Beta',\n 'Intended Audience :: Developers',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.5'\n ],\n keywords='logging exceptions'\n\n )\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# Simple lookup "chatbot": keeps prompting for a name forever and
# prints a short profile for each name it knows.
i = 0
while i >= 0:  # i is never modified, so this loops until interrupted
    a = input("Name is: ")
    print(a)  # echo the entered name back
    if a == "Zeal":
        print("""Name_Zeal.
Age_16.
Interested in Programming.""")
    elif a == "HanZaw":
        print("""Name_Han Zaw.
Age_18.
Studying Code at Green Hacker.""")
    elif a == "Murphy":
        print("""Name_Murphy.
Age_17.
Insterested in Editing.""")
    elif a == "Ngal":
        print("""Name_Ngal.
Age_17.
In Loved with Me:p""")
|
normal
|
{
"blob_id": "38184ed4117b1b7dcf9e135ce8612fa13c44a99c",
"index": 5891,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwhile i >= 0:\n a = input('Name is: ')\n print(a)\n if a == 'Zeal':\n print('Name_Zeal.\\n Age_16.\\n Interested in Programming.')\n elif a == 'HanZaw':\n print('Name_Han Zaw.\\n Age_18.\\n Studying Code at Green Hacker.')\n elif a == 'Murphy':\n print('Name_Murphy.\\n Age_17.\\n Insterested in Editing.')\n elif a == 'Ngal':\n print('Name_Ngal.\\n Age_17.\\n In Loved with Me:p')\n",
"step-3": "i = 0\nwhile i >= 0:\n a = input('Name is: ')\n print(a)\n if a == 'Zeal':\n print('Name_Zeal.\\n Age_16.\\n Interested in Programming.')\n elif a == 'HanZaw':\n print('Name_Han Zaw.\\n Age_18.\\n Studying Code at Green Hacker.')\n elif a == 'Murphy':\n print('Name_Murphy.\\n Age_17.\\n Insterested in Editing.')\n elif a == 'Ngal':\n print('Name_Ngal.\\n Age_17.\\n In Loved with Me:p')\n",
"step-4": "i = 0\nwhile i >= 0: \n a=input(\"Name is: \")\n print(a)\n if a == \"Zeal\":\n print(\"\"\"Name_Zeal.\n Age_16.\n Interested in Programming.\"\"\")\n elif a==\"HanZaw\":\n print(\"\"\"Name_Han Zaw.\n Age_18.\n Studying Code at Green Hacker.\"\"\")\n elif a == \"Murphy\": \n print(\"\"\"Name_Murphy.\n Age_17.\n Insterested in Editing.\"\"\") \n elif a ==\"Ngal\":\n print(\"\"\"Name_Ngal.\n Age_17.\n In Loved with Me:p\"\"\")",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
def run_bwa(reference_genome, forward_read, reverse_read, threads, output, i):
    """Index the reference with `bwa index`, then align reads with `bwa mem`.

    Args:
        reference_genome: path to the FASTA file to align against.
        forward_read: path to the forward (or single-end) read file.
        reverse_read: path to the reverse read file ('' for single-end).
        threads: number of alignment threads, as a string for the CLI.
        output: directory that receives the SAM file.
        i: zero-based iteration counter used to name the output file.

    Returns:
        Path of the SAM file produced for this iteration.
    """
    print('Align reads with BWA MEM')
    # Build the BWA index for the current draft assembly.
    bwa_index_args = ['bwa', 'index', reference_genome]
    process = subprocess.Popen(bwa_index_args, stdin=subprocess.PIPE,
                               stdout=subprocess.PIPE)
    out, err = process.communicate()
    # '-x ont2d' tunes alignment scoring for Oxford Nanopore 2D reads.
    # NOTE(review): an empty reverse_read is still passed to bwa mem —
    # verify single-end handling with the callers.
    bwa_mem_args = ['bwa', 'mem', '-t', threads, '-x', 'ont2d',
                    reference_genome, forward_read, reverse_read]
    process = subprocess.Popen(bwa_mem_args, stdin=subprocess.PIPE,
                               stdout=subprocess.PIPE)
    out, err = process.communicate()
    sam_file = os.path.join(output, 'bwa_mem_' + str(i + 1) + '.sam')
    # communicate() returns bytes (no text mode was requested), so the
    # SAM file must be opened in binary mode; the original text-mode
    # 'w' open raised TypeError on write.
    with open(sam_file, 'wb') as bwa_mem_out:
        bwa_mem_out.write(out)
    return sam_file
def run_samtools(sam_file, threads, output, i):
    """Convert a SAM file into a sorted, indexed BAM file via samtools.

    Runs `samtools view`, `samtools sort` and `samtools index` in
    sequence and returns the path of the coordinate-sorted BAM file.
    """
    def _run(args):
        # Run one samtools command, discarding its captured stdout.
        proc = subprocess.Popen(args, stdin=subprocess.PIPE,
                                stdout=subprocess.PIPE)
        return proc.communicate()

    print('Convert SAM-file to BAM-file')
    bam_file = os.path.join(output, 'bwa_mem_' + str(i + 1) + '.bam')
    _run(['samtools', 'view', '-@', threads, '-bS', '-o', bam_file, sam_file])
    print('Sort BAM-file')
    bam_sorted_file = os.path.join(output,
                                   'bwa_mem_' + str(i + 1) + '.sorted.bam')
    _run(['samtools', 'sort', bam_file, '-o', bam_sorted_file])
    _run(['samtools', 'index', bam_sorted_file])
    return bam_sorted_file
<|reserved_special_token_0|>
def main():
    """Drive the iterative correction pipeline.

    Parses the CLI, prepares the output directory, then for each
    iteration aligns reads (bwa), sorts/indexes them (samtools) and
    polishes the assembly (Pilon), feeding each polished FASTA into
    the next round.
    """
    args = parse_arguments()
    logging.basicConfig(filename='logging.log', level=logging.DEBUG)
    output = args.output
    reference_genome = args.draft_seq
    # An absent reverse read means single-end data; pass an empty path.
    reverse_read = args.reverse if args.reverse else ''
    forward_read = args.forward
    threads = args.threads
    iterations = args.iterations
    pilon_path = args.pilon
    logging.info('OUTPUT DIRECTORY:' + output)
    logging.info('READS: ' + forward_read + ', ' + reverse_read)
    logging.info('THREADS: ' + threads)
    logging.info('ITERATIONS: ' + iterations)
    pilon_output = os.path.join(output, 'pilon_1')
    os.mkdir(output)
    logging.info('START CORRECTION')
    for it in range(int(iterations)):
        logging.info('ITERATION: ' + str(it + 1))
        logging.info('REFERENCE GENOME: ' + reference_genome)
        logging.info('PILON OUTPUT: ' + pilon_output)
        sam_file = run_bwa(reference_genome, forward_read, reverse_read,
                           threads, output, it)
        bam_sorted_file = run_samtools(sam_file, threads, output, it)
        run_pilon(bam_sorted_file, reference_genome, pilon_output, threads,
                  pilon_path)
        # The polished FASTA becomes next iteration's reference.
        reference_genome = os.path.join(output,
                                        'pilon_' + str(it + 1) + '.fasta')
        pilon_output = os.path.join(output, 'pilon_' + str(it + 2))
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def run_bwa(reference_genome, forward_read, reverse_read, threads, output, i):
    """Index the reference with `bwa index`, then align reads with `bwa mem`.

    Args:
        reference_genome: path to the FASTA file to align against.
        forward_read: path to the forward (or single-end) read file.
        reverse_read: path to the reverse read file ('' for single-end).
        threads: number of alignment threads, as a string for the CLI.
        output: directory that receives the SAM file.
        i: zero-based iteration counter used to name the output file.

    Returns:
        Path of the SAM file produced for this iteration.
    """
    print('Align reads with BWA MEM')
    # Build the BWA index for the current draft assembly.
    bwa_index_args = ['bwa', 'index', reference_genome]
    process = subprocess.Popen(bwa_index_args, stdin=subprocess.PIPE,
                               stdout=subprocess.PIPE)
    out, err = process.communicate()
    # '-x ont2d' tunes alignment scoring for Oxford Nanopore 2D reads.
    bwa_mem_args = ['bwa', 'mem', '-t', threads, '-x', 'ont2d',
                    reference_genome, forward_read, reverse_read]
    process = subprocess.Popen(bwa_mem_args, stdin=subprocess.PIPE,
                               stdout=subprocess.PIPE)
    out, err = process.communicate()
    sam_file = os.path.join(output, 'bwa_mem_' + str(i + 1) + '.sam')
    # communicate() returns bytes (no text mode was requested), so the
    # SAM file must be opened in binary mode; the original text-mode
    # 'w' open raised TypeError on write.
    with open(sam_file, 'wb') as bwa_mem_out:
        bwa_mem_out.write(out)
    return sam_file
def run_samtools(sam_file, threads, output, i):
    """Convert a SAM file into a sorted, indexed BAM file via samtools.

    Runs `samtools view`, `samtools sort` and `samtools index` in
    sequence and returns the path of the coordinate-sorted BAM file.
    """
    def _run(args):
        # Run one samtools command, discarding its captured stdout.
        proc = subprocess.Popen(args, stdin=subprocess.PIPE,
                                stdout=subprocess.PIPE)
        return proc.communicate()

    print('Convert SAM-file to BAM-file')
    bam_file = os.path.join(output, 'bwa_mem_' + str(i + 1) + '.bam')
    _run(['samtools', 'view', '-@', threads, '-bS', '-o', bam_file, sam_file])
    print('Sort BAM-file')
    bam_sorted_file = os.path.join(output,
                                   'bwa_mem_' + str(i + 1) + '.sorted.bam')
    _run(['samtools', 'sort', bam_file, '-o', bam_sorted_file])
    _run(['samtools', 'index', bam_sorted_file])
    return bam_sorted_file
def run_pilon(bam_sorted_file, reference_genome, pilon_output, threads,
              pilon_path):
    """Polish the draft assembly with Pilon.

    Args:
        bam_sorted_file: sorted, indexed BAM of reads vs. the draft.
        reference_genome: path of the draft FASTA being polished.
        pilon_output: output prefix for Pilon (FASTA and log).
        threads: thread count, as a string for the CLI.
        pilon_path: path to pilon.jar.
    """
    print('Run Pilon')
    pilon_args = ['java', '-Xmx16G', '-jar', pilon_path, '--genome',
                  reference_genome, '--frags', bam_sorted_file, '--threads',
                  threads, '--output', pilon_output]
    process = subprocess.Popen(pilon_args, stdin=subprocess.PIPE,
                               stdout=subprocess.PIPE)
    out, err = process.communicate()
    print(out)
    # communicate() returns bytes, so the log must be written in binary
    # mode; the original text-mode open raised TypeError on write.
    with open(pilon_output + '.log', 'wb') as pilon_log:
        pilon_log.write(out)
def main():
    """Drive the iterative correction pipeline.

    Parses the CLI, prepares the output directory, then for each
    iteration aligns reads (bwa), sorts/indexes them (samtools) and
    polishes the assembly (Pilon), feeding each polished FASTA into
    the next round.
    """
    args = parse_arguments()
    logging.basicConfig(filename='logging.log', level=logging.DEBUG)
    output = args.output
    reference_genome = args.draft_seq
    # An absent reverse read means single-end data; pass an empty path.
    reverse_read = args.reverse if args.reverse else ''
    forward_read = args.forward
    threads = args.threads
    iterations = args.iterations
    pilon_path = args.pilon
    logging.info('OUTPUT DIRECTORY:' + output)
    logging.info('READS: ' + forward_read + ', ' + reverse_read)
    logging.info('THREADS: ' + threads)
    logging.info('ITERATIONS: ' + iterations)
    pilon_output = os.path.join(output, 'pilon_1')
    os.mkdir(output)
    logging.info('START CORRECTION')
    for it in range(int(iterations)):
        logging.info('ITERATION: ' + str(it + 1))
        logging.info('REFERENCE GENOME: ' + reference_genome)
        logging.info('PILON OUTPUT: ' + pilon_output)
        sam_file = run_bwa(reference_genome, forward_read, reverse_read,
                           threads, output, it)
        bam_sorted_file = run_samtools(sam_file, threads, output, it)
        run_pilon(bam_sorted_file, reference_genome, pilon_output, threads,
                  pilon_path)
        # The polished FASTA becomes next iteration's reference.
        reference_genome = os.path.join(output,
                                        'pilon_' + str(it + 1) + '.fasta')
        pilon_output = os.path.join(output, 'pilon_' + str(it + 2))
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def parse_arguments():
    """Build the command line interface and parse sys.argv.

    Returns:
        argparse.Namespace with draft_seq, forward, reverse, output,
        iterations, threads and pilon attributes.
    """
    cli = argparse.ArgumentParser(description='Run pilon many times')
    cli.add_argument('--draft_seq', '-d', required=True,
                     help='Draft sequence to correct')
    cli.add_argument('--forward', '-f', required=True,
                     help='Reads to use for correction')
    # The reverse read is optional: single-end data omits it.
    cli.add_argument('--reverse', '-r', help='Reverse read for correction')
    cli.add_argument('--output', '-o', required=True,
                     help='Output directory')
    cli.add_argument('--iterations', '-i', required=True,
                     help='How many times to run pilon')
    cli.add_argument('--threads', '-t', required=True, help='Threads to use')
    cli.add_argument('--pilon', '-p', required=True, help='Path to pilon.jar')
    return cli.parse_args()
def run_bwa(reference_genome, forward_read, reverse_read, threads, output, i):
    """Index the reference with `bwa index`, then align reads with `bwa mem`.

    Args:
        reference_genome: path to the FASTA file to align against.
        forward_read: path to the forward (or single-end) read file.
        reverse_read: path to the reverse read file ('' for single-end).
        threads: number of alignment threads, as a string for the CLI.
        output: directory that receives the SAM file.
        i: zero-based iteration counter used to name the output file.

    Returns:
        Path of the SAM file produced for this iteration.
    """
    print('Align reads with BWA MEM')
    # Build the BWA index for the current draft assembly.
    bwa_index_args = ['bwa', 'index', reference_genome]
    process = subprocess.Popen(bwa_index_args, stdin=subprocess.PIPE,
                               stdout=subprocess.PIPE)
    out, err = process.communicate()
    # '-x ont2d' tunes alignment scoring for Oxford Nanopore 2D reads.
    bwa_mem_args = ['bwa', 'mem', '-t', threads, '-x', 'ont2d',
                    reference_genome, forward_read, reverse_read]
    process = subprocess.Popen(bwa_mem_args, stdin=subprocess.PIPE,
                               stdout=subprocess.PIPE)
    out, err = process.communicate()
    sam_file = os.path.join(output, 'bwa_mem_' + str(i + 1) + '.sam')
    # communicate() returns bytes (no text mode was requested), so the
    # SAM file must be opened in binary mode; the original text-mode
    # 'w' open raised TypeError on write.
    with open(sam_file, 'wb') as bwa_mem_out:
        bwa_mem_out.write(out)
    return sam_file
def run_samtools(sam_file, threads, output, i):
    """Convert a SAM file into a sorted, indexed BAM file via samtools.

    Runs `samtools view`, `samtools sort` and `samtools index` in
    sequence and returns the path of the coordinate-sorted BAM file.
    """
    def _run(args):
        # Run one samtools command, discarding its captured stdout.
        proc = subprocess.Popen(args, stdin=subprocess.PIPE,
                                stdout=subprocess.PIPE)
        return proc.communicate()

    print('Convert SAM-file to BAM-file')
    bam_file = os.path.join(output, 'bwa_mem_' + str(i + 1) + '.bam')
    _run(['samtools', 'view', '-@', threads, '-bS', '-o', bam_file, sam_file])
    print('Sort BAM-file')
    bam_sorted_file = os.path.join(output,
                                   'bwa_mem_' + str(i + 1) + '.sorted.bam')
    _run(['samtools', 'sort', bam_file, '-o', bam_sorted_file])
    _run(['samtools', 'index', bam_sorted_file])
    return bam_sorted_file
def run_pilon(bam_sorted_file, reference_genome, pilon_output, threads,
              pilon_path):
    """Polish the draft assembly with Pilon.

    Args:
        bam_sorted_file: sorted, indexed BAM of reads vs. the draft.
        reference_genome: path of the draft FASTA being polished.
        pilon_output: output prefix for Pilon (FASTA and log).
        threads: thread count, as a string for the CLI.
        pilon_path: path to pilon.jar.
    """
    print('Run Pilon')
    pilon_args = ['java', '-Xmx16G', '-jar', pilon_path, '--genome',
                  reference_genome, '--frags', bam_sorted_file, '--threads',
                  threads, '--output', pilon_output]
    process = subprocess.Popen(pilon_args, stdin=subprocess.PIPE,
                               stdout=subprocess.PIPE)
    out, err = process.communicate()
    print(out)
    # communicate() returns bytes, so the log must be written in binary
    # mode; the original text-mode open raised TypeError on write.
    with open(pilon_output + '.log', 'wb') as pilon_log:
        pilon_log.write(out)
def main():
    """Drive the iterative correction pipeline.

    Parses the CLI, prepares the output directory, then for each
    iteration aligns reads (bwa), sorts/indexes them (samtools) and
    polishes the assembly (Pilon), feeding each polished FASTA into
    the next round.
    """
    args = parse_arguments()
    logging.basicConfig(filename='logging.log', level=logging.DEBUG)
    output = args.output
    reference_genome = args.draft_seq
    # An absent reverse read means single-end data; pass an empty path.
    reverse_read = args.reverse if args.reverse else ''
    forward_read = args.forward
    threads = args.threads
    iterations = args.iterations
    pilon_path = args.pilon
    logging.info('OUTPUT DIRECTORY:' + output)
    logging.info('READS: ' + forward_read + ', ' + reverse_read)
    logging.info('THREADS: ' + threads)
    logging.info('ITERATIONS: ' + iterations)
    pilon_output = os.path.join(output, 'pilon_1')
    os.mkdir(output)
    logging.info('START CORRECTION')
    for it in range(int(iterations)):
        logging.info('ITERATION: ' + str(it + 1))
        logging.info('REFERENCE GENOME: ' + reference_genome)
        logging.info('PILON OUTPUT: ' + pilon_output)
        sam_file = run_bwa(reference_genome, forward_read, reverse_read,
                           threads, output, it)
        bam_sorted_file = run_samtools(sam_file, threads, output, it)
        run_pilon(bam_sorted_file, reference_genome, pilon_output, threads,
                  pilon_path)
        # The polished FASTA becomes next iteration's reference.
        reference_genome = os.path.join(output,
                                        'pilon_' + str(it + 1) + '.fasta')
        pilon_output = os.path.join(output, 'pilon_' + str(it + 2))
if __name__ == '__main__':
    # Script entry point: run the iterative Pilon correction pipeline.
    main()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import os
import argparse
import logging
import subprocess
def parse_arguments():
    """Build the command line interface and parse sys.argv.

    Returns:
        argparse.Namespace with draft_seq, forward, reverse, output,
        iterations, threads and pilon attributes.
    """
    cli = argparse.ArgumentParser(description='Run pilon many times')
    cli.add_argument('--draft_seq', '-d', required=True,
                     help='Draft sequence to correct')
    cli.add_argument('--forward', '-f', required=True,
                     help='Reads to use for correction')
    # The reverse read is optional: single-end data omits it.
    cli.add_argument('--reverse', '-r', help='Reverse read for correction')
    cli.add_argument('--output', '-o', required=True,
                     help='Output directory')
    cli.add_argument('--iterations', '-i', required=True,
                     help='How many times to run pilon')
    cli.add_argument('--threads', '-t', required=True, help='Threads to use')
    cli.add_argument('--pilon', '-p', required=True, help='Path to pilon.jar')
    return cli.parse_args()
def run_bwa(reference_genome, forward_read, reverse_read, threads, output, i):
    """Align reads to the reference genome with BWA MEM.

    Indexes *reference_genome*, aligns *forward_read* (and *reverse_read*
    when it is non-empty) with ``threads`` threads, and writes the raw SAM
    output to ``<output>/bwa_mem_<i+1>.sam``.

    Returns:
        Path of the SAM file that was written.
    """
    print('Align reads with BWA MEM')
    # bwa mem requires the reference to be indexed first.
    bwa_index_args = ['bwa', 'index', reference_genome]
    process = subprocess.Popen(bwa_index_args, stdin=subprocess.PIPE,
        stdout=subprocess.PIPE)
    out, err = process.communicate()
    # Only pass the reverse read when one was supplied: an empty string
    # argument would be handed to bwa as a (nonexistent) input file.
    bwa_mem_args = ['bwa', 'mem', '-t', threads, '-x', 'ont2d',
        reference_genome, forward_read]
    if reverse_read:
        bwa_mem_args.append(reverse_read)
    process = subprocess.Popen(bwa_mem_args, stdin=subprocess.PIPE, stdout=
        subprocess.PIPE)
    out, err = process.communicate()
    sam_file = os.path.join(output, 'bwa_mem_' + str(i + 1) + '.sam')
    # communicate() yields bytes (no text mode requested), so the SAM
    # file must be opened in binary mode; text mode ('w') would raise
    # TypeError on Python 3.
    with open(sam_file, 'wb') as bwa_mem_out:
        bwa_mem_out.write(out)
    return sam_file
def run_samtools(sam_file, threads, output, i):
    """Convert the SAM alignment into a sorted, indexed BAM file.

    Produces ``bwa_mem_<i+1>.bam`` and ``bwa_mem_<i+1>.sorted.bam`` (plus
    its index) inside *output* and returns the sorted BAM path.
    """
    def _invoke(argv):
        # Run one samtools command and block until it completes.
        proc = subprocess.Popen(argv, stdin=subprocess.PIPE,
                                stdout=subprocess.PIPE)
        return proc.communicate()

    prefix = os.path.join(output, 'bwa_mem_' + str(i + 1))
    print('Convert SAM-file to BAM-file')
    bam_file = prefix + '.bam'
    _invoke(['samtools', 'view', '-@', threads, '-bS', '-o', bam_file,
             sam_file])
    print('Sort BAM-file')
    bam_sorted_file = prefix + '.sorted.bam'
    _invoke(['samtools', 'sort', bam_file, '-o', bam_sorted_file])
    # Index the sorted BAM so downstream tools can random-access it.
    _invoke(['samtools', 'index', bam_sorted_file])
    return bam_sorted_file
def run_pilon(bam_sorted_file, reference_genome, pilon_output, threads,
    pilon_path):
    """Run Pilon to correct *reference_genome* using the aligned reads.

    Invokes ``pilon.jar`` (16 GB heap) with the sorted BAM as the paired
    fragment input, echoes Pilon's stdout, and saves it to
    ``<pilon_output>.log``.
    """
    print('Run Pilon')
    pilon_args = ['java', '-Xmx16G', '-jar', pilon_path, '--genome',
        reference_genome, '--frags', bam_sorted_file, '--threads', threads,
        '--output', pilon_output]
    process = subprocess.Popen(pilon_args, stdin=subprocess.PIPE, stdout=
        subprocess.PIPE)
    out, err = process.communicate()
    print(out)
    # communicate() yields bytes, so the log must be opened in binary
    # mode; the original text-mode open ('w') raises TypeError on
    # Python 3 when bytes are written.
    with open(pilon_output + '.log', 'wb') as pilon_log:
        pilon_log.write(out)
def main():
    """Drive the iterative polishing loop: bwa -> samtools -> pilon.

    Each iteration aligns the reads against the current reference, runs
    Pilon on the alignment, then feeds Pilon's corrected FASTA back in as
    the next iteration's reference.
    """
    args = parse_arguments()
    logging.basicConfig(filename='logging.log', level=logging.DEBUG)
    output = args.output
    reference_genome = args.draft_seq
    # Reverse reads are optional; fall back to an empty string.
    if args.reverse:
        reverse_read = args.reverse
    else:
        reverse_read = ''
    forward_read = args.forward
    threads = args.threads
    iterations = args.iterations
    pilon_path = args.pilon
    logging.info('OUTPUT DIRECTORY:' + output)
    logging.info('READS: ' + forward_read + ', ' + reverse_read)
    logging.info('THREADS: ' + threads)
    logging.info('ITERATIONS: ' + iterations)
    pilon_output = os.path.join(output, 'pilon_1')
    # NOTE(review): os.mkdir raises FileExistsError if the output
    # directory already exists -- intentional fail-fast? confirm.
    os.mkdir(output)
    logging.info('START CORRECTION')
    for i in range(int(iterations)):
        logging.info('ITERATION: ' + str(i + 1))
        logging.info('REFERENCE GENOME: ' + reference_genome)
        logging.info('PILON OUTPUT: ' + pilon_output)
        sam_file = run_bwa(reference_genome, forward_read, reverse_read,
            threads, output, i)
        bam_sorted_file = run_samtools(sam_file, threads, output, i)
        run_pilon(bam_sorted_file, reference_genome, pilon_output, threads,
            pilon_path)
        # Chain iterations: Pilon's corrected FASTA becomes the new
        # reference, and the next iteration writes to pilon_<i+2>.
        reference_genome = os.path.join(output, 'pilon_' + str(i + 1) +
            '.fasta')
        pilon_output = os.path.join(output, 'pilon_' + str(i + 2))
if __name__ == '__main__':
    # Entry point: run the iterative correction pipeline only when this
    # file is executed as a script, not when it is imported.
    main()
<|reserved_special_token_1|>
""" Script to run pilon iteratively to correct genome assemblies """
import os
import argparse
import logging
import subprocess
def parse_arguments():
    """Parse command line arguments for the iterative-pilon runner.

    Returns:
        argparse.Namespace with attributes ``draft_seq``, ``forward``,
        ``reverse`` (``None`` when not given), ``output``, ``iterations``,
        ``threads`` and ``pilon``.
    """
    parser = argparse.ArgumentParser(description='Run pilon many times')
    # (long flag, short flag, required, help text) for each option.
    option_table = [
        ('--draft_seq', '-d', True, 'Draft sequence to correct'),
        ('--forward', '-f', True, 'Reads to use for correction'),
        ('--reverse', '-r', False, 'Reverse read for correction'),
        ('--output', '-o', True, 'Output directory'),
        ('--iterations', '-i', True, 'How many times to run pilon'),
        ('--threads', '-t', True, 'Threads to use'),
        ('--pilon', '-p', True, 'Path to pilon.jar'),
    ]
    for long_flag, short_flag, required, help_text in option_table:
        parser.add_argument(long_flag, short_flag, required=required,
                            help=help_text)
    return parser.parse_args()
def run_bwa(reference_genome, forward_read, reverse_read, threads, output, i):
    """Align reads to the reference genome with BWA MEM.

    Indexes *reference_genome*, aligns *forward_read* (and *reverse_read*
    when it is non-empty) with ``threads`` threads, and writes the raw SAM
    output to ``<output>/bwa_mem_<i+1>.sam``.

    Returns:
        Path of the SAM file that was written.
    """
    print('Align reads with BWA MEM')
    # bwa mem requires the reference to be indexed first.
    bwa_index_args = ['bwa', 'index', reference_genome]
    process = subprocess.Popen(bwa_index_args, stdin=subprocess.PIPE,
        stdout=subprocess.PIPE)
    out, err = process.communicate()
    # Only pass the reverse read when one was supplied: an empty string
    # argument would be handed to bwa as a (nonexistent) input file.
    bwa_mem_args = ['bwa', 'mem', '-t', threads, '-x', 'ont2d',
        reference_genome, forward_read]
    if reverse_read:
        bwa_mem_args.append(reverse_read)
    process = subprocess.Popen(bwa_mem_args, stdin=subprocess.PIPE, stdout=
        subprocess.PIPE)
    out, err = process.communicate()
    sam_file = os.path.join(output, 'bwa_mem_' + str(i + 1) + '.sam')
    # communicate() yields bytes (no text mode requested), so the SAM
    # file must be opened in binary mode; text mode ('w') would raise
    # TypeError on Python 3.
    with open(sam_file, 'wb') as bwa_mem_out:
        bwa_mem_out.write(out)
    return sam_file
def run_samtools(sam_file, threads, output, i):
    """Convert the SAM alignment into a sorted, indexed BAM file.

    Produces ``bwa_mem_<i+1>.bam`` and ``bwa_mem_<i+1>.sorted.bam`` (plus
    its index) inside *output* and returns the sorted BAM path.
    """
    def _invoke(argv):
        # Run one samtools command and block until it completes.
        proc = subprocess.Popen(argv, stdin=subprocess.PIPE,
                                stdout=subprocess.PIPE)
        return proc.communicate()

    prefix = os.path.join(output, 'bwa_mem_' + str(i + 1))
    print('Convert SAM-file to BAM-file')
    bam_file = prefix + '.bam'
    _invoke(['samtools', 'view', '-@', threads, '-bS', '-o', bam_file,
             sam_file])
    print('Sort BAM-file')
    bam_sorted_file = prefix + '.sorted.bam'
    _invoke(['samtools', 'sort', bam_file, '-o', bam_sorted_file])
    # Index the sorted BAM so downstream tools can random-access it.
    _invoke(['samtools', 'index', bam_sorted_file])
    return bam_sorted_file
def run_pilon(bam_sorted_file, reference_genome, pilon_output, threads,
    pilon_path):
    """Run Pilon to correct *reference_genome* using the aligned reads.

    Invokes ``pilon.jar`` (16 GB heap) with the sorted BAM as the paired
    fragment input, echoes Pilon's stdout, and saves it to
    ``<pilon_output>.log``.
    """
    print('Run Pilon')
    pilon_args = ['java', '-Xmx16G', '-jar', pilon_path, '--genome',
        reference_genome, '--frags', bam_sorted_file, '--threads', threads,
        '--output', pilon_output]
    process = subprocess.Popen(pilon_args, stdin=subprocess.PIPE, stdout=
        subprocess.PIPE)
    out, err = process.communicate()
    print(out)
    # communicate() yields bytes, so the log must be opened in binary
    # mode; the original text-mode open ('w') raises TypeError on
    # Python 3 when bytes are written.
    with open(pilon_output + '.log', 'wb') as pilon_log:
        pilon_log.write(out)
def main():
    """Drive the iterative polishing loop: bwa -> samtools -> pilon.

    Each iteration aligns the reads against the current reference, runs
    Pilon on the alignment, then feeds Pilon's corrected FASTA back in as
    the next iteration's reference.
    """
    # Get arguments
    args = parse_arguments()

    logging.basicConfig(filename='logging.log', level=logging.DEBUG)

    output = args.output
    reference_genome = args.draft_seq
    # Reverse reads are optional; fall back to an empty string.
    if args.reverse:
        reverse_read = args.reverse
    else:
        reverse_read = ""
    forward_read = args.forward
    threads = args.threads
    iterations = args.iterations
    pilon_path = args.pilon
    logging.info('OUTPUT DIRECTORY:' + output)
    logging.info('READS: ' + forward_read + ', ' + reverse_read)
    logging.info('THREADS: ' + threads)
    logging.info('ITERATIONS: ' + iterations)

    # Set pilon output
    pilon_output = os.path.join(output, 'pilon_1')
    # NOTE(review): os.mkdir raises FileExistsError if the output
    # directory already exists -- intentional fail-fast? confirm.
    os.mkdir(output)

    logging.info('START CORRECTION')
    for i in range(int(iterations)):
        # Log
        logging.info('ITERATION: ' + str(i + 1))
        logging.info('REFERENCE GENOME: ' + reference_genome)
        logging.info('PILON OUTPUT: ' + pilon_output)
        sam_file = run_bwa(reference_genome, forward_read, reverse_read, threads, output, i)
        bam_sorted_file = run_samtools(sam_file, threads, output, i)
        run_pilon(bam_sorted_file, reference_genome, pilon_output, threads, pilon_path)

        # Set pilon output to new reference: Pilon's corrected FASTA is
        # the next iteration's input, written under pilon_<i+2>.
        reference_genome = os.path.join(output, 'pilon_' + str(i + 1) + '.fasta')
        pilon_output = os.path.join(output, 'pilon_' + str(i + 2))
if __name__ == '__main__':
    # Entry point: run the iterative correction pipeline only when this
    # file is executed as a script, not when it is imported.
    main()
|
flexible
|
{
"blob_id": "fdfb71595bf86fbe1763535814ec9c3cfd312d87",
"index": 2722,
"step-1": "<mask token>\n\n\ndef run_bwa(reference_genome, forward_read, reverse_read, threads, output, i):\n \"\"\" Run bwa to align reads to reference genome \"\"\"\n print('Align reads with BWA MEM')\n bwa_index_args = ['bwa', 'index', reference_genome]\n process = subprocess.Popen(bwa_index_args, stdin=subprocess.PIPE,\n stdout=subprocess.PIPE)\n out, err = process.communicate()\n bwa_mem_args = ['bwa', 'mem', '-t', threads, '-x', 'ont2d',\n reference_genome, forward_read, reverse_read]\n process = subprocess.Popen(bwa_mem_args, stdin=subprocess.PIPE, stdout=\n subprocess.PIPE)\n out, err = process.communicate()\n sam_file = os.path.join(output, 'bwa_mem_' + str(i + 1) + '.sam')\n with open(sam_file, 'w') as bwa_mem_out:\n bwa_mem_out.write(out)\n return sam_file\n\n\ndef run_samtools(sam_file, threads, output, i):\n \"\"\" Sort and convert to BAM using samtools \"\"\"\n print('Convert SAM-file to BAM-file')\n bam_file = os.path.join(output, 'bwa_mem_' + str(i + 1) + '.bam')\n samtools_view_args = ['samtools', 'view', '-@', threads, '-bS', '-o',\n bam_file, sam_file]\n process = subprocess.Popen(samtools_view_args, stdin=subprocess.PIPE,\n stdout=subprocess.PIPE)\n out, err = process.communicate()\n print('Sort BAM-file')\n bam_sorted_file = os.path.join(output, 'bwa_mem_' + str(i + 1) +\n '.sorted.bam')\n samtools_sort_args = ['samtools', 'sort', bam_file, '-o', bam_sorted_file]\n process = subprocess.Popen(samtools_sort_args, stdin=subprocess.PIPE,\n stdout=subprocess.PIPE)\n out, err = process.communicate()\n samtools_index_args = ['samtools', 'index', bam_sorted_file]\n process = subprocess.Popen(samtools_index_args, stdin=subprocess.PIPE,\n stdout=subprocess.PIPE)\n out, err = process.communicate()\n return bam_sorted_file\n\n\n<mask token>\n\n\ndef main():\n \"\"\" Main Application \"\"\"\n args = parse_arguments()\n logging.basicConfig(filename='logging.log', level=logging.DEBUG)\n output = args.output\n reference_genome = args.draft_seq\n if 
args.reverse:\n reverse_read = args.reverse\n else:\n reverse_read = ''\n forward_read = args.forward\n threads = args.threads\n iterations = args.iterations\n pilon_path = args.pilon\n logging.info('OUTPUT DIRECTORY:' + output)\n logging.info('READS: ' + forward_read + ', ' + reverse_read)\n logging.info('THREADS: ' + threads)\n logging.info('ITERATIONS: ' + iterations)\n pilon_output = os.path.join(output, 'pilon_1')\n os.mkdir(output)\n logging.info('START CORRECTION')\n for i in range(int(iterations)):\n logging.info('ITERATION: ' + str(i + 1))\n logging.info('REFERENCE GENOME: ' + reference_genome)\n logging.info('PILON OUTPUT: ' + pilon_output)\n sam_file = run_bwa(reference_genome, forward_read, reverse_read,\n threads, output, i)\n bam_sorted_file = run_samtools(sam_file, threads, output, i)\n run_pilon(bam_sorted_file, reference_genome, pilon_output, threads,\n pilon_path)\n reference_genome = os.path.join(output, 'pilon_' + str(i + 1) +\n '.fasta')\n pilon_output = os.path.join(output, 'pilon_' + str(i + 2))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef run_bwa(reference_genome, forward_read, reverse_read, threads, output, i):\n \"\"\" Run bwa to align reads to reference genome \"\"\"\n print('Align reads with BWA MEM')\n bwa_index_args = ['bwa', 'index', reference_genome]\n process = subprocess.Popen(bwa_index_args, stdin=subprocess.PIPE,\n stdout=subprocess.PIPE)\n out, err = process.communicate()\n bwa_mem_args = ['bwa', 'mem', '-t', threads, '-x', 'ont2d',\n reference_genome, forward_read, reverse_read]\n process = subprocess.Popen(bwa_mem_args, stdin=subprocess.PIPE, stdout=\n subprocess.PIPE)\n out, err = process.communicate()\n sam_file = os.path.join(output, 'bwa_mem_' + str(i + 1) + '.sam')\n with open(sam_file, 'w') as bwa_mem_out:\n bwa_mem_out.write(out)\n return sam_file\n\n\ndef run_samtools(sam_file, threads, output, i):\n \"\"\" Sort and convert to BAM using samtools \"\"\"\n print('Convert SAM-file to BAM-file')\n bam_file = os.path.join(output, 'bwa_mem_' + str(i + 1) + '.bam')\n samtools_view_args = ['samtools', 'view', '-@', threads, '-bS', '-o',\n bam_file, sam_file]\n process = subprocess.Popen(samtools_view_args, stdin=subprocess.PIPE,\n stdout=subprocess.PIPE)\n out, err = process.communicate()\n print('Sort BAM-file')\n bam_sorted_file = os.path.join(output, 'bwa_mem_' + str(i + 1) +\n '.sorted.bam')\n samtools_sort_args = ['samtools', 'sort', bam_file, '-o', bam_sorted_file]\n process = subprocess.Popen(samtools_sort_args, stdin=subprocess.PIPE,\n stdout=subprocess.PIPE)\n out, err = process.communicate()\n samtools_index_args = ['samtools', 'index', bam_sorted_file]\n process = subprocess.Popen(samtools_index_args, stdin=subprocess.PIPE,\n stdout=subprocess.PIPE)\n out, err = process.communicate()\n return bam_sorted_file\n\n\ndef run_pilon(bam_sorted_file, reference_genome, pilon_output, threads,\n pilon_path):\n \"\"\" Run Pilon \"\"\"\n print('Run Pilon')\n pilon_args = ['java', '-Xmx16G', '-jar', pilon_path, '--genome',\n reference_genome, '--frags', 
bam_sorted_file, '--threads', threads,\n '--output', pilon_output]\n process = subprocess.Popen(pilon_args, stdin=subprocess.PIPE, stdout=\n subprocess.PIPE)\n out, err = process.communicate()\n print(out)\n with open(pilon_output + '.log', 'w') as pilon_log:\n pilon_log.write(out)\n\n\ndef main():\n \"\"\" Main Application \"\"\"\n args = parse_arguments()\n logging.basicConfig(filename='logging.log', level=logging.DEBUG)\n output = args.output\n reference_genome = args.draft_seq\n if args.reverse:\n reverse_read = args.reverse\n else:\n reverse_read = ''\n forward_read = args.forward\n threads = args.threads\n iterations = args.iterations\n pilon_path = args.pilon\n logging.info('OUTPUT DIRECTORY:' + output)\n logging.info('READS: ' + forward_read + ', ' + reverse_read)\n logging.info('THREADS: ' + threads)\n logging.info('ITERATIONS: ' + iterations)\n pilon_output = os.path.join(output, 'pilon_1')\n os.mkdir(output)\n logging.info('START CORRECTION')\n for i in range(int(iterations)):\n logging.info('ITERATION: ' + str(i + 1))\n logging.info('REFERENCE GENOME: ' + reference_genome)\n logging.info('PILON OUTPUT: ' + pilon_output)\n sam_file = run_bwa(reference_genome, forward_read, reverse_read,\n threads, output, i)\n bam_sorted_file = run_samtools(sam_file, threads, output, i)\n run_pilon(bam_sorted_file, reference_genome, pilon_output, threads,\n pilon_path)\n reference_genome = os.path.join(output, 'pilon_' + str(i + 1) +\n '.fasta')\n pilon_output = os.path.join(output, 'pilon_' + str(i + 2))\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef parse_arguments():\n \"\"\" Parse command line arguments \"\"\"\n parser = argparse.ArgumentParser(description='Run pilon many times')\n parser.add_argument('--draft_seq', '-d', required=True, help=\n 'Draft sequence to correct')\n parser.add_argument('--forward', '-f', required=True, help=\n 'Reads to use for correction')\n parser.add_argument('--reverse', '-r', help='Reverse read for correction')\n parser.add_argument('--output', '-o', required=True, help=\n 'Output directory')\n parser.add_argument('--iterations', '-i', required=True, help=\n 'How many times to run pilon')\n parser.add_argument('--threads', '-t', required=True, help='Threads to use'\n )\n parser.add_argument('--pilon', '-p', required=True, help=\n 'Path to pilon.jar')\n args = parser.parse_args()\n return args\n\n\ndef run_bwa(reference_genome, forward_read, reverse_read, threads, output, i):\n \"\"\" Run bwa to align reads to reference genome \"\"\"\n print('Align reads with BWA MEM')\n bwa_index_args = ['bwa', 'index', reference_genome]\n process = subprocess.Popen(bwa_index_args, stdin=subprocess.PIPE,\n stdout=subprocess.PIPE)\n out, err = process.communicate()\n bwa_mem_args = ['bwa', 'mem', '-t', threads, '-x', 'ont2d',\n reference_genome, forward_read, reverse_read]\n process = subprocess.Popen(bwa_mem_args, stdin=subprocess.PIPE, stdout=\n subprocess.PIPE)\n out, err = process.communicate()\n sam_file = os.path.join(output, 'bwa_mem_' + str(i + 1) + '.sam')\n with open(sam_file, 'w') as bwa_mem_out:\n bwa_mem_out.write(out)\n return sam_file\n\n\ndef run_samtools(sam_file, threads, output, i):\n \"\"\" Sort and convert to BAM using samtools \"\"\"\n print('Convert SAM-file to BAM-file')\n bam_file = os.path.join(output, 'bwa_mem_' + str(i + 1) + '.bam')\n samtools_view_args = ['samtools', 'view', '-@', threads, '-bS', '-o',\n bam_file, sam_file]\n process = subprocess.Popen(samtools_view_args, stdin=subprocess.PIPE,\n stdout=subprocess.PIPE)\n out, err = 
process.communicate()\n print('Sort BAM-file')\n bam_sorted_file = os.path.join(output, 'bwa_mem_' + str(i + 1) +\n '.sorted.bam')\n samtools_sort_args = ['samtools', 'sort', bam_file, '-o', bam_sorted_file]\n process = subprocess.Popen(samtools_sort_args, stdin=subprocess.PIPE,\n stdout=subprocess.PIPE)\n out, err = process.communicate()\n samtools_index_args = ['samtools', 'index', bam_sorted_file]\n process = subprocess.Popen(samtools_index_args, stdin=subprocess.PIPE,\n stdout=subprocess.PIPE)\n out, err = process.communicate()\n return bam_sorted_file\n\n\ndef run_pilon(bam_sorted_file, reference_genome, pilon_output, threads,\n pilon_path):\n \"\"\" Run Pilon \"\"\"\n print('Run Pilon')\n pilon_args = ['java', '-Xmx16G', '-jar', pilon_path, '--genome',\n reference_genome, '--frags', bam_sorted_file, '--threads', threads,\n '--output', pilon_output]\n process = subprocess.Popen(pilon_args, stdin=subprocess.PIPE, stdout=\n subprocess.PIPE)\n out, err = process.communicate()\n print(out)\n with open(pilon_output + '.log', 'w') as pilon_log:\n pilon_log.write(out)\n\n\ndef main():\n \"\"\" Main Application \"\"\"\n args = parse_arguments()\n logging.basicConfig(filename='logging.log', level=logging.DEBUG)\n output = args.output\n reference_genome = args.draft_seq\n if args.reverse:\n reverse_read = args.reverse\n else:\n reverse_read = ''\n forward_read = args.forward\n threads = args.threads\n iterations = args.iterations\n pilon_path = args.pilon\n logging.info('OUTPUT DIRECTORY:' + output)\n logging.info('READS: ' + forward_read + ', ' + reverse_read)\n logging.info('THREADS: ' + threads)\n logging.info('ITERATIONS: ' + iterations)\n pilon_output = os.path.join(output, 'pilon_1')\n os.mkdir(output)\n logging.info('START CORRECTION')\n for i in range(int(iterations)):\n logging.info('ITERATION: ' + str(i + 1))\n logging.info('REFERENCE GENOME: ' + reference_genome)\n logging.info('PILON OUTPUT: ' + pilon_output)\n sam_file = run_bwa(reference_genome, 
forward_read, reverse_read,\n threads, output, i)\n bam_sorted_file = run_samtools(sam_file, threads, output, i)\n run_pilon(bam_sorted_file, reference_genome, pilon_output, threads,\n pilon_path)\n reference_genome = os.path.join(output, 'pilon_' + str(i + 1) +\n '.fasta')\n pilon_output = os.path.join(output, 'pilon_' + str(i + 2))\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "<mask token>\nimport os\nimport argparse\nimport logging\nimport subprocess\n\n\ndef parse_arguments():\n \"\"\" Parse command line arguments \"\"\"\n parser = argparse.ArgumentParser(description='Run pilon many times')\n parser.add_argument('--draft_seq', '-d', required=True, help=\n 'Draft sequence to correct')\n parser.add_argument('--forward', '-f', required=True, help=\n 'Reads to use for correction')\n parser.add_argument('--reverse', '-r', help='Reverse read for correction')\n parser.add_argument('--output', '-o', required=True, help=\n 'Output directory')\n parser.add_argument('--iterations', '-i', required=True, help=\n 'How many times to run pilon')\n parser.add_argument('--threads', '-t', required=True, help='Threads to use'\n )\n parser.add_argument('--pilon', '-p', required=True, help=\n 'Path to pilon.jar')\n args = parser.parse_args()\n return args\n\n\ndef run_bwa(reference_genome, forward_read, reverse_read, threads, output, i):\n \"\"\" Run bwa to align reads to reference genome \"\"\"\n print('Align reads with BWA MEM')\n bwa_index_args = ['bwa', 'index', reference_genome]\n process = subprocess.Popen(bwa_index_args, stdin=subprocess.PIPE,\n stdout=subprocess.PIPE)\n out, err = process.communicate()\n bwa_mem_args = ['bwa', 'mem', '-t', threads, '-x', 'ont2d',\n reference_genome, forward_read, reverse_read]\n process = subprocess.Popen(bwa_mem_args, stdin=subprocess.PIPE, stdout=\n subprocess.PIPE)\n out, err = process.communicate()\n sam_file = os.path.join(output, 'bwa_mem_' + str(i + 1) + '.sam')\n with open(sam_file, 'w') as bwa_mem_out:\n bwa_mem_out.write(out)\n return sam_file\n\n\ndef run_samtools(sam_file, threads, output, i):\n \"\"\" Sort and convert to BAM using samtools \"\"\"\n print('Convert SAM-file to BAM-file')\n bam_file = os.path.join(output, 'bwa_mem_' + str(i + 1) + '.bam')\n samtools_view_args = ['samtools', 'view', '-@', threads, '-bS', '-o',\n bam_file, sam_file]\n process = subprocess.Popen(samtools_view_args, 
stdin=subprocess.PIPE,\n stdout=subprocess.PIPE)\n out, err = process.communicate()\n print('Sort BAM-file')\n bam_sorted_file = os.path.join(output, 'bwa_mem_' + str(i + 1) +\n '.sorted.bam')\n samtools_sort_args = ['samtools', 'sort', bam_file, '-o', bam_sorted_file]\n process = subprocess.Popen(samtools_sort_args, stdin=subprocess.PIPE,\n stdout=subprocess.PIPE)\n out, err = process.communicate()\n samtools_index_args = ['samtools', 'index', bam_sorted_file]\n process = subprocess.Popen(samtools_index_args, stdin=subprocess.PIPE,\n stdout=subprocess.PIPE)\n out, err = process.communicate()\n return bam_sorted_file\n\n\ndef run_pilon(bam_sorted_file, reference_genome, pilon_output, threads,\n pilon_path):\n \"\"\" Run Pilon \"\"\"\n print('Run Pilon')\n pilon_args = ['java', '-Xmx16G', '-jar', pilon_path, '--genome',\n reference_genome, '--frags', bam_sorted_file, '--threads', threads,\n '--output', pilon_output]\n process = subprocess.Popen(pilon_args, stdin=subprocess.PIPE, stdout=\n subprocess.PIPE)\n out, err = process.communicate()\n print(out)\n with open(pilon_output + '.log', 'w') as pilon_log:\n pilon_log.write(out)\n\n\ndef main():\n \"\"\" Main Application \"\"\"\n args = parse_arguments()\n logging.basicConfig(filename='logging.log', level=logging.DEBUG)\n output = args.output\n reference_genome = args.draft_seq\n if args.reverse:\n reverse_read = args.reverse\n else:\n reverse_read = ''\n forward_read = args.forward\n threads = args.threads\n iterations = args.iterations\n pilon_path = args.pilon\n logging.info('OUTPUT DIRECTORY:' + output)\n logging.info('READS: ' + forward_read + ', ' + reverse_read)\n logging.info('THREADS: ' + threads)\n logging.info('ITERATIONS: ' + iterations)\n pilon_output = os.path.join(output, 'pilon_1')\n os.mkdir(output)\n logging.info('START CORRECTION')\n for i in range(int(iterations)):\n logging.info('ITERATION: ' + str(i + 1))\n logging.info('REFERENCE GENOME: ' + reference_genome)\n logging.info('PILON OUTPUT: ' + 
pilon_output)\n sam_file = run_bwa(reference_genome, forward_read, reverse_read,\n threads, output, i)\n bam_sorted_file = run_samtools(sam_file, threads, output, i)\n run_pilon(bam_sorted_file, reference_genome, pilon_output, threads,\n pilon_path)\n reference_genome = os.path.join(output, 'pilon_' + str(i + 1) +\n '.fasta')\n pilon_output = os.path.join(output, 'pilon_' + str(i + 2))\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "\"\"\" Script to run pilon iteratively to correct genome assemblies \"\"\"\nimport os\nimport argparse\nimport logging\nimport subprocess\n\n\ndef parse_arguments():\n \"\"\" Parse command line arguments \"\"\"\n # Create parser\n parser = argparse.ArgumentParser(description='Run pilon many times')\n \n # Add arguments\n parser.add_argument('--draft_seq', '-d', required=True,\n help='Draft sequence to correct')\n parser.add_argument('--forward', '-f', required=True,\n help='Reads to use for correction')\n parser.add_argument('--reverse', '-r',\n help='Reverse read for correction')\n parser.add_argument('--output', '-o', required=True,\n help='Output directory')\n parser.add_argument('--iterations', '-i', required=True,\n help='How many times to run pilon')\n parser.add_argument('--threads', '-t', required=True,\n help='Threads to use')\n parser.add_argument('--pilon', '-p', required=True,\n help='Path to pilon.jar')\n # Parse arguments\n args = parser.parse_args()\n\n return args\n\ndef run_bwa(reference_genome, forward_read, reverse_read, threads, output, i):\n \"\"\" Run bwa to align reads to reference genome \"\"\"\n # Index ref genome\n print('Align reads with BWA MEM')\n bwa_index_args = ['bwa', 'index', reference_genome]\n process = subprocess.Popen(bwa_index_args, stdin=subprocess.PIPE,\n stdout=subprocess.PIPE)\n out, err = process.communicate()\n \n # Align reads to reference genome\n bwa_mem_args = ['bwa', 'mem', '-t', threads, '-x', 'ont2d', reference_genome, forward_read, reverse_read] \n process = subprocess.Popen(bwa_mem_args, stdin=subprocess.PIPE, \n stdout=subprocess.PIPE)\n out, err = process.communicate()\n\n # Write alignment to file\n sam_file = os.path.join(output, 'bwa_mem_' + str(i + 1) + '.sam')\n with open(sam_file, 'w') as bwa_mem_out:\n bwa_mem_out.write(out)\n \n return sam_file\n\ndef run_samtools(sam_file, threads, output, i):\n \"\"\" Sort and convert to BAM using samtools \"\"\"\n\n # Conver the SAM-file to a BAM-file\n 
print('Convert SAM-file to BAM-file')\n bam_file = os.path.join(output, 'bwa_mem_' + str(i + 1) + '.bam')\n samtools_view_args = ['samtools', 'view', '-@', threads, '-bS', '-o',\n bam_file, sam_file]\n process = subprocess.Popen(samtools_view_args, stdin=subprocess.PIPE,\n stdout=subprocess.PIPE)\n out, err = process.communicate()\n \n # Sort and return the BAM-fil\n print('Sort BAM-file')\n bam_sorted_file = os.path.join(output, 'bwa_mem_' + str(i + 1) + '.sorted.bam')\n samtools_sort_args = ['samtools', 'sort', bam_file, '-o', bam_sorted_file]\n process = subprocess.Popen(samtools_sort_args, stdin=subprocess.PIPE,\n stdout=subprocess.PIPE)\n out, err = process.communicate()\n\n # Index sorted BAM-file\n samtools_index_args = ['samtools', 'index', bam_sorted_file]\n process = subprocess.Popen(samtools_index_args, stdin=subprocess.PIPE,\n stdout=subprocess.PIPE)\n out, err = process.communicate()\n\n return bam_sorted_file\n\ndef run_pilon(bam_sorted_file, reference_genome, pilon_output, threads, pilon_path):\n \"\"\" Run Pilon \"\"\"\n print('Run Pilon')\n pilon_args = ['java', '-Xmx16G', '-jar', pilon_path, '--genome', reference_genome,\n '--frags', bam_sorted_file, '--threads', threads, '--output',\n pilon_output]\n process = subprocess.Popen(pilon_args, stdin=subprocess.PIPE,\n stdout=subprocess.PIPE)\n out, err = process.communicate()\n print(out)\n with open(pilon_output + '.log', 'w') as pilon_log:\n pilon_log.write(out)\n\ndef main():\n \"\"\" Main Application \"\"\"\n # Get arguments\n args = parse_arguments()\n \n logging.basicConfig(filename='logging.log', level=logging.DEBUG)\n \n output = args.output\n reference_genome = args.draft_seq\n if args.reverse:\n reverse_read = args.reverse\n else:\n reverse_read = \"\"\n forward_read = args.forward\n threads = args.threads\n iterations = args.iterations\n pilon_path = args.pilon\n logging.info('OUTPUT DIRECTORY:' + output)\n logging.info('READS: ' + forward_read + ', ' + reverse_read) \n 
logging.info('THREADS: ' + threads)\n logging.info('ITERATIONS: ' + iterations)\n\n # Set pilon output\n pilon_output = os.path.join(output, 'pilon_1')\n os.mkdir(output)\n\n logging.info('START CORRECTION')\n for i in range(int(iterations)):\n # Log\n logging.info('ITERATION: ' + str(i + 1))\n logging.info('REFERENCE GENOME: ' + reference_genome)\n logging.info('PILON OUTPUT: ' + pilon_output)\n sam_file = run_bwa(reference_genome, forward_read, reverse_read, threads, output, i)\n bam_sorted_file = run_samtools(sam_file, threads, output, i)\n run_pilon(bam_sorted_file, reference_genome, pilon_output, threads, pilon_path)\n\n # Set pilon output to new reference\n reference_genome = os.path.join(output, 'pilon_' + str(i + 1) + '.fasta')\n pilon_output = os.path.join(output, 'pilon_' + str(i + 2))\n\nif __name__ == '__main__':\n main()\n",
"step-ids": [
3,
4,
6,
7,
8
]
}
|
[
3,
4,
6,
7,
8
] |
#!/usr/bin/env python3
# Interactive-Python startup file: pre-import common modules and
# announce that the custom startup ran.

import sys
import os
import math
import random

# Drop the default interactive hook (set by site.py) so it does not run
# in addition to this custom startup file.
if hasattr(sys, '__interactivehook__'):
    del sys.__interactivehook__

print('Python3 startup file loaded from ~/.config/pystartup.py')
|
normal
|
{
"blob_id": "5ddde3aa6eaa30b70743272a532874663067eed6",
"index": 3157,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif hasattr(sys, '__interactivehook__'):\n del sys.__interactivehook__\nprint('Python3 startup file loaded from ~/.config/pystartup.py')\n",
"step-3": "import sys\nimport os\nimport math\nimport random\nif hasattr(sys, '__interactivehook__'):\n del sys.__interactivehook__\nprint('Python3 startup file loaded from ~/.config/pystartup.py')\n",
"step-4": "#!/usr/bin/env python3\n\nimport sys\nimport os\nimport math\nimport random\n\nif hasattr(sys, '__interactivehook__'):\n del sys.__interactivehook__\n\nprint('Python3 startup file loaded from ~/.config/pystartup.py')\n\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import shutil
import tempfile
import salt.runners.net as net
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.mock import MagicMock
from tests.support.runtests import RUNTIME_VARS
from tests.support.unit import TestCase, skipIf
@skipIf(not net.HAS_NAPALM, "napalm module required for this test")
class NetTest(TestCase, LoaderModuleMockMixin):
    """
    Test the net runner

    Every test stubs ``mine.get`` to return an empty dict, so each net
    runner function is exercised on its "no minion data" path and is
    expected to return its empty result (``None`` or ``{}``).
    """

    def setup_loader_modules(self):
        # Stub mine.get so the runner sees no minion data at all.
        mock_get = MagicMock(return_value={})
        # Throw-away extension_modules dir; removed after the test.
        self.extmods_dir = tempfile.mkdtemp(dir=RUNTIME_VARS.TMP)
        self.addCleanup(shutil.rmtree, self.extmods_dir, ignore_errors=True)
        # Minimal opts/salt dunders injected into the net runner module.
        return {
            net: {
                "__opts__": {
                    "optimization_order": [0, 1, 2],
                    "renderer": "yaml",
                    "renderer_blacklist": [],
                    "renderer_whitelist": [],
                    "extension_modules": self.extmods_dir,
                },
                "__salt__": {"mine.get": mock_get},
            }
        }

    def test_interfaces(self):
        # No mine data -> interfaces() is expected to return None.
        ret = net.interfaces()
        self.assertEqual(None, ret)

    def test_findarp(self):
        # No mine data -> findarp() is expected to return None.
        ret = net.findarp()
        self.assertEqual(None, ret)

    def test_findmac(self):
        # No mine data -> findmac() is expected to return None.
        ret = net.findmac()
        self.assertEqual(None, ret)

    def test_lldp(self):
        # No mine data -> lldp() is expected to return None.
        ret = net.lldp()
        self.assertEqual(None, ret)

    def test_find(self):
        # find('') on an empty mine is expected to return an empty dict.
        ret = net.find("")
        self.assertEqual({}, ret)

    def test_multi_find(self):
        # No patterns and no mine data -> expected to return None.
        ret = net.multi_find()
        self.assertEqual(None, ret)
|
normal
|
{
"blob_id": "0fb288e3ab074e021ec726d71cbd5c8546a8455b",
"index": 744,
"step-1": "<mask token>\n\n\n@skipIf(not net.HAS_NAPALM, 'napalm module required for this test')\nclass NetTest(TestCase, LoaderModuleMockMixin):\n <mask token>\n <mask token>\n\n def test_interfaces(self):\n ret = net.interfaces()\n self.assertEqual(None, ret)\n\n def test_findarp(self):\n ret = net.findarp()\n self.assertEqual(None, ret)\n <mask token>\n <mask token>\n\n def test_find(self):\n ret = net.find('')\n self.assertEqual({}, ret)\n\n def test_multi_find(self):\n ret = net.multi_find()\n self.assertEqual(None, ret)\n",
"step-2": "<mask token>\n\n\n@skipIf(not net.HAS_NAPALM, 'napalm module required for this test')\nclass NetTest(TestCase, LoaderModuleMockMixin):\n <mask token>\n <mask token>\n\n def test_interfaces(self):\n ret = net.interfaces()\n self.assertEqual(None, ret)\n\n def test_findarp(self):\n ret = net.findarp()\n self.assertEqual(None, ret)\n\n def test_findmac(self):\n ret = net.findmac()\n self.assertEqual(None, ret)\n <mask token>\n\n def test_find(self):\n ret = net.find('')\n self.assertEqual({}, ret)\n\n def test_multi_find(self):\n ret = net.multi_find()\n self.assertEqual(None, ret)\n",
"step-3": "<mask token>\n\n\n@skipIf(not net.HAS_NAPALM, 'napalm module required for this test')\nclass NetTest(TestCase, LoaderModuleMockMixin):\n <mask token>\n\n def setup_loader_modules(self):\n mock_get = MagicMock(return_value={})\n self.extmods_dir = tempfile.mkdtemp(dir=RUNTIME_VARS.TMP)\n self.addCleanup(shutil.rmtree, self.extmods_dir, ignore_errors=True)\n return {net: {'__opts__': {'optimization_order': [0, 1, 2],\n 'renderer': 'yaml', 'renderer_blacklist': [],\n 'renderer_whitelist': [], 'extension_modules': self.extmods_dir\n }, '__salt__': {'mine.get': mock_get}}}\n\n def test_interfaces(self):\n ret = net.interfaces()\n self.assertEqual(None, ret)\n\n def test_findarp(self):\n ret = net.findarp()\n self.assertEqual(None, ret)\n\n def test_findmac(self):\n ret = net.findmac()\n self.assertEqual(None, ret)\n\n def test_lldp(self):\n ret = net.lldp()\n self.assertEqual(None, ret)\n\n def test_find(self):\n ret = net.find('')\n self.assertEqual({}, ret)\n\n def test_multi_find(self):\n ret = net.multi_find()\n self.assertEqual(None, ret)\n",
"step-4": "import shutil\nimport tempfile\nimport salt.runners.net as net\nfrom tests.support.mixins import LoaderModuleMockMixin\nfrom tests.support.mock import MagicMock\nfrom tests.support.runtests import RUNTIME_VARS\nfrom tests.support.unit import TestCase, skipIf\n\n\n@skipIf(not net.HAS_NAPALM, 'napalm module required for this test')\nclass NetTest(TestCase, LoaderModuleMockMixin):\n \"\"\"\n Test the net runner\n \"\"\"\n\n def setup_loader_modules(self):\n mock_get = MagicMock(return_value={})\n self.extmods_dir = tempfile.mkdtemp(dir=RUNTIME_VARS.TMP)\n self.addCleanup(shutil.rmtree, self.extmods_dir, ignore_errors=True)\n return {net: {'__opts__': {'optimization_order': [0, 1, 2],\n 'renderer': 'yaml', 'renderer_blacklist': [],\n 'renderer_whitelist': [], 'extension_modules': self.extmods_dir\n }, '__salt__': {'mine.get': mock_get}}}\n\n def test_interfaces(self):\n ret = net.interfaces()\n self.assertEqual(None, ret)\n\n def test_findarp(self):\n ret = net.findarp()\n self.assertEqual(None, ret)\n\n def test_findmac(self):\n ret = net.findmac()\n self.assertEqual(None, ret)\n\n def test_lldp(self):\n ret = net.lldp()\n self.assertEqual(None, ret)\n\n def test_find(self):\n ret = net.find('')\n self.assertEqual({}, ret)\n\n def test_multi_find(self):\n ret = net.multi_find()\n self.assertEqual(None, ret)\n",
"step-5": "import shutil\nimport tempfile\n\nimport salt.runners.net as net\nfrom tests.support.mixins import LoaderModuleMockMixin\nfrom tests.support.mock import MagicMock\nfrom tests.support.runtests import RUNTIME_VARS\nfrom tests.support.unit import TestCase, skipIf\n\n\n@skipIf(not net.HAS_NAPALM, \"napalm module required for this test\")\nclass NetTest(TestCase, LoaderModuleMockMixin):\n \"\"\"\n Test the net runner\n \"\"\"\n\n def setup_loader_modules(self):\n mock_get = MagicMock(return_value={})\n self.extmods_dir = tempfile.mkdtemp(dir=RUNTIME_VARS.TMP)\n self.addCleanup(shutil.rmtree, self.extmods_dir, ignore_errors=True)\n return {\n net: {\n \"__opts__\": {\n \"optimization_order\": [0, 1, 2],\n \"renderer\": \"yaml\",\n \"renderer_blacklist\": [],\n \"renderer_whitelist\": [],\n \"extension_modules\": self.extmods_dir,\n },\n \"__salt__\": {\"mine.get\": mock_get},\n }\n }\n\n def test_interfaces(self):\n ret = net.interfaces()\n self.assertEqual(None, ret)\n\n def test_findarp(self):\n ret = net.findarp()\n self.assertEqual(None, ret)\n\n def test_findmac(self):\n ret = net.findmac()\n self.assertEqual(None, ret)\n\n def test_lldp(self):\n ret = net.lldp()\n self.assertEqual(None, ret)\n\n def test_find(self):\n ret = net.find(\"\")\n self.assertEqual({}, ret)\n\n def test_multi_find(self):\n ret = net.multi_find()\n self.assertEqual(None, ret)\n",
"step-ids": [
5,
6,
8,
10,
11
]
}
|
[
5,
6,
8,
10,
11
] |
class Pwm():
def __init__(self, number, path, features):
self.id = number
self.path = path + 'pwm' + number
self.features = features
self.duty = self.get_feature('')
self.enable = self.get_feature('_enable')
def get_feature(self, feature):
return self.features['pwm' + self.id + feature]
def set_feature(self, feature, value=0):
pass
def __str__(self):
return 'pwm{}'.format(self.id)
|
normal
|
{
"blob_id": "c38aff77a7beebc13e7486150d549b876c830db8",
"index": 6104,
"step-1": "class Pwm:\n\n def __init__(self, number, path, features):\n self.id = number\n self.path = path + 'pwm' + number\n self.features = features\n self.duty = self.get_feature('')\n self.enable = self.get_feature('_enable')\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "class Pwm:\n\n def __init__(self, number, path, features):\n self.id = number\n self.path = path + 'pwm' + number\n self.features = features\n self.duty = self.get_feature('')\n self.enable = self.get_feature('_enable')\n <mask token>\n <mask token>\n\n def __str__(self):\n return 'pwm{}'.format(self.id)\n",
"step-3": "class Pwm:\n\n def __init__(self, number, path, features):\n self.id = number\n self.path = path + 'pwm' + number\n self.features = features\n self.duty = self.get_feature('')\n self.enable = self.get_feature('_enable')\n\n def get_feature(self, feature):\n return self.features['pwm' + self.id + feature]\n <mask token>\n\n def __str__(self):\n return 'pwm{}'.format(self.id)\n",
"step-4": "class Pwm:\n\n def __init__(self, number, path, features):\n self.id = number\n self.path = path + 'pwm' + number\n self.features = features\n self.duty = self.get_feature('')\n self.enable = self.get_feature('_enable')\n\n def get_feature(self, feature):\n return self.features['pwm' + self.id + feature]\n\n def set_feature(self, feature, value=0):\n pass\n\n def __str__(self):\n return 'pwm{}'.format(self.id)\n",
"step-5": "\nclass Pwm():\n\n\tdef __init__(self, number, path, features):\n\t\tself.id = number\n\t\tself.path = path + 'pwm' + number\n\t\tself.features = features\n\t\tself.duty = self.get_feature('')\n\t\tself.enable = self.get_feature('_enable')\n\n\tdef get_feature(self, feature):\n\t\treturn self.features['pwm' + self.id + feature]\n\n\tdef set_feature(self, feature, value=0):\n\t\tpass\n\n\tdef __str__(self):\n\t\treturn 'pwm{}'.format(self.id)",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
# coding=utf-8
from __future__ import print_function
import os
import sys
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
basedir = os.getcwd()
os.chdir(os.path.dirname(os.path.abspath(__file__)))
sys.path.append('trainer')
sys.path.append('downloader')
from gen.gen_captcha import gen_dataset, load_templates, candidates
from gen.img_process import grey_to_binary, clear_paper_noise
from model.nn import load_model_nn
from model.common import find_model_ckpt
import tensorflow as tf
from gen.utils import vec2str
import numpy as np
from PIL import Image
from downloader import download
def show_im(dataset):
data = np.uint8(dataset[0]).reshape((30, 96)) * 255
im = Image.fromarray(data)
im.show()
def test_model(captcha):
im = Image.open(os.path.join(basedir, 'downloader', 'captchas', captcha))
im = im.convert('L')
im = grey_to_binary(im)
im = clear_paper_noise(im, 5)
# im.show()
# templates = load_templates(os.path.join('trainer', 'templates'))
model = load_model_nn()
x = model['x']
keep_prob = model['keep_prob']
saver = model['saver']
prediction = model['prediction']
graph = model['graph']
model_ckpt_path, _ = find_model_ckpt(os.path.join('trainer', '.checkpoint'))
# print("Used the model:", model_ckpt_path)
with tf.Session(graph=graph) as session:
tf.global_variables_initializer().run()
saver.restore(session, model_ckpt_path)
# dataset, labels = gen_dataset(1, templates) # generate one image
dataset = []
dataset.append(np.asarray(im.convert("L")).reshape([30 * 96]) / 255)
label = prediction.eval(feed_dict={x: dataset, keep_prob: 1.0}, session=session)[0]
string = ''
for i in range(4):
string += chr(label[i] + ord('0'))
print(string)
if __name__ == "__main__":
if len(sys.argv) <= 1:
captcha = download(1)[0]
else:
captcha = sys.argv[1]
test_model(captcha)
|
normal
|
{
"blob_id": "8e34b5e15c5b6107d6841e7b567abf967c631f1b",
"index": 7440,
"step-1": "<mask token>\n\n\ndef show_im(dataset):\n data = np.uint8(dataset[0]).reshape((30, 96)) * 255\n im = Image.fromarray(data)\n im.show()\n\n\ndef test_model(captcha):\n im = Image.open(os.path.join(basedir, 'downloader', 'captchas', captcha))\n im = im.convert('L')\n im = grey_to_binary(im)\n im = clear_paper_noise(im, 5)\n model = load_model_nn()\n x = model['x']\n keep_prob = model['keep_prob']\n saver = model['saver']\n prediction = model['prediction']\n graph = model['graph']\n model_ckpt_path, _ = find_model_ckpt(os.path.join('trainer', '.checkpoint')\n )\n with tf.Session(graph=graph) as session:\n tf.global_variables_initializer().run()\n saver.restore(session, model_ckpt_path)\n dataset = []\n dataset.append(np.asarray(im.convert('L')).reshape([30 * 96]) / 255)\n label = prediction.eval(feed_dict={x: dataset, keep_prob: 1.0},\n session=session)[0]\n string = ''\n for i in range(4):\n string += chr(label[i] + ord('0'))\n print(string)\n\n\n<mask token>\n",
"step-2": "<mask token>\nos.chdir(os.path.dirname(os.path.abspath(__file__)))\nsys.path.append('trainer')\nsys.path.append('downloader')\n<mask token>\n\n\ndef show_im(dataset):\n data = np.uint8(dataset[0]).reshape((30, 96)) * 255\n im = Image.fromarray(data)\n im.show()\n\n\ndef test_model(captcha):\n im = Image.open(os.path.join(basedir, 'downloader', 'captchas', captcha))\n im = im.convert('L')\n im = grey_to_binary(im)\n im = clear_paper_noise(im, 5)\n model = load_model_nn()\n x = model['x']\n keep_prob = model['keep_prob']\n saver = model['saver']\n prediction = model['prediction']\n graph = model['graph']\n model_ckpt_path, _ = find_model_ckpt(os.path.join('trainer', '.checkpoint')\n )\n with tf.Session(graph=graph) as session:\n tf.global_variables_initializer().run()\n saver.restore(session, model_ckpt_path)\n dataset = []\n dataset.append(np.asarray(im.convert('L')).reshape([30 * 96]) / 255)\n label = prediction.eval(feed_dict={x: dataset, keep_prob: 1.0},\n session=session)[0]\n string = ''\n for i in range(4):\n string += chr(label[i] + ord('0'))\n print(string)\n\n\nif __name__ == '__main__':\n if len(sys.argv) <= 1:\n captcha = download(1)[0]\n else:\n captcha = sys.argv[1]\n test_model(captcha)\n",
"step-3": "<mask token>\nos.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'\nbasedir = os.getcwd()\nos.chdir(os.path.dirname(os.path.abspath(__file__)))\nsys.path.append('trainer')\nsys.path.append('downloader')\n<mask token>\n\n\ndef show_im(dataset):\n data = np.uint8(dataset[0]).reshape((30, 96)) * 255\n im = Image.fromarray(data)\n im.show()\n\n\ndef test_model(captcha):\n im = Image.open(os.path.join(basedir, 'downloader', 'captchas', captcha))\n im = im.convert('L')\n im = grey_to_binary(im)\n im = clear_paper_noise(im, 5)\n model = load_model_nn()\n x = model['x']\n keep_prob = model['keep_prob']\n saver = model['saver']\n prediction = model['prediction']\n graph = model['graph']\n model_ckpt_path, _ = find_model_ckpt(os.path.join('trainer', '.checkpoint')\n )\n with tf.Session(graph=graph) as session:\n tf.global_variables_initializer().run()\n saver.restore(session, model_ckpt_path)\n dataset = []\n dataset.append(np.asarray(im.convert('L')).reshape([30 * 96]) / 255)\n label = prediction.eval(feed_dict={x: dataset, keep_prob: 1.0},\n session=session)[0]\n string = ''\n for i in range(4):\n string += chr(label[i] + ord('0'))\n print(string)\n\n\nif __name__ == '__main__':\n if len(sys.argv) <= 1:\n captcha = download(1)[0]\n else:\n captcha = sys.argv[1]\n test_model(captcha)\n",
"step-4": "from __future__ import print_function\nimport os\nimport sys\nos.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'\nbasedir = os.getcwd()\nos.chdir(os.path.dirname(os.path.abspath(__file__)))\nsys.path.append('trainer')\nsys.path.append('downloader')\nfrom gen.gen_captcha import gen_dataset, load_templates, candidates\nfrom gen.img_process import grey_to_binary, clear_paper_noise\nfrom model.nn import load_model_nn\nfrom model.common import find_model_ckpt\nimport tensorflow as tf\nfrom gen.utils import vec2str\nimport numpy as np\nfrom PIL import Image\nfrom downloader import download\n\n\ndef show_im(dataset):\n data = np.uint8(dataset[0]).reshape((30, 96)) * 255\n im = Image.fromarray(data)\n im.show()\n\n\ndef test_model(captcha):\n im = Image.open(os.path.join(basedir, 'downloader', 'captchas', captcha))\n im = im.convert('L')\n im = grey_to_binary(im)\n im = clear_paper_noise(im, 5)\n model = load_model_nn()\n x = model['x']\n keep_prob = model['keep_prob']\n saver = model['saver']\n prediction = model['prediction']\n graph = model['graph']\n model_ckpt_path, _ = find_model_ckpt(os.path.join('trainer', '.checkpoint')\n )\n with tf.Session(graph=graph) as session:\n tf.global_variables_initializer().run()\n saver.restore(session, model_ckpt_path)\n dataset = []\n dataset.append(np.asarray(im.convert('L')).reshape([30 * 96]) / 255)\n label = prediction.eval(feed_dict={x: dataset, keep_prob: 1.0},\n session=session)[0]\n string = ''\n for i in range(4):\n string += chr(label[i] + ord('0'))\n print(string)\n\n\nif __name__ == '__main__':\n if len(sys.argv) <= 1:\n captcha = download(1)[0]\n else:\n captcha = sys.argv[1]\n test_model(captcha)\n",
"step-5": "# coding=utf-8\nfrom __future__ import print_function\n\nimport os\nimport sys\n\nos.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'\nbasedir = os.getcwd()\nos.chdir(os.path.dirname(os.path.abspath(__file__)))\nsys.path.append('trainer')\nsys.path.append('downloader')\n\nfrom gen.gen_captcha import gen_dataset, load_templates, candidates\nfrom gen.img_process import grey_to_binary, clear_paper_noise\nfrom model.nn import load_model_nn\nfrom model.common import find_model_ckpt\nimport tensorflow as tf\nfrom gen.utils import vec2str\nimport numpy as np\nfrom PIL import Image\nfrom downloader import download\n\ndef show_im(dataset):\n data = np.uint8(dataset[0]).reshape((30, 96)) * 255\n im = Image.fromarray(data)\n im.show()\n\ndef test_model(captcha):\n im = Image.open(os.path.join(basedir, 'downloader', 'captchas', captcha))\n im = im.convert('L')\n im = grey_to_binary(im)\n im = clear_paper_noise(im, 5)\n # im.show()\n # templates = load_templates(os.path.join('trainer', 'templates'))\n\n model = load_model_nn()\n x = model['x']\n keep_prob = model['keep_prob']\n saver = model['saver']\n prediction = model['prediction']\n graph = model['graph']\n model_ckpt_path, _ = find_model_ckpt(os.path.join('trainer', '.checkpoint'))\n # print(\"Used the model:\", model_ckpt_path)\n\n\n with tf.Session(graph=graph) as session:\n tf.global_variables_initializer().run()\n saver.restore(session, model_ckpt_path)\n\n # dataset, labels = gen_dataset(1, templates) # generate one image\n dataset = []\n dataset.append(np.asarray(im.convert(\"L\")).reshape([30 * 96]) / 255)\n\n label = prediction.eval(feed_dict={x: dataset, keep_prob: 1.0}, session=session)[0]\n string = ''\n for i in range(4):\n string += chr(label[i] + ord('0'))\n print(string)\n\n\nif __name__ == \"__main__\":\n if len(sys.argv) <= 1:\n captcha = download(1)[0]\n else:\n captcha = sys.argv[1]\n test_model(captcha)\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def sieve(limit):
numbers = list(range(3, limit, 2))
for prime in numbers:
for multiplier in reversed(range(2, limit)):
try:
numbers.remove(prime * multiplier)
except ValueError:
pass
return [2] + numbers
|
flexible
|
{
"blob_id": "ec7ca03f627eaa635aac56e302b9c40bf0a3da38",
"index": 1796,
"step-1": "<mask token>\n",
"step-2": "def sieve(limit):\n numbers = list(range(3, limit, 2))\n for prime in numbers:\n for multiplier in reversed(range(2, limit)):\n try:\n numbers.remove(prime * multiplier)\n except ValueError:\n pass\n return [2] + numbers\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
<|reserved_special_token_0|>
def get_card_size(card_width, image):
card_height = image.get_height() / (image.get_width() / card_width)
return round(card_height)
<|reserved_special_token_0|>
def upload_card_images(card_name):
card_n = pygame.image.load(os.path.join('images', card_name + '.png'))
card_n = pygame.transform.scale(card_n, (CARD_WIDTH, CARD_HEIGHT))
return card_n
<|reserved_special_token_0|>
def draw_player(win, x, y, width, height, cards, card_images):
i = 0
for card in cards:
win.blit(card_images[card.name], (x + i * width, y))
card.position = x + i * width, y, x + i * width + width, y + height
i += 1
def draw_opponents(win, x, y, width, height, back_image, count, hor=True):
if hor:
for i in range(count):
win.blit(back_image, (x + i * width, y))
else:
for i in range(count):
win.blit(pygame.transform.rotate(back_image, 90), (x, y + i *
height))
def draw_played_cards(win, cards, card_images, turn_order):
position = [(300, 300), (315, 260), (330, 300)]
counter = turn_order
for _ in range(len(cards)):
win.blit(card_images[cards[0].name], position[counter])
turn_order = (counter + 1) % 3
def main():
run = True
clock = pygame.time.Clock()
main_font = pygame.font.SysFont('comicsans', 30)
n = Network()
player = n.connect()
def redraw_window(win):
win.fill((53, 101, 77))
draw_player(win, 60, 650, CARD_WIDTH, CARD_HEIGHT, player.cards,
CARD_IMAGES)
draw_opponents(win, 60, 150, CARD_WIDTH, CARD_WIDTH,
CARD_IMAGE_BACK_GRAY, 8)
draw_opponents(win, 550, 150, CARD_WIDTH, CARD_WIDTH,
CARD_IMAGE_BACK_GRAY, 8, hor=False)
draw_played_cards(win, game.played_cards_round, CARD_IMAGES, game.
turn_order)
if player.turn == True:
for card in player.Cards:
if card.position[0] >= pos[0] and card.position[1] >= pos[1
] and card.position[2] <= pos[0] and card.position[3
] <= pos[1]:
player.cards.remove(card)
player.played_card = True
player.last_played_card = card
player.turn = False
pygame.display.update()
while run:
pos = -5, -5
clock.tick(60)
game = n.send(player)
for event in pygame.event.get():
if event.type == pygame.QUIT:
quit()
if event.type == pygame.MOUSEBUTTONDOWN and player.turn == True:
pos = pygame.mouse.get_pos()
redraw_window(WIN)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
pygame.font.init()
<|reserved_special_token_0|>
pygame.display.set_caption('Zole')
<|reserved_special_token_0|>
def get_card_size(card_width, image):
card_height = image.get_height() / (image.get_width() / card_width)
return round(card_height)
<|reserved_special_token_0|>
def upload_card_images(card_name):
card_n = pygame.image.load(os.path.join('images', card_name + '.png'))
card_n = pygame.transform.scale(card_n, (CARD_WIDTH, CARD_HEIGHT))
return card_n
<|reserved_special_token_0|>
for name in CARD_NAMES:
CARD_IMAGES[name] = upload_card_images(name)
<|reserved_special_token_0|>
def draw_player(win, x, y, width, height, cards, card_images):
i = 0
for card in cards:
win.blit(card_images[card.name], (x + i * width, y))
card.position = x + i * width, y, x + i * width + width, y + height
i += 1
def draw_opponents(win, x, y, width, height, back_image, count, hor=True):
if hor:
for i in range(count):
win.blit(back_image, (x + i * width, y))
else:
for i in range(count):
win.blit(pygame.transform.rotate(back_image, 90), (x, y + i *
height))
def draw_played_cards(win, cards, card_images, turn_order):
position = [(300, 300), (315, 260), (330, 300)]
counter = turn_order
for _ in range(len(cards)):
win.blit(card_images[cards[0].name], position[counter])
turn_order = (counter + 1) % 3
def main():
run = True
clock = pygame.time.Clock()
main_font = pygame.font.SysFont('comicsans', 30)
n = Network()
player = n.connect()
def redraw_window(win):
win.fill((53, 101, 77))
draw_player(win, 60, 650, CARD_WIDTH, CARD_HEIGHT, player.cards,
CARD_IMAGES)
draw_opponents(win, 60, 150, CARD_WIDTH, CARD_WIDTH,
CARD_IMAGE_BACK_GRAY, 8)
draw_opponents(win, 550, 150, CARD_WIDTH, CARD_WIDTH,
CARD_IMAGE_BACK_GRAY, 8, hor=False)
draw_played_cards(win, game.played_cards_round, CARD_IMAGES, game.
turn_order)
if player.turn == True:
for card in player.Cards:
if card.position[0] >= pos[0] and card.position[1] >= pos[1
] and card.position[2] <= pos[0] and card.position[3
] <= pos[1]:
player.cards.remove(card)
player.played_card = True
player.last_played_card = card
player.turn = False
pygame.display.update()
while run:
pos = -5, -5
clock.tick(60)
game = n.send(player)
for event in pygame.event.get():
if event.type == pygame.QUIT:
quit()
if event.type == pygame.MOUSEBUTTONDOWN and player.turn == True:
pos = pygame.mouse.get_pos()
redraw_window(WIN)
main()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
pygame.font.init()
WIDTH, HEIGHT = 700, 800
WIN = pygame.display.set_mode((WIDTH, HEIGHT))
pygame.display.set_caption('Zole')
CARD_WIDTH = 60
def get_card_size(card_width, image):
card_height = image.get_height() / (image.get_width() / card_width)
return round(card_height)
CARD_IMAGE_BACK_GRAY = pygame.image.load(os.path.join('images',
'gray_back.png'))
CARD_HEIGHT = get_card_size(CARD_WIDTH, CARD_IMAGE_BACK_GRAY)
CARD_IMAGE_BACK_GRAY = pygame.transform.scale(CARD_IMAGE_BACK_GRAY, (
CARD_WIDTH, CARD_HEIGHT))
def upload_card_images(card_name):
card_n = pygame.image.load(os.path.join('images', card_name + '.png'))
card_n = pygame.transform.scale(card_n, (CARD_WIDTH, CARD_HEIGHT))
return card_n
CARD_NAMES = ['AC', 'AH', 'AS', 'AD', 'KS', 'KH', 'KD', 'KC', 'QS', 'QH',
'QD', 'QC', 'JS', 'JH', 'JD', 'JC', '10S', '10H', '10D', '10C', '9S',
'9H', '9D', '9C', '8D', '7D']
CARD_IMAGES = {}
for name in CARD_NAMES:
CARD_IMAGES[name] = upload_card_images(name)
STRENGTH_SCALE_TRUMPS = ['QC', 'QS', 'QH', 'QD', 'JC', 'JS', 'JH', 'JD',
'AD', '10D', 'KD', '9D', '8D', '7D', 'AC', '10C', 'KC', '9C', 'AH',
'10H', 'KH', '9H', 'AS', '10S', 'KS', '9S', 'None']
STRENGTH_SCALE_NON_TRUMPS = ['A', '10', 'K', '9', 'None']
def draw_player(win, x, y, width, height, cards, card_images):
i = 0
for card in cards:
win.blit(card_images[card.name], (x + i * width, y))
card.position = x + i * width, y, x + i * width + width, y + height
i += 1
def draw_opponents(win, x, y, width, height, back_image, count, hor=True):
if hor:
for i in range(count):
win.blit(back_image, (x + i * width, y))
else:
for i in range(count):
win.blit(pygame.transform.rotate(back_image, 90), (x, y + i *
height))
def draw_played_cards(win, cards, card_images, turn_order):
position = [(300, 300), (315, 260), (330, 300)]
counter = turn_order
for _ in range(len(cards)):
win.blit(card_images[cards[0].name], position[counter])
turn_order = (counter + 1) % 3
def main():
run = True
clock = pygame.time.Clock()
main_font = pygame.font.SysFont('comicsans', 30)
n = Network()
player = n.connect()
def redraw_window(win):
win.fill((53, 101, 77))
draw_player(win, 60, 650, CARD_WIDTH, CARD_HEIGHT, player.cards,
CARD_IMAGES)
draw_opponents(win, 60, 150, CARD_WIDTH, CARD_WIDTH,
CARD_IMAGE_BACK_GRAY, 8)
draw_opponents(win, 550, 150, CARD_WIDTH, CARD_WIDTH,
CARD_IMAGE_BACK_GRAY, 8, hor=False)
draw_played_cards(win, game.played_cards_round, CARD_IMAGES, game.
turn_order)
if player.turn == True:
for card in player.Cards:
if card.position[0] >= pos[0] and card.position[1] >= pos[1
] and card.position[2] <= pos[0] and card.position[3
] <= pos[1]:
player.cards.remove(card)
player.played_card = True
player.last_played_card = card
player.turn = False
pygame.display.update()
while run:
pos = -5, -5
clock.tick(60)
game = n.send(player)
for event in pygame.event.get():
if event.type == pygame.QUIT:
quit()
if event.type == pygame.MOUSEBUTTONDOWN and player.turn == True:
pos = pygame.mouse.get_pos()
redraw_window(WIN)
main()
<|reserved_special_token_1|>
import pygame
import os
from network import Network
from card import Card
from game import Game, Player
pygame.font.init()
WIDTH, HEIGHT = 700, 800
WIN = pygame.display.set_mode((WIDTH, HEIGHT))
pygame.display.set_caption('Zole')
CARD_WIDTH = 60
def get_card_size(card_width, image):
card_height = image.get_height() / (image.get_width() / card_width)
return round(card_height)
CARD_IMAGE_BACK_GRAY = pygame.image.load(os.path.join('images',
'gray_back.png'))
CARD_HEIGHT = get_card_size(CARD_WIDTH, CARD_IMAGE_BACK_GRAY)
CARD_IMAGE_BACK_GRAY = pygame.transform.scale(CARD_IMAGE_BACK_GRAY, (
CARD_WIDTH, CARD_HEIGHT))
def upload_card_images(card_name):
card_n = pygame.image.load(os.path.join('images', card_name + '.png'))
card_n = pygame.transform.scale(card_n, (CARD_WIDTH, CARD_HEIGHT))
return card_n
CARD_NAMES = ['AC', 'AH', 'AS', 'AD', 'KS', 'KH', 'KD', 'KC', 'QS', 'QH',
'QD', 'QC', 'JS', 'JH', 'JD', 'JC', '10S', '10H', '10D', '10C', '9S',
'9H', '9D', '9C', '8D', '7D']
CARD_IMAGES = {}
for name in CARD_NAMES:
CARD_IMAGES[name] = upload_card_images(name)
STRENGTH_SCALE_TRUMPS = ['QC', 'QS', 'QH', 'QD', 'JC', 'JS', 'JH', 'JD',
'AD', '10D', 'KD', '9D', '8D', '7D', 'AC', '10C', 'KC', '9C', 'AH',
'10H', 'KH', '9H', 'AS', '10S', 'KS', '9S', 'None']
STRENGTH_SCALE_NON_TRUMPS = ['A', '10', 'K', '9', 'None']
def draw_player(win, x, y, width, height, cards, card_images):
i = 0
for card in cards:
win.blit(card_images[card.name], (x + i * width, y))
card.position = x + i * width, y, x + i * width + width, y + height
i += 1
def draw_opponents(win, x, y, width, height, back_image, count, hor=True):
if hor:
for i in range(count):
win.blit(back_image, (x + i * width, y))
else:
for i in range(count):
win.blit(pygame.transform.rotate(back_image, 90), (x, y + i *
height))
def draw_played_cards(win, cards, card_images, turn_order):
position = [(300, 300), (315, 260), (330, 300)]
counter = turn_order
for _ in range(len(cards)):
win.blit(card_images[cards[0].name], position[counter])
turn_order = (counter + 1) % 3
def main():
run = True
clock = pygame.time.Clock()
main_font = pygame.font.SysFont('comicsans', 30)
n = Network()
player = n.connect()
def redraw_window(win):
win.fill((53, 101, 77))
draw_player(win, 60, 650, CARD_WIDTH, CARD_HEIGHT, player.cards,
CARD_IMAGES)
draw_opponents(win, 60, 150, CARD_WIDTH, CARD_WIDTH,
CARD_IMAGE_BACK_GRAY, 8)
draw_opponents(win, 550, 150, CARD_WIDTH, CARD_WIDTH,
CARD_IMAGE_BACK_GRAY, 8, hor=False)
draw_played_cards(win, game.played_cards_round, CARD_IMAGES, game.
turn_order)
if player.turn == True:
for card in player.Cards:
if card.position[0] >= pos[0] and card.position[1] >= pos[1
] and card.position[2] <= pos[0] and card.position[3
] <= pos[1]:
player.cards.remove(card)
player.played_card = True
player.last_played_card = card
player.turn = False
pygame.display.update()
while run:
pos = -5, -5
clock.tick(60)
game = n.send(player)
for event in pygame.event.get():
if event.type == pygame.QUIT:
quit()
if event.type == pygame.MOUSEBUTTONDOWN and player.turn == True:
pos = pygame.mouse.get_pos()
redraw_window(WIN)
main()
<|reserved_special_token_1|>
import pygame
import os
from network import Network
from card import Card
from game import Game, Player
pygame.font.init()
# Initializing window
WIDTH, HEIGHT = 700, 800
WIN = pygame.display.set_mode((WIDTH, HEIGHT))
pygame.display.set_caption("Zole")
CARD_WIDTH = 60
############################## Uploading cards
def get_card_size(card_width, image):
card_height = image.get_height() / (image.get_width()/card_width)
return round(card_height)
CARD_IMAGE_BACK_GRAY = pygame.image.load(
os.path.join("images", "gray_back.png"))
CARD_HEIGHT = get_card_size(CARD_WIDTH, CARD_IMAGE_BACK_GRAY)
# Uploading backside of cards
CARD_IMAGE_BACK_GRAY = pygame.transform.scale(
CARD_IMAGE_BACK_GRAY, (CARD_WIDTH, CARD_HEIGHT))
# Uploading all the cards
def upload_card_images(card_name):
card_n = pygame.image.load(os.path.join("images", card_name + ".png"))
card_n = pygame.transform.scale(
card_n, (CARD_WIDTH, CARD_HEIGHT))
return card_n
CARD_NAMES = ["AC", "AH", "AS", "AD", "KS", "KH", "KD", "KC", "QS", "QH", "QD", "QC", "JS", "JH", "JD", "JC", "10S", "10H", "10D",
"10C", "9S", "9H", "9D", "9C", "8D", "7D"]
CARD_IMAGES = {}
# Uploading all card images in dictionary
for name in CARD_NAMES:
CARD_IMAGES[name] = upload_card_images(name)
############################## Uploading cards End
# Card strengths
STRENGTH_SCALE_TRUMPS = ["QC", "QS", "QH", "QD", "JC", "JS", "JH", "JD", "AD", "10D", "KD", "9D", "8D", "7D", "AC", "10C", "KC", "9C",
"AH", "10H", "KH", "9H", "AS", "10S", "KS", "9S", "None"]
STRENGTH_SCALE_NON_TRUMPS = ["A", "10", "K", "9", "None"]
def draw_player(win,x, y,width,height, cards, card_images):
i = 0
for card in cards:
win.blit(card_images[card.name], (x + i * width, y))
card.position = (x + i * width, y, x +
i * width + width, y + height)
i += 1
def draw_opponents(win,x, y,width,height,back_image,count, hor = True):
if hor:
for i in range(count):
win.blit(back_image, (x + i * width, y))
else:
for i in range(count):
win.blit(pygame.transform.rotate(back_image, 90), (x , y + i * height))
def draw_played_cards(win, cards, card_images, turn_order):
position = [(300,300),(315, 260),(330,300)]
counter = turn_order
for _ in range(len(cards)):
win.blit(card_images[cards[0].name], (position[counter]))
turn_order = (counter + 1) % 3
def main():
run = True
clock = pygame.time.Clock()
main_font = pygame.font.SysFont("comicsans", 30)
n = Network()
player = n.connect()
def redraw_window(win):
win.fill((53, 101, 77))
draw_player(win, 60, 650, CARD_WIDTH, CARD_HEIGHT, player.cards,CARD_IMAGES)
draw_opponents(win, 60, 150, CARD_WIDTH, CARD_WIDTH, CARD_IMAGE_BACK_GRAY, 8)
draw_opponents(win, 550, 150, CARD_WIDTH, CARD_WIDTH, CARD_IMAGE_BACK_GRAY, 8, hor = False)
draw_played_cards(win,game.played_cards_round, CARD_IMAGES, game.turn_order)
if player.turn == True:
for card in player.Cards:
if card.position[0] >= pos[0] and card.position[1] >= pos[1] and card.position[2] <= pos[0] and card.position[3] <= pos[1]:
player.cards.remove(card)
player.played_card = True
player.last_played_card = card
player.turn = False
pygame.display.update()
while run:
pos = (-5, -5)
clock.tick(60)
game = n.send(player)
for event in pygame.event.get():
if event.type == pygame.QUIT:
quit()
if event.type == pygame.MOUSEBUTTONDOWN and player.turn == True:
pos = pygame.mouse.get_pos()
redraw_window(WIN)
main()
|
flexible
|
{
"blob_id": "9c478c59398618d0e447276f9ff6c1c143702f12",
"index": 2360,
"step-1": "<mask token>\n\n\ndef get_card_size(card_width, image):\n card_height = image.get_height() / (image.get_width() / card_width)\n return round(card_height)\n\n\n<mask token>\n\n\ndef upload_card_images(card_name):\n card_n = pygame.image.load(os.path.join('images', card_name + '.png'))\n card_n = pygame.transform.scale(card_n, (CARD_WIDTH, CARD_HEIGHT))\n return card_n\n\n\n<mask token>\n\n\ndef draw_player(win, x, y, width, height, cards, card_images):\n i = 0\n for card in cards:\n win.blit(card_images[card.name], (x + i * width, y))\n card.position = x + i * width, y, x + i * width + width, y + height\n i += 1\n\n\ndef draw_opponents(win, x, y, width, height, back_image, count, hor=True):\n if hor:\n for i in range(count):\n win.blit(back_image, (x + i * width, y))\n else:\n for i in range(count):\n win.blit(pygame.transform.rotate(back_image, 90), (x, y + i *\n height))\n\n\ndef draw_played_cards(win, cards, card_images, turn_order):\n position = [(300, 300), (315, 260), (330, 300)]\n counter = turn_order\n for _ in range(len(cards)):\n win.blit(card_images[cards[0].name], position[counter])\n turn_order = (counter + 1) % 3\n\n\ndef main():\n run = True\n clock = pygame.time.Clock()\n main_font = pygame.font.SysFont('comicsans', 30)\n n = Network()\n player = n.connect()\n\n def redraw_window(win):\n win.fill((53, 101, 77))\n draw_player(win, 60, 650, CARD_WIDTH, CARD_HEIGHT, player.cards,\n CARD_IMAGES)\n draw_opponents(win, 60, 150, CARD_WIDTH, CARD_WIDTH,\n CARD_IMAGE_BACK_GRAY, 8)\n draw_opponents(win, 550, 150, CARD_WIDTH, CARD_WIDTH,\n CARD_IMAGE_BACK_GRAY, 8, hor=False)\n draw_played_cards(win, game.played_cards_round, CARD_IMAGES, game.\n turn_order)\n if player.turn == True:\n for card in player.Cards:\n if card.position[0] >= pos[0] and card.position[1] >= pos[1\n ] and card.position[2] <= pos[0] and card.position[3\n ] <= pos[1]:\n player.cards.remove(card)\n player.played_card = True\n player.last_played_card = card\n player.turn = False\n 
pygame.display.update()\n while run:\n pos = -5, -5\n clock.tick(60)\n game = n.send(player)\n for event in pygame.event.get():\n if event.type == pygame.QUIT:\n quit()\n if event.type == pygame.MOUSEBUTTONDOWN and player.turn == True:\n pos = pygame.mouse.get_pos()\n redraw_window(WIN)\n\n\n<mask token>\n",
"step-2": "<mask token>\npygame.font.init()\n<mask token>\npygame.display.set_caption('Zole')\n<mask token>\n\n\ndef get_card_size(card_width, image):\n card_height = image.get_height() / (image.get_width() / card_width)\n return round(card_height)\n\n\n<mask token>\n\n\ndef upload_card_images(card_name):\n card_n = pygame.image.load(os.path.join('images', card_name + '.png'))\n card_n = pygame.transform.scale(card_n, (CARD_WIDTH, CARD_HEIGHT))\n return card_n\n\n\n<mask token>\nfor name in CARD_NAMES:\n CARD_IMAGES[name] = upload_card_images(name)\n<mask token>\n\n\ndef draw_player(win, x, y, width, height, cards, card_images):\n i = 0\n for card in cards:\n win.blit(card_images[card.name], (x + i * width, y))\n card.position = x + i * width, y, x + i * width + width, y + height\n i += 1\n\n\ndef draw_opponents(win, x, y, width, height, back_image, count, hor=True):\n if hor:\n for i in range(count):\n win.blit(back_image, (x + i * width, y))\n else:\n for i in range(count):\n win.blit(pygame.transform.rotate(back_image, 90), (x, y + i *\n height))\n\n\ndef draw_played_cards(win, cards, card_images, turn_order):\n position = [(300, 300), (315, 260), (330, 300)]\n counter = turn_order\n for _ in range(len(cards)):\n win.blit(card_images[cards[0].name], position[counter])\n turn_order = (counter + 1) % 3\n\n\ndef main():\n run = True\n clock = pygame.time.Clock()\n main_font = pygame.font.SysFont('comicsans', 30)\n n = Network()\n player = n.connect()\n\n def redraw_window(win):\n win.fill((53, 101, 77))\n draw_player(win, 60, 650, CARD_WIDTH, CARD_HEIGHT, player.cards,\n CARD_IMAGES)\n draw_opponents(win, 60, 150, CARD_WIDTH, CARD_WIDTH,\n CARD_IMAGE_BACK_GRAY, 8)\n draw_opponents(win, 550, 150, CARD_WIDTH, CARD_WIDTH,\n CARD_IMAGE_BACK_GRAY, 8, hor=False)\n draw_played_cards(win, game.played_cards_round, CARD_IMAGES, game.\n turn_order)\n if player.turn == True:\n for card in player.Cards:\n if card.position[0] >= pos[0] and card.position[1] >= pos[1\n ] and 
card.position[2] <= pos[0] and card.position[3\n ] <= pos[1]:\n player.cards.remove(card)\n player.played_card = True\n player.last_played_card = card\n player.turn = False\n pygame.display.update()\n while run:\n pos = -5, -5\n clock.tick(60)\n game = n.send(player)\n for event in pygame.event.get():\n if event.type == pygame.QUIT:\n quit()\n if event.type == pygame.MOUSEBUTTONDOWN and player.turn == True:\n pos = pygame.mouse.get_pos()\n redraw_window(WIN)\n\n\nmain()\n",
"step-3": "<mask token>\npygame.font.init()\nWIDTH, HEIGHT = 700, 800\nWIN = pygame.display.set_mode((WIDTH, HEIGHT))\npygame.display.set_caption('Zole')\nCARD_WIDTH = 60\n\n\ndef get_card_size(card_width, image):\n card_height = image.get_height() / (image.get_width() / card_width)\n return round(card_height)\n\n\nCARD_IMAGE_BACK_GRAY = pygame.image.load(os.path.join('images',\n 'gray_back.png'))\nCARD_HEIGHT = get_card_size(CARD_WIDTH, CARD_IMAGE_BACK_GRAY)\nCARD_IMAGE_BACK_GRAY = pygame.transform.scale(CARD_IMAGE_BACK_GRAY, (\n CARD_WIDTH, CARD_HEIGHT))\n\n\ndef upload_card_images(card_name):\n card_n = pygame.image.load(os.path.join('images', card_name + '.png'))\n card_n = pygame.transform.scale(card_n, (CARD_WIDTH, CARD_HEIGHT))\n return card_n\n\n\nCARD_NAMES = ['AC', 'AH', 'AS', 'AD', 'KS', 'KH', 'KD', 'KC', 'QS', 'QH',\n 'QD', 'QC', 'JS', 'JH', 'JD', 'JC', '10S', '10H', '10D', '10C', '9S',\n '9H', '9D', '9C', '8D', '7D']\nCARD_IMAGES = {}\nfor name in CARD_NAMES:\n CARD_IMAGES[name] = upload_card_images(name)\nSTRENGTH_SCALE_TRUMPS = ['QC', 'QS', 'QH', 'QD', 'JC', 'JS', 'JH', 'JD',\n 'AD', '10D', 'KD', '9D', '8D', '7D', 'AC', '10C', 'KC', '9C', 'AH',\n '10H', 'KH', '9H', 'AS', '10S', 'KS', '9S', 'None']\nSTRENGTH_SCALE_NON_TRUMPS = ['A', '10', 'K', '9', 'None']\n\n\ndef draw_player(win, x, y, width, height, cards, card_images):\n i = 0\n for card in cards:\n win.blit(card_images[card.name], (x + i * width, y))\n card.position = x + i * width, y, x + i * width + width, y + height\n i += 1\n\n\ndef draw_opponents(win, x, y, width, height, back_image, count, hor=True):\n if hor:\n for i in range(count):\n win.blit(back_image, (x + i * width, y))\n else:\n for i in range(count):\n win.blit(pygame.transform.rotate(back_image, 90), (x, y + i *\n height))\n\n\ndef draw_played_cards(win, cards, card_images, turn_order):\n position = [(300, 300), (315, 260), (330, 300)]\n counter = turn_order\n for _ in range(len(cards)):\n win.blit(card_images[cards[0].name], 
position[counter])\n turn_order = (counter + 1) % 3\n\n\ndef main():\n run = True\n clock = pygame.time.Clock()\n main_font = pygame.font.SysFont('comicsans', 30)\n n = Network()\n player = n.connect()\n\n def redraw_window(win):\n win.fill((53, 101, 77))\n draw_player(win, 60, 650, CARD_WIDTH, CARD_HEIGHT, player.cards,\n CARD_IMAGES)\n draw_opponents(win, 60, 150, CARD_WIDTH, CARD_WIDTH,\n CARD_IMAGE_BACK_GRAY, 8)\n draw_opponents(win, 550, 150, CARD_WIDTH, CARD_WIDTH,\n CARD_IMAGE_BACK_GRAY, 8, hor=False)\n draw_played_cards(win, game.played_cards_round, CARD_IMAGES, game.\n turn_order)\n if player.turn == True:\n for card in player.Cards:\n if card.position[0] >= pos[0] and card.position[1] >= pos[1\n ] and card.position[2] <= pos[0] and card.position[3\n ] <= pos[1]:\n player.cards.remove(card)\n player.played_card = True\n player.last_played_card = card\n player.turn = False\n pygame.display.update()\n while run:\n pos = -5, -5\n clock.tick(60)\n game = n.send(player)\n for event in pygame.event.get():\n if event.type == pygame.QUIT:\n quit()\n if event.type == pygame.MOUSEBUTTONDOWN and player.turn == True:\n pos = pygame.mouse.get_pos()\n redraw_window(WIN)\n\n\nmain()\n",
"step-4": "import pygame\nimport os\nfrom network import Network\nfrom card import Card\nfrom game import Game, Player\npygame.font.init()\nWIDTH, HEIGHT = 700, 800\nWIN = pygame.display.set_mode((WIDTH, HEIGHT))\npygame.display.set_caption('Zole')\nCARD_WIDTH = 60\n\n\ndef get_card_size(card_width, image):\n card_height = image.get_height() / (image.get_width() / card_width)\n return round(card_height)\n\n\nCARD_IMAGE_BACK_GRAY = pygame.image.load(os.path.join('images',\n 'gray_back.png'))\nCARD_HEIGHT = get_card_size(CARD_WIDTH, CARD_IMAGE_BACK_GRAY)\nCARD_IMAGE_BACK_GRAY = pygame.transform.scale(CARD_IMAGE_BACK_GRAY, (\n CARD_WIDTH, CARD_HEIGHT))\n\n\ndef upload_card_images(card_name):\n card_n = pygame.image.load(os.path.join('images', card_name + '.png'))\n card_n = pygame.transform.scale(card_n, (CARD_WIDTH, CARD_HEIGHT))\n return card_n\n\n\nCARD_NAMES = ['AC', 'AH', 'AS', 'AD', 'KS', 'KH', 'KD', 'KC', 'QS', 'QH',\n 'QD', 'QC', 'JS', 'JH', 'JD', 'JC', '10S', '10H', '10D', '10C', '9S',\n '9H', '9D', '9C', '8D', '7D']\nCARD_IMAGES = {}\nfor name in CARD_NAMES:\n CARD_IMAGES[name] = upload_card_images(name)\nSTRENGTH_SCALE_TRUMPS = ['QC', 'QS', 'QH', 'QD', 'JC', 'JS', 'JH', 'JD',\n 'AD', '10D', 'KD', '9D', '8D', '7D', 'AC', '10C', 'KC', '9C', 'AH',\n '10H', 'KH', '9H', 'AS', '10S', 'KS', '9S', 'None']\nSTRENGTH_SCALE_NON_TRUMPS = ['A', '10', 'K', '9', 'None']\n\n\ndef draw_player(win, x, y, width, height, cards, card_images):\n i = 0\n for card in cards:\n win.blit(card_images[card.name], (x + i * width, y))\n card.position = x + i * width, y, x + i * width + width, y + height\n i += 1\n\n\ndef draw_opponents(win, x, y, width, height, back_image, count, hor=True):\n if hor:\n for i in range(count):\n win.blit(back_image, (x + i * width, y))\n else:\n for i in range(count):\n win.blit(pygame.transform.rotate(back_image, 90), (x, y + i *\n height))\n\n\ndef draw_played_cards(win, cards, card_images, turn_order):\n position = [(300, 300), (315, 260), (330, 300)]\n 
counter = turn_order\n for _ in range(len(cards)):\n win.blit(card_images[cards[0].name], position[counter])\n turn_order = (counter + 1) % 3\n\n\ndef main():\n run = True\n clock = pygame.time.Clock()\n main_font = pygame.font.SysFont('comicsans', 30)\n n = Network()\n player = n.connect()\n\n def redraw_window(win):\n win.fill((53, 101, 77))\n draw_player(win, 60, 650, CARD_WIDTH, CARD_HEIGHT, player.cards,\n CARD_IMAGES)\n draw_opponents(win, 60, 150, CARD_WIDTH, CARD_WIDTH,\n CARD_IMAGE_BACK_GRAY, 8)\n draw_opponents(win, 550, 150, CARD_WIDTH, CARD_WIDTH,\n CARD_IMAGE_BACK_GRAY, 8, hor=False)\n draw_played_cards(win, game.played_cards_round, CARD_IMAGES, game.\n turn_order)\n if player.turn == True:\n for card in player.Cards:\n if card.position[0] >= pos[0] and card.position[1] >= pos[1\n ] and card.position[2] <= pos[0] and card.position[3\n ] <= pos[1]:\n player.cards.remove(card)\n player.played_card = True\n player.last_played_card = card\n player.turn = False\n pygame.display.update()\n while run:\n pos = -5, -5\n clock.tick(60)\n game = n.send(player)\n for event in pygame.event.get():\n if event.type == pygame.QUIT:\n quit()\n if event.type == pygame.MOUSEBUTTONDOWN and player.turn == True:\n pos = pygame.mouse.get_pos()\n redraw_window(WIN)\n\n\nmain()\n",
"step-5": "import pygame\nimport os\nfrom network import Network\nfrom card import Card\nfrom game import Game, Player\npygame.font.init()\n\n# Initializing window\nWIDTH, HEIGHT = 700, 800\nWIN = pygame.display.set_mode((WIDTH, HEIGHT))\npygame.display.set_caption(\"Zole\")\n\nCARD_WIDTH = 60\n\n############################## Uploading cards\ndef get_card_size(card_width, image):\n card_height = image.get_height() / (image.get_width()/card_width)\n return round(card_height)\n\nCARD_IMAGE_BACK_GRAY = pygame.image.load(\n os.path.join(\"images\", \"gray_back.png\"))\n\nCARD_HEIGHT = get_card_size(CARD_WIDTH, CARD_IMAGE_BACK_GRAY)\n\n# Uploading backside of cards\nCARD_IMAGE_BACK_GRAY = pygame.transform.scale(\n CARD_IMAGE_BACK_GRAY, (CARD_WIDTH, CARD_HEIGHT))\n\n# Uploading all the cards\ndef upload_card_images(card_name):\n card_n = pygame.image.load(os.path.join(\"images\", card_name + \".png\"))\n card_n = pygame.transform.scale(\n card_n, (CARD_WIDTH, CARD_HEIGHT))\n return card_n\n\n\nCARD_NAMES = [\"AC\", \"AH\", \"AS\", \"AD\", \"KS\", \"KH\", \"KD\", \"KC\", \"QS\", \"QH\", \"QD\", \"QC\", \"JS\", \"JH\", \"JD\", \"JC\", \"10S\", \"10H\", \"10D\",\n \"10C\", \"9S\", \"9H\", \"9D\", \"9C\", \"8D\", \"7D\"]\n\nCARD_IMAGES = {}\n\n# Uploading all card images in dictionary\nfor name in CARD_NAMES:\n CARD_IMAGES[name] = upload_card_images(name)\n\n############################## Uploading cards End\n\n\n# Card strengths\n\nSTRENGTH_SCALE_TRUMPS = [\"QC\", \"QS\", \"QH\", \"QD\", \"JC\", \"JS\", \"JH\", \"JD\", \"AD\", \"10D\", \"KD\", \"9D\", \"8D\", \"7D\", \"AC\", \"10C\", \"KC\", \"9C\",\n \"AH\", \"10H\", \"KH\", \"9H\", \"AS\", \"10S\", \"KS\", \"9S\", \"None\"]\n\nSTRENGTH_SCALE_NON_TRUMPS = [\"A\", \"10\", \"K\", \"9\", \"None\"]\n\n\ndef draw_player(win,x, y,width,height, cards, card_images):\n i = 0\n for card in cards:\n win.blit(card_images[card.name], (x + i * width, y))\n card.position = (x + i * width, y, x +\n i * width + width, y + height)\n i += 
1\n\ndef draw_opponents(win,x, y,width,height,back_image,count, hor = True):\n if hor:\n for i in range(count):\n win.blit(back_image, (x + i * width, y))\n\n else:\n for i in range(count):\n win.blit(pygame.transform.rotate(back_image, 90), (x , y + i * height))\n\ndef draw_played_cards(win, cards, card_images, turn_order):\n position = [(300,300),(315, 260),(330,300)]\n counter = turn_order\n for _ in range(len(cards)):\n win.blit(card_images[cards[0].name], (position[counter]))\n turn_order = (counter + 1) % 3\n\n\n\n\n\ndef main():\n run = True\n clock = pygame.time.Clock()\n main_font = pygame.font.SysFont(\"comicsans\", 30)\n n = Network()\n\n player = n.connect()\n\n\n def redraw_window(win):\n win.fill((53, 101, 77))\n\n\n draw_player(win, 60, 650, CARD_WIDTH, CARD_HEIGHT, player.cards,CARD_IMAGES)\n draw_opponents(win, 60, 150, CARD_WIDTH, CARD_WIDTH, CARD_IMAGE_BACK_GRAY, 8)\n draw_opponents(win, 550, 150, CARD_WIDTH, CARD_WIDTH, CARD_IMAGE_BACK_GRAY, 8, hor = False)\n draw_played_cards(win,game.played_cards_round, CARD_IMAGES, game.turn_order)\n\n if player.turn == True:\n for card in player.Cards:\n if card.position[0] >= pos[0] and card.position[1] >= pos[1] and card.position[2] <= pos[0] and card.position[3] <= pos[1]:\n player.cards.remove(card)\n player.played_card = True\n player.last_played_card = card\n player.turn = False\n\n\n pygame.display.update()\n\n while run:\n pos = (-5, -5)\n clock.tick(60)\n game = n.send(player)\n\n for event in pygame.event.get():\n if event.type == pygame.QUIT:\n quit()\n if event.type == pygame.MOUSEBUTTONDOWN and player.turn == True:\n pos = pygame.mouse.get_pos()\n\n redraw_window(WIN)\n\n\nmain()\n\n\n",
"step-ids": [
6,
7,
8,
9,
10
]
}
|
[
6,
7,
8,
9,
10
] |
class Solution:

    def divide(self, dividend, divisor):
        """Integer-divide *dividend* by *divisor* without using '/', '*' or '%'.

        :type dividend: int
        :type divisor: int
        :rtype: int

        Works by repeated subtraction of the largest left-shifted multiple
        of the divisor; the result is clamped to the signed 32-bit range
        (only INT_MIN / -1 can overflow).
        """
        # Result is negative iff exactly one operand is negative.
        negative = (dividend < 0) != (divisor < 0)
        dividend, divisor = abs(dividend), abs(divisor)

        quotient = 0
        while dividend >= divisor:
            # Find the largest power p with (divisor << p) still <= dividend.
            power = 1
            while dividend >= divisor << power:
                power += 1
            dividend -= divisor << (power - 1)
            quotient += 1 << (power - 1)

        if negative:
            quotient = -quotient

        # Clamp to [-2**31, 2**31 - 1] as required for 32-bit semantics.
        if (-1 << 31) <= quotient <= (1 << 31) - 1:
            return quotient
        return (1 << 31) - 1
# Manual smoke check: INT_MIN / -1 overflows the 32-bit range and must
# clamp to INT_MAX (2147483647).
if __name__ == '__main__':
    print(Solution().divide(-2147483648, -1))
|
normal
|
{
"blob_id": "4a0213351f8e9dcb2c6e71317a5ff1064974652e",
"index": 3418,
"step-1": "<mask token>\n",
"step-2": "class Solution:\n <mask token>\n\n\n<mask token>\n",
"step-3": "class Solution:\n\n def divide(self, dividend, divisor):\n \"\"\"\n :type dividend: int\n :type divisor: int\n :rtype: int\n \"\"\"\n negative = (dividend < 0) ^ (divisor < 0)\n dividend, divisor = abs(dividend), abs(divisor)\n result = 0\n while dividend >= divisor:\n shift_time = 1\n while dividend >= divisor << shift_time:\n shift_time += 1\n dividend -= divisor << shift_time - 1\n result += 1 << shift_time - 1\n if negative:\n result = -result\n if -1 << 31 <= result <= (1 << 31) - 1:\n return result\n return (1 << 31) - 1\n\n\n<mask token>\n",
"step-4": "class Solution:\n\n def divide(self, dividend, divisor):\n \"\"\"\n :type dividend: int\n :type divisor: int\n :rtype: int\n \"\"\"\n negative = (dividend < 0) ^ (divisor < 0)\n dividend, divisor = abs(dividend), abs(divisor)\n result = 0\n while dividend >= divisor:\n shift_time = 1\n while dividend >= divisor << shift_time:\n shift_time += 1\n dividend -= divisor << shift_time - 1\n result += 1 << shift_time - 1\n if negative:\n result = -result\n if -1 << 31 <= result <= (1 << 31) - 1:\n return result\n return (1 << 31) - 1\n\n\nif __name__ == '__main__':\n print(Solution().divide(-2147483648, -1))\n",
"step-5": "class Solution:\n def divide(self, dividend, divisor):\n \"\"\"\n :type dividend: int\n :type divisor: int\n :rtype: int\n \"\"\"\n negative = (dividend < 0) ^ (divisor < 0)\n dividend, divisor = abs(dividend), abs(divisor)\n\n result = 0\n while dividend >= divisor:\n shift_time = 1\n while dividend >= divisor << shift_time:\n shift_time += 1\n dividend -= divisor << (shift_time - 1)\n result += 1 << (shift_time - 1)\n\n if negative:\n result = -result\n\n if (-1 << 31) <= result <= (1 << 31) - 1:\n return result\n return (1 << 31) - 1\n\n\nif __name__ == '__main__':\n print(Solution().divide(-2147483648, -1))\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
def wait_condition(cond, timeout=1, sleeptime=0.01):
"""Wait for condition to return anything other than None
"""
if timeout is None:
timeout = 1
if timeout < sleeptime:
print('Warning, timeout cannot be smaller than', sleeptime)
timeout = sleeptime
tries = int(timeout / sleeptime)
for i in range(tries):
val = cond()
if val is not None:
break
sleep(sleeptime)
return val
<|reserved_special_token_0|>
def _queue_output(arguments, pidq, outputq):
"""Read/Write output/input of given process.
This function is meant to be executed in a thread as it may block
"""
kwargs = arguments['process']
input = arguments['input']
try:
proc = Popen(**kwargs)
except OSError as e:
pidq.put(None)
outputq.put(('',
"Unexpected exception caught during execution: '{0}' . ".format
(e), 255))
return
pidq.put(proc.pid)
out, err = proc.communicate(input)
out, err = out.decode('utf-8'), err.decode('utf-8')
outputq.put((out, err, proc.returncode))
def _retrieve_output(thread, timeout, queue, thread_error):
"""Fetch output from binary subprocess queues
"""
thread.join(timeout)
if thread.isAlive():
raise TimeoutWaitingFor(thread_error + '. Unexpected error')
try:
data = queue.get(timeout=timeout)
except Empty:
data = TimeoutWaitingFor('streams from program')
return data
def _get_output(arguments, timeout=None):
"""Collect output from the subprocess without blocking the main process if
subprocess hangs.
"""
output_timeout = 0.1
pidq = Queue()
outputq = Queue()
t = Thread(target=_queue_output, args=(arguments, pidq, outputq))
t.daemon = True
t.start()
try:
pid = pidq.get(timeout=timeout)
except Empty:
pid = None
if pid is None:
return _retrieve_output(t, output_timeout, outputq, 'Program to start')
state = wait_process(pid, timeout)
if state:
return _retrieve_output(t, output_timeout, outputq,
'Program thread to join')
for sig in (signal.SIGABRT, signal.SIGTERM, signal.SIGKILL):
try:
os.kill(pid, signal.SIGABRT)
except OSError as e:
if e.errno != 3:
raise
state = wait_process(pid, timeout)
if state:
return _retrieve_output(t, output_timeout, outputq,
'Program to die')
raise OSError("Program stopped responding and couldn't be killed")
<|reserved_special_token_0|>
def memoize(obj):
"""Keep an in-memory cache of function results given its inputs
"""
cache = obj.cache = {}
@functools.wraps(obj)
def memoizer(*args, **kwargs):
key = str(args) + str(kwargs)
if key not in cache:
cache[key] = obj(*args, **kwargs)
return cache[key]
return memoizer
<|reserved_special_token_0|>
def mkstemp(data):
"""
Create a temporary file that is removed at process exit
"""
def rmtemp(name):
try:
os.remove(name)
except OSError:
pass
f = tempfile.NamedTemporaryFile(delete=False)
f.write(data)
f.close()
atexit.register(rmtemp, f.name)
return f.name
def mkstemp_exec(data):
"""Create a temporary executable file that is removed at process exit
"""
name = mkstemp(data)
os.chmod(name, 493)
return name
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def wait_condition(cond, timeout=1, sleeptime=0.01):
"""Wait for condition to return anything other than None
"""
if timeout is None:
timeout = 1
if timeout < sleeptime:
print('Warning, timeout cannot be smaller than', sleeptime)
timeout = sleeptime
tries = int(timeout / sleeptime)
for i in range(tries):
val = cond()
if val is not None:
break
sleep(sleeptime)
return val
def wait_process(pid, timeout=None):
"""Wait for process to finish
"""
def process():
try:
os.kill(pid, 0)
except OSError:
return True
else:
return None
return wait_condition(process, timeout)
def _queue_output(arguments, pidq, outputq):
"""Read/Write output/input of given process.
This function is meant to be executed in a thread as it may block
"""
kwargs = arguments['process']
input = arguments['input']
try:
proc = Popen(**kwargs)
except OSError as e:
pidq.put(None)
outputq.put(('',
"Unexpected exception caught during execution: '{0}' . ".format
(e), 255))
return
pidq.put(proc.pid)
out, err = proc.communicate(input)
out, err = out.decode('utf-8'), err.decode('utf-8')
outputq.put((out, err, proc.returncode))
def _retrieve_output(thread, timeout, queue, thread_error):
"""Fetch output from binary subprocess queues
"""
thread.join(timeout)
if thread.isAlive():
raise TimeoutWaitingFor(thread_error + '. Unexpected error')
try:
data = queue.get(timeout=timeout)
except Empty:
data = TimeoutWaitingFor('streams from program')
return data
def _get_output(arguments, timeout=None):
"""Collect output from the subprocess without blocking the main process if
subprocess hangs.
"""
output_timeout = 0.1
pidq = Queue()
outputq = Queue()
t = Thread(target=_queue_output, args=(arguments, pidq, outputq))
t.daemon = True
t.start()
try:
pid = pidq.get(timeout=timeout)
except Empty:
pid = None
if pid is None:
return _retrieve_output(t, output_timeout, outputq, 'Program to start')
state = wait_process(pid, timeout)
if state:
return _retrieve_output(t, output_timeout, outputq,
'Program thread to join')
for sig in (signal.SIGABRT, signal.SIGTERM, signal.SIGKILL):
try:
os.kill(pid, signal.SIGABRT)
except OSError as e:
if e.errno != 3:
raise
state = wait_process(pid, timeout)
if state:
return _retrieve_output(t, output_timeout, outputq,
'Program to die')
raise OSError("Program stopped responding and couldn't be killed")
def run_cmd_wait(cmd, input=None, stdout=PIPE, stderr=PIPE, merge_streams=
False, env=os.environ, timeout=None):
"""Run a subprocess and wait for it to finish"""
if input is None:
stdin = None
else:
stdin = PIPE
if merge_streams:
stderr = STDOUT
else:
stderr = PIPE
arguments = {'process': {'args': cmd, 'stdin': stdin, 'stdout': stdout,
'stderr': stderr, 'bufsize': 1, 'close_fds': ON_POSIX, 'env': env},
'input': input}
out, err, exit = _get_output(arguments, timeout)
if merge_streams:
if exit != 0:
raise CommandError(cmd, exit, out)
else:
return exit, out
elif exit != 0:
raise CommandError(cmd, exit, out, err)
else:
return exit, out, err
def run_cmd_wait_nofail(*args, **kwargs):
"""Same as run_cmd_wait but silence the exception if it happens"""
try:
return run_cmd_wait(*args, **kwargs)
except CommandError as e:
return e.code, e.out, e.err
def memoize(obj):
"""Keep an in-memory cache of function results given its inputs
"""
cache = obj.cache = {}
@functools.wraps(obj)
def memoizer(*args, **kwargs):
key = str(args) + str(kwargs)
if key not in cache:
cache[key] = obj(*args, **kwargs)
return cache[key]
return memoizer
<|reserved_special_token_0|>
def parse_datafile(file):
"""Parse .data files, treating files as JSON
"""
data = []
with open(file) as fh:
for line in fh:
line = line.rstrip('\n')
if line.startswith('[') and line.endswith(']'):
line = '{' + line[1:-1] + '}'
if line.startswith('{'):
data.append(json.loads(line))
else:
data.append(line)
return data
def mkstemp(data):
"""
Create a temporary file that is removed at process exit
"""
def rmtemp(name):
try:
os.remove(name)
except OSError:
pass
f = tempfile.NamedTemporaryFile(delete=False)
f.write(data)
f.close()
atexit.register(rmtemp, f.name)
return f.name
def mkstemp_exec(data):
"""Create a temporary executable file that is removed at process exit
"""
name = mkstemp(data)
os.chmod(name, 493)
return name
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def shared_binary_location(cmd='shared'):
""" ../src/ is used by default.
"""
return os.path.join(BIN_PREFIX, cmd)
return binary_location(cmd, SHARED_USE_PATH)
def binary_location(cmd, USE_PATH=False):
""" ../src/ is used by default.
"""
return os.path.join(BIN_PREFIX, cmd)
def wait_condition(cond, timeout=1, sleeptime=0.01):
"""Wait for condition to return anything other than None
"""
if timeout is None:
timeout = 1
if timeout < sleeptime:
print('Warning, timeout cannot be smaller than', sleeptime)
timeout = sleeptime
tries = int(timeout / sleeptime)
for i in range(tries):
val = cond()
if val is not None:
break
sleep(sleeptime)
return val
def wait_process(pid, timeout=None):
"""Wait for process to finish
"""
def process():
try:
os.kill(pid, 0)
except OSError:
return True
else:
return None
return wait_condition(process, timeout)
def _queue_output(arguments, pidq, outputq):
"""Read/Write output/input of given process.
This function is meant to be executed in a thread as it may block
"""
kwargs = arguments['process']
input = arguments['input']
try:
proc = Popen(**kwargs)
except OSError as e:
pidq.put(None)
outputq.put(('',
"Unexpected exception caught during execution: '{0}' . ".format
(e), 255))
return
pidq.put(proc.pid)
out, err = proc.communicate(input)
out, err = out.decode('utf-8'), err.decode('utf-8')
outputq.put((out, err, proc.returncode))
def _retrieve_output(thread, timeout, queue, thread_error):
"""Fetch output from binary subprocess queues
"""
thread.join(timeout)
if thread.isAlive():
raise TimeoutWaitingFor(thread_error + '. Unexpected error')
try:
data = queue.get(timeout=timeout)
except Empty:
data = TimeoutWaitingFor('streams from program')
return data
def _get_output(arguments, timeout=None):
"""Collect output from the subprocess without blocking the main process if
subprocess hangs.
"""
output_timeout = 0.1
pidq = Queue()
outputq = Queue()
t = Thread(target=_queue_output, args=(arguments, pidq, outputq))
t.daemon = True
t.start()
try:
pid = pidq.get(timeout=timeout)
except Empty:
pid = None
if pid is None:
return _retrieve_output(t, output_timeout, outputq, 'Program to start')
state = wait_process(pid, timeout)
if state:
return _retrieve_output(t, output_timeout, outputq,
'Program thread to join')
for sig in (signal.SIGABRT, signal.SIGTERM, signal.SIGKILL):
try:
os.kill(pid, signal.SIGABRT)
except OSError as e:
if e.errno != 3:
raise
state = wait_process(pid, timeout)
if state:
return _retrieve_output(t, output_timeout, outputq,
'Program to die')
raise OSError("Program stopped responding and couldn't be killed")
def run_cmd_wait(cmd, input=None, stdout=PIPE, stderr=PIPE, merge_streams=
False, env=os.environ, timeout=None):
"""Run a subprocess and wait for it to finish"""
if input is None:
stdin = None
else:
stdin = PIPE
if merge_streams:
stderr = STDOUT
else:
stderr = PIPE
arguments = {'process': {'args': cmd, 'stdin': stdin, 'stdout': stdout,
'stderr': stderr, 'bufsize': 1, 'close_fds': ON_POSIX, 'env': env},
'input': input}
out, err, exit = _get_output(arguments, timeout)
if merge_streams:
if exit != 0:
raise CommandError(cmd, exit, out)
else:
return exit, out
elif exit != 0:
raise CommandError(cmd, exit, out, err)
else:
return exit, out, err
def run_cmd_wait_nofail(*args, **kwargs):
"""Same as run_cmd_wait but silence the exception if it happens"""
try:
return run_cmd_wait(*args, **kwargs)
except CommandError as e:
return e.code, e.out, e.err
def memoize(obj):
"""Keep an in-memory cache of function results given its inputs
"""
cache = obj.cache = {}
@functools.wraps(obj)
def memoizer(*args, **kwargs):
key = str(args) + str(kwargs)
if key not in cache:
cache[key] = obj(*args, **kwargs)
return cache[key]
return memoizer
<|reserved_special_token_0|>
def parse_datafile(file):
"""Parse .data files, treating files as JSON
"""
data = []
with open(file) as fh:
for line in fh:
line = line.rstrip('\n')
if line.startswith('[') and line.endswith(']'):
line = '{' + line[1:-1] + '}'
if line.startswith('{'):
data.append(json.loads(line))
else:
data.append(line)
return data
def mkstemp(data):
"""
Create a temporary file that is removed at process exit
"""
def rmtemp(name):
try:
os.remove(name)
except OSError:
pass
f = tempfile.NamedTemporaryFile(delete=False)
f.write(data)
f.close()
atexit.register(rmtemp, f.name)
return f.name
def mkstemp_exec(data):
"""Create a temporary executable file that is removed at process exit
"""
name = mkstemp(data)
os.chmod(name, 493)
return name
<|reserved_special_token_1|>
<|reserved_special_token_0|>
# True on POSIX platforms (controls close_fds when spawning subprocesses).
ON_POSIX = 'posix' in sys.builtin_module_names
# Directory relative to basetest module location.
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
# Location of binary files (usually the src/ folder).
BIN_PREFIX = os.path.abspath(os.path.join(CURRENT_DIR, '..', '..', 'src'))
# Default location of test certificates.
DEFAULT_CERT_PATH = os.path.abspath(os.path.join(CURRENT_DIR, '..',
    'test_certs'))
# Default location of test extensions.
DEFAULT_EXTENSION_PATH = os.path.abspath(os.path.join(CURRENT_DIR, '..',
    'test_extensions'))
# Environment flag to control skipping of shared tests.
SHARED_SKIP = os.environ.get('SHARED_SKIP', False)
# Environment flag to control use of PATH or in-tree binaries.
SHARED_USE_PATH = os.environ.get('SHARED_USE_PATH', False)
# Matches one version-agnostic UUID: 8-4-4-4-12 hexadecimal digits.
UUID_REGEXP = '[0-9A-Fa-f]{8}-' + '[0-9A-Fa-f]{4}-' * 3 + '[0-9A-Fa-f]{12}'
def shared_binary_location(cmd='shared'):
    """Return the absolute path of the in-tree *cmd* binary under ../src/.

    NOTE: the original body contained an unreachable second statement,
    ``return binary_location(cmd, SHARED_USE_PATH)``, after the first
    return; the dead statement is removed here without changing behavior.
    """
    return os.path.join(BIN_PREFIX, cmd)
def binary_location(cmd, USE_PATH=False):
    """ ../src/ is used by default.

    Return the absolute path of *cmd* under the in-tree src/ directory
    (BIN_PREFIX). USE_PATH is accepted for API compatibility but is
    currently ignored: the in-tree binary is always returned.
    """
    return os.path.join(BIN_PREFIX, cmd)
def wait_condition(cond, timeout=1, sleeptime=0.01):
    """Poll *cond* until it returns something other than None.

    cond is re-evaluated every *sleeptime* seconds for at most *timeout*
    seconds. Returns the first non-None value produced, or None if the
    timeout expires first.
    """
    # Normalize the timeout: None means "use the default", and it can
    # never be shorter than a single polling interval.
    if timeout is None:
        timeout = 1
    if timeout < sleeptime:
        print('Warning, timeout cannot be smaller than', sleeptime)
        timeout = sleeptime

    attempts = int(timeout / sleeptime)
    result = None
    for _ in range(attempts):
        result = cond()
        if result is not None:
            return result
        sleep(sleeptime)
    return result
def wait_process(pid, timeout=None):
    """Block until the process *pid* no longer exists.

    Returns True once the process is gone, or None if *timeout* expires
    while it is still running.
    """
    def gone():
        # Signal 0 performs existence/permission checks without actually
        # delivering a signal; OSError means the pid is no longer present.
        try:
            os.kill(pid, 0)
        except OSError:
            return True
        return None

    return wait_condition(gone, timeout)
def _queue_output(arguments, pidq, outputq):
"""Read/Write output/input of given process.
This function is meant to be executed in a thread as it may block
"""
kwargs = arguments['process']
input = arguments['input']
try:
proc = Popen(**kwargs)
except OSError as e:
pidq.put(None)
outputq.put(('',
"Unexpected exception caught during execution: '{0}' . ".format
(e), 255))
return
pidq.put(proc.pid)
out, err = proc.communicate(input)
out, err = out.decode('utf-8'), err.decode('utf-8')
outputq.put((out, err, proc.returncode))
def _retrieve_output(thread, timeout, queue, thread_error):
"""Fetch output from binary subprocess queues
"""
thread.join(timeout)
if thread.isAlive():
raise TimeoutWaitingFor(thread_error + '. Unexpected error')
try:
data = queue.get(timeout=timeout)
except Empty:
data = TimeoutWaitingFor('streams from program')
return data
def _get_output(arguments, timeout=None):
    """Collect output from the subprocess without blocking the main process
    if the subprocess hangs.

    Spawns the child via a worker thread (_queue_output) and waits for it
    to finish; if it does not, signals it with escalating severity.
    Returns the (stdout, stderr, returncode) tuple queued by the worker.
    Raises OSError if the child cannot be killed.
    """
    # Seconds to wait on the queues once the child is (believed) done.
    output_timeout = 0.1
    pidq = Queue()
    outputq = Queue()
    t = Thread(target=_queue_output, args=(arguments, pidq, outputq))
    t.daemon = True
    t.start()
    try:
        pid = pidq.get(timeout=timeout)
    except Empty:
        pid = None
    # Child failed to spawn (or spawning timed out).
    if pid is None:
        return _retrieve_output(t, output_timeout, outputq, 'Program to start')
    # Normal path: wait for the child to exit on its own.
    state = wait_process(pid, timeout)
    if state:
        return _retrieve_output(t, output_timeout, outputq,
            'Program thread to join')
    # Child is stuck: escalate SIGABRT -> SIGTERM -> SIGKILL until it dies.
    # BUG FIX: the original sent SIGABRT on every iteration instead of the
    # loop's escalating signal, so SIGTERM/SIGKILL were never delivered.
    for sig in (signal.SIGABRT, signal.SIGTERM, signal.SIGKILL):
        try:
            os.kill(pid, sig)
        except OSError as e:
            # errno 3 == ESRCH: the process exited between checks.
            if e.errno != 3:
                raise
        state = wait_process(pid, timeout)
        if state:
            return _retrieve_output(t, output_timeout, outputq,
                'Program to die')
    raise OSError("Program stopped responding and couldn't be killed")
def run_cmd_wait(cmd, input=None, stdout=PIPE, stderr=PIPE, merge_streams=
        False, env=os.environ, timeout=None):
    """Run a subprocess and wait for it to finish.

    cmd: argv list for the child process.
    input: bytes piped to the child's stdin (None = no stdin pipe).
    merge_streams: interleave stderr into stdout; the return value is then
        (exit, out) instead of (exit, out, err).
    Raises CommandError on a non-zero exit status.
    """
    if input is None:
        stdin = None
    else:
        stdin = PIPE
    if merge_streams:
        stderr = STDOUT
    # BUG FIX: the original had an else-branch that reset stderr to PIPE,
    # silently discarding a caller-supplied stderr argument.
    arguments = {'process': {'args': cmd, 'stdin': stdin, 'stdout': stdout,
        'stderr': stderr, 'bufsize': 1, 'close_fds': ON_POSIX, 'env': env},
        'input': input}
    out, err, exit = _get_output(arguments, timeout)
    if merge_streams:
        if exit != 0:
            raise CommandError(cmd, exit, out)
        return exit, out
    if exit != 0:
        raise CommandError(cmd, exit, out, err)
    return exit, out, err
def run_cmd_wait_nofail(*args, **kwargs):
    """Same as run_cmd_wait but silence the exception if it happens.

    On failure the CommandError's details are surfaced as a normal
    (code, out, err) return value instead of propagating.
    """
    try:
        return run_cmd_wait(*args, **kwargs)
    except CommandError as e:
        return e.code, e.out, e.err
def memoize(obj):
    """Keep an in-memory cache of function results given its inputs.

    The cache dict is also exposed as ``obj.cache`` so callers can
    inspect or clear it.
    """
    cache = obj.cache = {}

    @functools.wraps(obj)
    def memoizer(*args, **kwargs):
        # Stringified arguments act as the (cheap, repr-based) cache key.
        key = str(args) + str(kwargs)
        try:
            return cache[key]
        except KeyError:
            value = obj(*args, **kwargs)
            cache[key] = value
            return value

    return memoizer
<|reserved_special_token_0|>
which = memoize(which)
def parse_datafile(file):
    """Parse .data files, treating lines as JSON where possible.

    Each line bracketed by {} or [] is decoded as a JSON object ([] lines
    are first rewritten with {} braces); every other line is kept as a
    raw string. Returns the resulting list.
    """
    parsed = []
    with open(file) as fh:
        for raw in fh:
            line = raw.rstrip('\n')
            # Rewrite [...] lines so both bracket styles decode as objects.
            if line.startswith('[') and line.endswith(']'):
                line = '{' + line[1:-1] + '}'
            parsed.append(json.loads(line) if line.startswith('{') else line)
    return parsed
def mkstemp(data):
    """
    Create a temporary file that is removed at process exit
    """

    def rmtemp(name):
        # Best effort: the file may already be gone.
        try:
            os.remove(name)
        except OSError:
            pass

    handle = tempfile.NamedTemporaryFile(delete=False)
    handle.write(data)
    handle.close()
    # Schedule cleanup for interpreter shutdown.
    atexit.register(rmtemp, handle.name)
    return handle.name
def mkstemp_exec(data):
    """Create a temporary executable file that is removed at process exit
    """
    path = mkstemp(data)
    # 0o755 == 493: rwxr-xr-x, so the test process can execute it directly.
    os.chmod(path, 0o755)
    return path
<|reserved_special_token_1|>
# -*- coding: utf-8 -*-
import os
import sys
import socket
import signal
import functools
import atexit
import tempfile
from subprocess import Popen, PIPE, STDOUT
from threading import Thread
from queue import Queue, Empty
from time import sleep
import json
from .exceptions import CommandError, TimeoutWaitingFor
# True on POSIX platforms; passed to Popen as the close_fds value below.
ON_POSIX = 'posix' in sys.builtin_module_names
# Directory relative to basetest module location
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
# Location of binary files (usually the src/ folder)
BIN_PREFIX = os.path.abspath(
    os.path.join(CURRENT_DIR, "..", "..", "src")
)
# Default location of test certificates
DEFAULT_CERT_PATH = os.path.abspath(
    os.path.join(CURRENT_DIR, "..", "test_certs")
)
# Default location of test extensions
DEFAULT_EXTENSION_PATH = os.path.abspath(
    os.path.join(CURRENT_DIR, "..", "test_extensions")
)
# Environment flags to control skipping of shared tests
SHARED_SKIP = os.environ.get("SHARED_SKIP", False)
# Environment flags to control use of PATH or in-tree binaries
SHARED_USE_PATH = os.environ.get("SHARED_USE_PATH", False)
# Matches a canonical 8-4-4-4-12 hexadecimal UUID.
UUID_REGEXP = ("[0-9A-Fa-f]{8}-" + ("[0-9A-Fa-f]{4}-" * 3) + "[0-9A-Fa-f]{12}")
def shared_binary_location(cmd="shared"):
    """Return the path of *cmd* for the shared test binaries.

    ../src/ is used by default.
    """
    # BUG FIX: an unconditional early return made the delegation below
    # unreachable dead code.  Both paths yield os.path.join(BIN_PREFIX, cmd)
    # today because binary_location ignores USE_PATH, so behaviour is
    # unchanged; the dead line is simply removed.
    return binary_location(cmd, SHARED_USE_PATH)
def binary_location(cmd, USE_PATH=False):
    """ ../src/ is used by default.

    NOTE(review): USE_PATH is currently unused -- the in-tree src/ binary is
    always returned; PATH lookup is not implemented here.
    """
    return os.path.join(BIN_PREFIX, cmd)
def wait_condition(cond, timeout=1, sleeptime=.01):
    """Poll *cond* until it returns something other than None.

    Returns the last value produced by cond() -- None means the timeout
    expired before the condition was satisfied.
    """
    # NOTE Increasing sleeptime can dramatically increase testsuite runtime
    # It also reduces CPU load significantly
    if timeout is None:
        timeout = 1
    if timeout < sleeptime:
        print("Warning, timeout cannot be smaller than", sleeptime)
        timeout = sleeptime
    # Number of polls before giving up.
    attempts = int(timeout / sleeptime)
    val = None
    for _ in range(attempts):
        val = cond()
        if val is not None:
            return val
        sleep(sleeptime)
    return val
def wait_process(pid, timeout=None):
    """Poll until process *pid* disappears; True on exit, None on timeout."""

    def gone():
        # Signal 0 performs error checking only: os.kill raises OSError once
        # the process no longer exists.
        try:
            os.kill(pid, 0)
        except OSError:
            return True
        return None

    return wait_condition(gone, timeout)
def _queue_output(arguments, pidq, outputq):
    """Read/Write output/input of given process.
    This function is meant to be executed in a thread as it may block

    Args:
        arguments: {"process": Popen kwargs, "input": bytes or None}
        pidq: queue that receives the child's pid (None if spawning failed)
        outputq: queue that receives one (stdout, stderr, exitcode) tuple
    """
    kwargs = arguments["process"]
    input = arguments["input"]

    try:
        proc = Popen(**kwargs)
    except OSError as e:
        # pid None is read by the main thread as a crash of the process
        pidq.put(None)

        outputq.put((
            "",
            ("Unexpected exception caught during execution: '{0}' . ".format(e)),
            255))  # false exitcode

        return

    # Put the PID in the queue for main process to know.
    pidq.put(proc.pid)

    # Send input and wait for finish
    out, err = proc.communicate(input)

    # BUG FIX: communicate() returns None for any stream that was not a pipe
    # (e.g. stderr when the caller merged it into stdout via STDOUT); calling
    # .decode() on None raised AttributeError and silently killed this worker
    # thread, so the caller timed out instead of getting output.
    if out is not None:
        out = out.decode('utf-8')
    if err is not None:
        err = err.decode('utf-8')

    # Give the output back to the caller
    outputq.put((out, err, proc.returncode))
def _retrieve_output(thread, timeout, queue, thread_error):
    """Fetch output from binary subprocess queues

    Joins *thread* and then pulls the (stdout, stderr, exitcode) tuple from
    *queue*; returns a TimeoutWaitingFor instance if the queue stays empty.
    """
    # Try to join the thread on failure abort
    thread.join(timeout)
    # BUG FIX: Thread.isAlive() was removed in Python 3.9; is_alive() is the
    # supported spelling on every Python 3 release.
    if thread.is_alive():
        # Join should have killed the thread. This is unexpected
        raise TimeoutWaitingFor(thread_error + ". Unexpected error")

    # Thread died so we should have output
    try:
        # data = (stdout, stderr, exitcode)
        data = queue.get(timeout=timeout)
    except Empty:
        data = TimeoutWaitingFor("streams from program")

    return data
def _get_output(arguments, timeout=None):
    """Collect output from the subprocess without blocking the main process if
    subprocess hangs.

    Returns the (stdout, stderr, exitcode) tuple produced by the worker
    thread, or raises OSError if the child cannot be killed at all.
    """
    # NOTE Increase this value if tests fail with None being received as
    # stdout/stderr instead of the expected content
    output_timeout = 0.1  # seconds

    pidq = Queue()
    outputq = Queue()

    t = Thread(target=_queue_output, args=(arguments, pidq, outputq))
    t.daemon = True
    t.start()

    try:
        pid = pidq.get(timeout=timeout)
    except Empty:
        pid = None

    # Process crashed or timed out for some reason
    if pid is None:
        return _retrieve_output(t, output_timeout, outputq,
                                "Program to start")

    # Wait for process to finish (normal execution)
    state = wait_process(pid, timeout)

    if state:
        # Process finished
        return _retrieve_output(t, output_timeout, outputq,
                                "Program thread to join")

    # If we reach this point we assume the process got stuck or timed out
    for sig in (signal.SIGABRT, signal.SIGTERM, signal.SIGKILL):
        # Start with lower signals and escalate if process ignores them.
        # BUG FIX: the loop previously hard-coded SIGABRT on every iteration,
        # so the intended escalation to SIGTERM/SIGKILL never happened.
        try:
            os.kill(pid, sig)
        except OSError as e:
            # errno 3 (ESRCH) means the process died between last check and now
            if e.errno != 3:
                raise

        # Wait for process to finish (should die/exit after signal)
        state = wait_process(pid, timeout)

        if state:
            # Process finished
            return _retrieve_output(t, output_timeout, outputq,
                                    "Program to die")

    # This should never happen but in case something goes really bad
    raise OSError("Program stopped responding and couldn't be killed")
def run_cmd_wait(cmd, input=None, stdout=PIPE, stderr=PIPE,
                 merge_streams=False, env=os.environ, timeout=None):
    """Run a subprocess and wait for it to finish.

    Args:
        cmd: argv list for the child process.
        input: bytes to send to the child's stdin, or None for no stdin pipe.
        stdout, stderr: stream destinations as accepted by subprocess.Popen.
        merge_streams: when True, stderr is interleaved into stdout and the
            stderr argument is ignored.
        env: environment mapping for the child (defaults to os.environ).
        timeout: seconds to wait before the child is considered hung.

    Returns:
        (exitcode, stdout) when merge_streams, else (exitcode, stdout, stderr).

    Raises:
        CommandError: when the child exits with a non-zero status.
    """
    # stdin only needs to be a pipe when there is data to deliver.
    if input is None:
        stdin = None
    else:
        stdin = PIPE

    if merge_streams:
        stderr = STDOUT
    # BUG FIX: the caller's stderr argument used to be overwritten with PIPE
    # unconditionally; it is now honoured (the default is still PIPE).

    arguments = {
        "process": {
            "args": cmd,
            "stdin": stdin,
            "stdout": stdout,
            "stderr": stderr,
            "bufsize": 1,
            "close_fds": ON_POSIX,
            "env": env,
        },
        "input": input,
    }
    out, err, exit = _get_output(arguments, timeout)

    if merge_streams:
        if exit != 0:
            raise CommandError(cmd, exit, out)
        return exit, out
    if exit != 0:
        raise CommandError(cmd, exit, out, err)
    return exit, out, err
def run_cmd_wait_nofail(*args, **kwargs):
    """Same as run_cmd_wait but silence the exception if it happens"""
    try:
        result = run_cmd_wait(*args, **kwargs)
    except CommandError as err:
        result = err.code, err.out, err.err
    return result
def memoize(obj):
    """Keep an in-memory cache of function results given its inputs
    """
    # The cache is also exposed as an attribute of the wrapped callable.
    cache = obj.cache = {}

    @functools.wraps(obj)
    def memoizer(*args, **kwargs):
        # Stringified arguments serve as the cache key.
        key = str(args) + str(kwargs)
        try:
            return cache[key]
        except KeyError:
            value = cache[key] = obj(*args, **kwargs)
            return value
    return memoizer
# Memoize shutil.which so repeated lookups of the same binary name hit the
# in-memory cache instead of re-scanning PATH.
from shutil import which
which = memoize(which)
def parse_datafile(file):
    """Parse .data files, treating files as JSON
    """
    data = []
    with open(file) as handle:
        for raw in handle:
            line = raw.rstrip("\n")

            # Turn [] strings into {} to be treated properly as JSON hashes
            if line.startswith('[') and line.endswith(']'):
                line = '{%s}' % line[1:-1]

            data.append(json.loads(line) if line.startswith("{") else line)
    return data
def mkstemp(data):
    """
    Create a temporary file that is removed at process exit
    """
    def rmtemp(name):
        # Best effort: the file may already be gone.
        try:
            os.remove(name)
        except OSError:
            pass

    handle = tempfile.NamedTemporaryFile(delete=False)
    handle.write(data)
    handle.close()

    # Ensure removal at end of python session
    atexit.register(rmtemp, handle.name)

    return handle.name
def mkstemp_exec(data):
    """Create a temporary executable file that is removed at process exit
    """
    path = mkstemp(data)
    # rwxr-xr-x so the test process can execute it directly.
    os.chmod(path, 0o755)
    return path
# vim: ai sts=4 et sw=4
|
flexible
|
{
"blob_id": "7f220a970d65a91228501f7db59089e6c0604fb5",
"index": 9915,
"step-1": "<mask token>\n\n\ndef wait_condition(cond, timeout=1, sleeptime=0.01):\n \"\"\"Wait for condition to return anything other than None\n \"\"\"\n if timeout is None:\n timeout = 1\n if timeout < sleeptime:\n print('Warning, timeout cannot be smaller than', sleeptime)\n timeout = sleeptime\n tries = int(timeout / sleeptime)\n for i in range(tries):\n val = cond()\n if val is not None:\n break\n sleep(sleeptime)\n return val\n\n\n<mask token>\n\n\ndef _queue_output(arguments, pidq, outputq):\n \"\"\"Read/Write output/input of given process.\n This function is meant to be executed in a thread as it may block\n \"\"\"\n kwargs = arguments['process']\n input = arguments['input']\n try:\n proc = Popen(**kwargs)\n except OSError as e:\n pidq.put(None)\n outputq.put(('',\n \"Unexpected exception caught during execution: '{0}' . \".format\n (e), 255))\n return\n pidq.put(proc.pid)\n out, err = proc.communicate(input)\n out, err = out.decode('utf-8'), err.decode('utf-8')\n outputq.put((out, err, proc.returncode))\n\n\ndef _retrieve_output(thread, timeout, queue, thread_error):\n \"\"\"Fetch output from binary subprocess queues\n \"\"\"\n thread.join(timeout)\n if thread.isAlive():\n raise TimeoutWaitingFor(thread_error + '. 
Unexpected error')\n try:\n data = queue.get(timeout=timeout)\n except Empty:\n data = TimeoutWaitingFor('streams from program')\n return data\n\n\ndef _get_output(arguments, timeout=None):\n \"\"\"Collect output from the subprocess without blocking the main process if\n subprocess hangs.\n \"\"\"\n output_timeout = 0.1\n pidq = Queue()\n outputq = Queue()\n t = Thread(target=_queue_output, args=(arguments, pidq, outputq))\n t.daemon = True\n t.start()\n try:\n pid = pidq.get(timeout=timeout)\n except Empty:\n pid = None\n if pid is None:\n return _retrieve_output(t, output_timeout, outputq, 'Program to start')\n state = wait_process(pid, timeout)\n if state:\n return _retrieve_output(t, output_timeout, outputq,\n 'Program thread to join')\n for sig in (signal.SIGABRT, signal.SIGTERM, signal.SIGKILL):\n try:\n os.kill(pid, signal.SIGABRT)\n except OSError as e:\n if e.errno != 3:\n raise\n state = wait_process(pid, timeout)\n if state:\n return _retrieve_output(t, output_timeout, outputq,\n 'Program to die')\n raise OSError(\"Program stopped responding and couldn't be killed\")\n\n\n<mask token>\n\n\ndef memoize(obj):\n \"\"\"Keep an in-memory cache of function results given its inputs\n \"\"\"\n cache = obj.cache = {}\n\n @functools.wraps(obj)\n def memoizer(*args, **kwargs):\n key = str(args) + str(kwargs)\n if key not in cache:\n cache[key] = obj(*args, **kwargs)\n return cache[key]\n return memoizer\n\n\n<mask token>\n\n\ndef mkstemp(data):\n \"\"\"\n Create a temporary file that is removed at process exit\n \"\"\"\n\n def rmtemp(name):\n try:\n os.remove(name)\n except OSError:\n pass\n f = tempfile.NamedTemporaryFile(delete=False)\n f.write(data)\n f.close()\n atexit.register(rmtemp, f.name)\n return f.name\n\n\ndef mkstemp_exec(data):\n \"\"\"Create a temporary executable file that is removed at process exit\n \"\"\"\n name = mkstemp(data)\n os.chmod(name, 493)\n return name\n",
"step-2": "<mask token>\n\n\ndef wait_condition(cond, timeout=1, sleeptime=0.01):\n \"\"\"Wait for condition to return anything other than None\n \"\"\"\n if timeout is None:\n timeout = 1\n if timeout < sleeptime:\n print('Warning, timeout cannot be smaller than', sleeptime)\n timeout = sleeptime\n tries = int(timeout / sleeptime)\n for i in range(tries):\n val = cond()\n if val is not None:\n break\n sleep(sleeptime)\n return val\n\n\ndef wait_process(pid, timeout=None):\n \"\"\"Wait for process to finish\n \"\"\"\n\n def process():\n try:\n os.kill(pid, 0)\n except OSError:\n return True\n else:\n return None\n return wait_condition(process, timeout)\n\n\ndef _queue_output(arguments, pidq, outputq):\n \"\"\"Read/Write output/input of given process.\n This function is meant to be executed in a thread as it may block\n \"\"\"\n kwargs = arguments['process']\n input = arguments['input']\n try:\n proc = Popen(**kwargs)\n except OSError as e:\n pidq.put(None)\n outputq.put(('',\n \"Unexpected exception caught during execution: '{0}' . \".format\n (e), 255))\n return\n pidq.put(proc.pid)\n out, err = proc.communicate(input)\n out, err = out.decode('utf-8'), err.decode('utf-8')\n outputq.put((out, err, proc.returncode))\n\n\ndef _retrieve_output(thread, timeout, queue, thread_error):\n \"\"\"Fetch output from binary subprocess queues\n \"\"\"\n thread.join(timeout)\n if thread.isAlive():\n raise TimeoutWaitingFor(thread_error + '. 
Unexpected error')\n try:\n data = queue.get(timeout=timeout)\n except Empty:\n data = TimeoutWaitingFor('streams from program')\n return data\n\n\ndef _get_output(arguments, timeout=None):\n \"\"\"Collect output from the subprocess without blocking the main process if\n subprocess hangs.\n \"\"\"\n output_timeout = 0.1\n pidq = Queue()\n outputq = Queue()\n t = Thread(target=_queue_output, args=(arguments, pidq, outputq))\n t.daemon = True\n t.start()\n try:\n pid = pidq.get(timeout=timeout)\n except Empty:\n pid = None\n if pid is None:\n return _retrieve_output(t, output_timeout, outputq, 'Program to start')\n state = wait_process(pid, timeout)\n if state:\n return _retrieve_output(t, output_timeout, outputq,\n 'Program thread to join')\n for sig in (signal.SIGABRT, signal.SIGTERM, signal.SIGKILL):\n try:\n os.kill(pid, signal.SIGABRT)\n except OSError as e:\n if e.errno != 3:\n raise\n state = wait_process(pid, timeout)\n if state:\n return _retrieve_output(t, output_timeout, outputq,\n 'Program to die')\n raise OSError(\"Program stopped responding and couldn't be killed\")\n\n\ndef run_cmd_wait(cmd, input=None, stdout=PIPE, stderr=PIPE, merge_streams=\n False, env=os.environ, timeout=None):\n \"\"\"Run a subprocess and wait for it to finish\"\"\"\n if input is None:\n stdin = None\n else:\n stdin = PIPE\n if merge_streams:\n stderr = STDOUT\n else:\n stderr = PIPE\n arguments = {'process': {'args': cmd, 'stdin': stdin, 'stdout': stdout,\n 'stderr': stderr, 'bufsize': 1, 'close_fds': ON_POSIX, 'env': env},\n 'input': input}\n out, err, exit = _get_output(arguments, timeout)\n if merge_streams:\n if exit != 0:\n raise CommandError(cmd, exit, out)\n else:\n return exit, out\n elif exit != 0:\n raise CommandError(cmd, exit, out, err)\n else:\n return exit, out, err\n\n\ndef run_cmd_wait_nofail(*args, **kwargs):\n \"\"\"Same as run_cmd_wait but silence the exception if it happens\"\"\"\n try:\n return run_cmd_wait(*args, **kwargs)\n except CommandError as e:\n 
return e.code, e.out, e.err\n\n\ndef memoize(obj):\n \"\"\"Keep an in-memory cache of function results given its inputs\n \"\"\"\n cache = obj.cache = {}\n\n @functools.wraps(obj)\n def memoizer(*args, **kwargs):\n key = str(args) + str(kwargs)\n if key not in cache:\n cache[key] = obj(*args, **kwargs)\n return cache[key]\n return memoizer\n\n\n<mask token>\n\n\ndef parse_datafile(file):\n \"\"\"Parse .data files, treating files as JSON\n \"\"\"\n data = []\n with open(file) as fh:\n for line in fh:\n line = line.rstrip('\\n')\n if line.startswith('[') and line.endswith(']'):\n line = '{' + line[1:-1] + '}'\n if line.startswith('{'):\n data.append(json.loads(line))\n else:\n data.append(line)\n return data\n\n\ndef mkstemp(data):\n \"\"\"\n Create a temporary file that is removed at process exit\n \"\"\"\n\n def rmtemp(name):\n try:\n os.remove(name)\n except OSError:\n pass\n f = tempfile.NamedTemporaryFile(delete=False)\n f.write(data)\n f.close()\n atexit.register(rmtemp, f.name)\n return f.name\n\n\ndef mkstemp_exec(data):\n \"\"\"Create a temporary executable file that is removed at process exit\n \"\"\"\n name = mkstemp(data)\n os.chmod(name, 493)\n return name\n",
"step-3": "<mask token>\n\n\ndef shared_binary_location(cmd='shared'):\n \"\"\" ../src/ is used by default.\n \"\"\"\n return os.path.join(BIN_PREFIX, cmd)\n return binary_location(cmd, SHARED_USE_PATH)\n\n\ndef binary_location(cmd, USE_PATH=False):\n \"\"\" ../src/ is used by default.\n \"\"\"\n return os.path.join(BIN_PREFIX, cmd)\n\n\ndef wait_condition(cond, timeout=1, sleeptime=0.01):\n \"\"\"Wait for condition to return anything other than None\n \"\"\"\n if timeout is None:\n timeout = 1\n if timeout < sleeptime:\n print('Warning, timeout cannot be smaller than', sleeptime)\n timeout = sleeptime\n tries = int(timeout / sleeptime)\n for i in range(tries):\n val = cond()\n if val is not None:\n break\n sleep(sleeptime)\n return val\n\n\ndef wait_process(pid, timeout=None):\n \"\"\"Wait for process to finish\n \"\"\"\n\n def process():\n try:\n os.kill(pid, 0)\n except OSError:\n return True\n else:\n return None\n return wait_condition(process, timeout)\n\n\ndef _queue_output(arguments, pidq, outputq):\n \"\"\"Read/Write output/input of given process.\n This function is meant to be executed in a thread as it may block\n \"\"\"\n kwargs = arguments['process']\n input = arguments['input']\n try:\n proc = Popen(**kwargs)\n except OSError as e:\n pidq.put(None)\n outputq.put(('',\n \"Unexpected exception caught during execution: '{0}' . \".format\n (e), 255))\n return\n pidq.put(proc.pid)\n out, err = proc.communicate(input)\n out, err = out.decode('utf-8'), err.decode('utf-8')\n outputq.put((out, err, proc.returncode))\n\n\ndef _retrieve_output(thread, timeout, queue, thread_error):\n \"\"\"Fetch output from binary subprocess queues\n \"\"\"\n thread.join(timeout)\n if thread.isAlive():\n raise TimeoutWaitingFor(thread_error + '. 
Unexpected error')\n try:\n data = queue.get(timeout=timeout)\n except Empty:\n data = TimeoutWaitingFor('streams from program')\n return data\n\n\ndef _get_output(arguments, timeout=None):\n \"\"\"Collect output from the subprocess without blocking the main process if\n subprocess hangs.\n \"\"\"\n output_timeout = 0.1\n pidq = Queue()\n outputq = Queue()\n t = Thread(target=_queue_output, args=(arguments, pidq, outputq))\n t.daemon = True\n t.start()\n try:\n pid = pidq.get(timeout=timeout)\n except Empty:\n pid = None\n if pid is None:\n return _retrieve_output(t, output_timeout, outputq, 'Program to start')\n state = wait_process(pid, timeout)\n if state:\n return _retrieve_output(t, output_timeout, outputq,\n 'Program thread to join')\n for sig in (signal.SIGABRT, signal.SIGTERM, signal.SIGKILL):\n try:\n os.kill(pid, signal.SIGABRT)\n except OSError as e:\n if e.errno != 3:\n raise\n state = wait_process(pid, timeout)\n if state:\n return _retrieve_output(t, output_timeout, outputq,\n 'Program to die')\n raise OSError(\"Program stopped responding and couldn't be killed\")\n\n\ndef run_cmd_wait(cmd, input=None, stdout=PIPE, stderr=PIPE, merge_streams=\n False, env=os.environ, timeout=None):\n \"\"\"Run a subprocess and wait for it to finish\"\"\"\n if input is None:\n stdin = None\n else:\n stdin = PIPE\n if merge_streams:\n stderr = STDOUT\n else:\n stderr = PIPE\n arguments = {'process': {'args': cmd, 'stdin': stdin, 'stdout': stdout,\n 'stderr': stderr, 'bufsize': 1, 'close_fds': ON_POSIX, 'env': env},\n 'input': input}\n out, err, exit = _get_output(arguments, timeout)\n if merge_streams:\n if exit != 0:\n raise CommandError(cmd, exit, out)\n else:\n return exit, out\n elif exit != 0:\n raise CommandError(cmd, exit, out, err)\n else:\n return exit, out, err\n\n\ndef run_cmd_wait_nofail(*args, **kwargs):\n \"\"\"Same as run_cmd_wait but silence the exception if it happens\"\"\"\n try:\n return run_cmd_wait(*args, **kwargs)\n except CommandError as e:\n 
return e.code, e.out, e.err\n\n\ndef memoize(obj):\n \"\"\"Keep an in-memory cache of function results given its inputs\n \"\"\"\n cache = obj.cache = {}\n\n @functools.wraps(obj)\n def memoizer(*args, **kwargs):\n key = str(args) + str(kwargs)\n if key not in cache:\n cache[key] = obj(*args, **kwargs)\n return cache[key]\n return memoizer\n\n\n<mask token>\n\n\ndef parse_datafile(file):\n \"\"\"Parse .data files, treating files as JSON\n \"\"\"\n data = []\n with open(file) as fh:\n for line in fh:\n line = line.rstrip('\\n')\n if line.startswith('[') and line.endswith(']'):\n line = '{' + line[1:-1] + '}'\n if line.startswith('{'):\n data.append(json.loads(line))\n else:\n data.append(line)\n return data\n\n\ndef mkstemp(data):\n \"\"\"\n Create a temporary file that is removed at process exit\n \"\"\"\n\n def rmtemp(name):\n try:\n os.remove(name)\n except OSError:\n pass\n f = tempfile.NamedTemporaryFile(delete=False)\n f.write(data)\n f.close()\n atexit.register(rmtemp, f.name)\n return f.name\n\n\ndef mkstemp_exec(data):\n \"\"\"Create a temporary executable file that is removed at process exit\n \"\"\"\n name = mkstemp(data)\n os.chmod(name, 493)\n return name\n",
"step-4": "<mask token>\nON_POSIX = 'posix' in sys.builtin_module_names\nCURRENT_DIR = os.path.dirname(os.path.abspath(__file__))\nBIN_PREFIX = os.path.abspath(os.path.join(CURRENT_DIR, '..', '..', 'src'))\nDEFAULT_CERT_PATH = os.path.abspath(os.path.join(CURRENT_DIR, '..',\n 'test_certs'))\nDEFAULT_EXTENSION_PATH = os.path.abspath(os.path.join(CURRENT_DIR, '..',\n 'test_extensions'))\nSHARED_SKIP = os.environ.get('SHARED_SKIP', False)\nSHARED_USE_PATH = os.environ.get('SHARED_USE_PATH', False)\nUUID_REGEXP = '[0-9A-Fa-f]{8}-' + '[0-9A-Fa-f]{4}-' * 3 + '[0-9A-Fa-f]{12}'\n\n\ndef shared_binary_location(cmd='shared'):\n \"\"\" ../src/ is used by default.\n \"\"\"\n return os.path.join(BIN_PREFIX, cmd)\n return binary_location(cmd, SHARED_USE_PATH)\n\n\ndef binary_location(cmd, USE_PATH=False):\n \"\"\" ../src/ is used by default.\n \"\"\"\n return os.path.join(BIN_PREFIX, cmd)\n\n\ndef wait_condition(cond, timeout=1, sleeptime=0.01):\n \"\"\"Wait for condition to return anything other than None\n \"\"\"\n if timeout is None:\n timeout = 1\n if timeout < sleeptime:\n print('Warning, timeout cannot be smaller than', sleeptime)\n timeout = sleeptime\n tries = int(timeout / sleeptime)\n for i in range(tries):\n val = cond()\n if val is not None:\n break\n sleep(sleeptime)\n return val\n\n\ndef wait_process(pid, timeout=None):\n \"\"\"Wait for process to finish\n \"\"\"\n\n def process():\n try:\n os.kill(pid, 0)\n except OSError:\n return True\n else:\n return None\n return wait_condition(process, timeout)\n\n\ndef _queue_output(arguments, pidq, outputq):\n \"\"\"Read/Write output/input of given process.\n This function is meant to be executed in a thread as it may block\n \"\"\"\n kwargs = arguments['process']\n input = arguments['input']\n try:\n proc = Popen(**kwargs)\n except OSError as e:\n pidq.put(None)\n outputq.put(('',\n \"Unexpected exception caught during execution: '{0}' . 
\".format\n (e), 255))\n return\n pidq.put(proc.pid)\n out, err = proc.communicate(input)\n out, err = out.decode('utf-8'), err.decode('utf-8')\n outputq.put((out, err, proc.returncode))\n\n\ndef _retrieve_output(thread, timeout, queue, thread_error):\n \"\"\"Fetch output from binary subprocess queues\n \"\"\"\n thread.join(timeout)\n if thread.isAlive():\n raise TimeoutWaitingFor(thread_error + '. Unexpected error')\n try:\n data = queue.get(timeout=timeout)\n except Empty:\n data = TimeoutWaitingFor('streams from program')\n return data\n\n\ndef _get_output(arguments, timeout=None):\n \"\"\"Collect output from the subprocess without blocking the main process if\n subprocess hangs.\n \"\"\"\n output_timeout = 0.1\n pidq = Queue()\n outputq = Queue()\n t = Thread(target=_queue_output, args=(arguments, pidq, outputq))\n t.daemon = True\n t.start()\n try:\n pid = pidq.get(timeout=timeout)\n except Empty:\n pid = None\n if pid is None:\n return _retrieve_output(t, output_timeout, outputq, 'Program to start')\n state = wait_process(pid, timeout)\n if state:\n return _retrieve_output(t, output_timeout, outputq,\n 'Program thread to join')\n for sig in (signal.SIGABRT, signal.SIGTERM, signal.SIGKILL):\n try:\n os.kill(pid, signal.SIGABRT)\n except OSError as e:\n if e.errno != 3:\n raise\n state = wait_process(pid, timeout)\n if state:\n return _retrieve_output(t, output_timeout, outputq,\n 'Program to die')\n raise OSError(\"Program stopped responding and couldn't be killed\")\n\n\ndef run_cmd_wait(cmd, input=None, stdout=PIPE, stderr=PIPE, merge_streams=\n False, env=os.environ, timeout=None):\n \"\"\"Run a subprocess and wait for it to finish\"\"\"\n if input is None:\n stdin = None\n else:\n stdin = PIPE\n if merge_streams:\n stderr = STDOUT\n else:\n stderr = PIPE\n arguments = {'process': {'args': cmd, 'stdin': stdin, 'stdout': stdout,\n 'stderr': stderr, 'bufsize': 1, 'close_fds': ON_POSIX, 'env': env},\n 'input': input}\n out, err, exit = _get_output(arguments, 
timeout)\n if merge_streams:\n if exit != 0:\n raise CommandError(cmd, exit, out)\n else:\n return exit, out\n elif exit != 0:\n raise CommandError(cmd, exit, out, err)\n else:\n return exit, out, err\n\n\ndef run_cmd_wait_nofail(*args, **kwargs):\n \"\"\"Same as run_cmd_wait but silence the exception if it happens\"\"\"\n try:\n return run_cmd_wait(*args, **kwargs)\n except CommandError as e:\n return e.code, e.out, e.err\n\n\ndef memoize(obj):\n \"\"\"Keep an in-memory cache of function results given its inputs\n \"\"\"\n cache = obj.cache = {}\n\n @functools.wraps(obj)\n def memoizer(*args, **kwargs):\n key = str(args) + str(kwargs)\n if key not in cache:\n cache[key] = obj(*args, **kwargs)\n return cache[key]\n return memoizer\n\n\n<mask token>\nwhich = memoize(which)\n\n\ndef parse_datafile(file):\n \"\"\"Parse .data files, treating files as JSON\n \"\"\"\n data = []\n with open(file) as fh:\n for line in fh:\n line = line.rstrip('\\n')\n if line.startswith('[') and line.endswith(']'):\n line = '{' + line[1:-1] + '}'\n if line.startswith('{'):\n data.append(json.loads(line))\n else:\n data.append(line)\n return data\n\n\ndef mkstemp(data):\n \"\"\"\n Create a temporary file that is removed at process exit\n \"\"\"\n\n def rmtemp(name):\n try:\n os.remove(name)\n except OSError:\n pass\n f = tempfile.NamedTemporaryFile(delete=False)\n f.write(data)\n f.close()\n atexit.register(rmtemp, f.name)\n return f.name\n\n\ndef mkstemp_exec(data):\n \"\"\"Create a temporary executable file that is removed at process exit\n \"\"\"\n name = mkstemp(data)\n os.chmod(name, 493)\n return name\n",
"step-5": "# -*- coding: utf-8 -*-\nimport os\nimport sys\nimport socket\nimport signal\nimport functools\nimport atexit\nimport tempfile\nfrom subprocess import Popen, PIPE, STDOUT\nfrom threading import Thread\nfrom queue import Queue, Empty\nfrom time import sleep\nimport json\nfrom .exceptions import CommandError, TimeoutWaitingFor\n\nON_POSIX = 'posix' in sys.builtin_module_names\n\n# Directory relative to basetest module location\nCURRENT_DIR = os.path.dirname(os.path.abspath(__file__))\n\n# Location of binary files (usually the src/ folder)\nBIN_PREFIX = os.path.abspath(\n os.path.join(CURRENT_DIR, \"..\", \"..\", \"src\")\n)\n\n# Default location of test certificates\nDEFAULT_CERT_PATH = os.path.abspath(\n os.path.join(CURRENT_DIR, \"..\", \"test_certs\")\n)\n\n# Default location of test extensions\nDEFAULT_EXTENSION_PATH = os.path.abspath(\n os.path.join(CURRENT_DIR, \"..\", \"test_extensions\")\n)\n\n\n# Environment flags to control skipping of shared tests\nSHARED_SKIP = os.environ.get(\"SHARED_SKIP\", False)\n# Environment flags to control use of PATH or in-tree binaries\nSHARED_USE_PATH = os.environ.get(\"SHARED_USE_PATH\", False)\n\nUUID_REGEXP = (\"[0-9A-Fa-f]{8}-\" + (\"[0-9A-Fa-f]{4}-\" * 3) + \"[0-9A-Fa-f]{12}\")\n\n\ndef shared_binary_location(cmd=\"shared\"):\n \"\"\" ../src/ is used by default.\n \"\"\"\n return os.path.join(BIN_PREFIX, cmd)\n return binary_location(cmd, SHARED_USE_PATH)\n\n\ndef binary_location(cmd, USE_PATH=False):\n \"\"\" ../src/ is used by default.\n \"\"\"\n return os.path.join(BIN_PREFIX, cmd)\n\n\ndef wait_condition(cond, timeout=1, sleeptime=.01):\n \"\"\"Wait for condition to return anything other than None\n \"\"\"\n # NOTE Increasing sleeptime can dramatically increase testsuite runtime\n # It also reduces CPU load significantly\n if timeout is None:\n timeout = 1\n\n if timeout < sleeptime:\n print(\"Warning, timeout cannot be smaller than\", sleeptime)\n timeout = sleeptime\n\n # Max number of attempts until 
giving up\n tries = int(timeout / sleeptime)\n\n for i in range(tries):\n val = cond()\n\n if val is not None:\n break\n\n sleep(sleeptime)\n\n return val\n\n\ndef wait_process(pid, timeout=None):\n \"\"\"Wait for process to finish\n \"\"\"\n def process():\n try:\n os.kill(pid, 0)\n except OSError:\n # Process is dead\n return True\n else:\n # Process is still ticking\n return None\n\n return wait_condition(process, timeout)\n\n\ndef _queue_output(arguments, pidq, outputq):\n \"\"\"Read/Write output/input of given process.\n This function is meant to be executed in a thread as it may block\n \"\"\"\n kwargs = arguments[\"process\"]\n input = arguments[\"input\"]\n\n try:\n proc = Popen(**kwargs)\n except OSError as e:\n # pid None is read by the main thread as a crash of the process\n pidq.put(None)\n\n outputq.put((\n \"\",\n (\"Unexpected exception caught during execution: '{0}' . \".format(e)),\n 255)) # false exitcode\n\n return\n\n # Put the PID in the queue for main process to know.\n pidq.put(proc.pid)\n\n # Send input and wait for finish\n out, err = proc.communicate(input)\n\n out, err = out.decode('utf-8'), err.decode('utf-8')\n\n # Give the output back to the caller\n outputq.put((out, err, proc.returncode))\n\n\ndef _retrieve_output(thread, timeout, queue, thread_error):\n \"\"\"Fetch output from binary subprocess queues\n \"\"\"\n # Try to join the thread on failure abort\n thread.join(timeout)\n if thread.isAlive():\n # Join should have killed the thread. This is unexpected\n raise TimeoutWaitingFor(thread_error + \". 
Unexpected error\")\n\n # Thread died so we should have output\n try:\n # data = (stdout, stderr, exitcode)\n data = queue.get(timeout=timeout)\n except Empty:\n data = TimeoutWaitingFor(\"streams from program\")\n\n return data\n\n\ndef _get_output(arguments, timeout=None):\n \"\"\"Collect output from the subprocess without blocking the main process if\n subprocess hangs.\n \"\"\"\n # NOTE Increase this value if tests fail with None being received as\n # stdout/stderr instead of the expected content\n output_timeout = 0.1 # seconds\n\n pidq = Queue()\n outputq = Queue()\n\n t = Thread(target=_queue_output, args=(arguments, pidq, outputq))\n t.daemon = True\n t.start()\n\n try:\n pid = pidq.get(timeout=timeout)\n except Empty:\n pid = None\n\n # Process crashed or timed out for some reason\n if pid is None:\n return _retrieve_output(t, output_timeout, outputq,\n \"Program to start\")\n\n # Wait for process to finish (normal execution)\n state = wait_process(pid, timeout)\n\n if state:\n # Process finished\n return _retrieve_output(t, output_timeout, outputq,\n \"Program thread to join\")\n\n # If we reach this point we assume the process got stuck or timed out\n for sig in (signal.SIGABRT, signal.SIGTERM, signal.SIGKILL):\n # Start with lower signals and escalate if process ignores them\n try:\n os.kill(pid, signal.SIGABRT)\n except OSError as e:\n # 3 means the process finished/died between last check and now\n if e.errno != 3:\n raise\n\n # Wait for process to finish (should die/exit after signal)\n state = wait_process(pid, timeout)\n\n if state:\n # Process finished\n return _retrieve_output(t, output_timeout, outputq,\n \"Program to die\")\n\n # This should never happen but in case something goes really bad\n raise OSError(\"Program stopped responding and couldn't be killed\")\n\n\ndef run_cmd_wait(cmd, input=None, stdout=PIPE, stderr=PIPE,\n merge_streams=False, env=os.environ, timeout=None):\n \"Run a subprocess and wait for it to finish\"\n\n if input is 
None:\n stdin = None\n else:\n stdin = PIPE\n\n if merge_streams:\n stderr = STDOUT\n else:\n stderr = PIPE\n\n arguments = {\n \"process\": {\n \"args\": cmd,\n \"stdin\": stdin,\n \"stdout\": stdout,\n \"stderr\": stderr,\n \"bufsize\": 1,\n \"close_fds\": ON_POSIX,\n \"env\": env,\n },\n \"input\": input,\n }\n out, err, exit = _get_output(arguments, timeout)\n\n if merge_streams:\n if exit != 0:\n raise CommandError(cmd, exit, out)\n else:\n return exit, out\n else:\n if exit != 0:\n raise CommandError(cmd, exit, out, err)\n else:\n return exit, out, err\n\n\ndef run_cmd_wait_nofail(*args, **kwargs):\n \"\"\"Same as run_cmd_wait but silence the exception if it happens\"\"\"\n try:\n return run_cmd_wait(*args, **kwargs)\n except CommandError as e:\n return e.code, e.out, e.err\n\n\ndef memoize(obj):\n \"\"\"Keep an in-memory cache of function results given its inputs\n \"\"\"\n cache = obj.cache = {}\n\n @functools.wraps(obj)\n def memoizer(*args, **kwargs):\n key = str(args) + str(kwargs)\n if key not in cache:\n cache[key] = obj(*args, **kwargs)\n return cache[key]\n return memoizer\n\n\nfrom shutil import which\nwhich = memoize(which)\n\n\ndef parse_datafile(file):\n \"\"\"Parse .data files, treating files as JSON\n \"\"\"\n data = []\n with open(file) as fh:\n for line in fh:\n line = line.rstrip(\"\\n\")\n\n # Turn [] strings into {} to be treated properly as JSON hashes\n if line.startswith('[') and line.endswith(']'):\n line = '{' + line[1:-1] + '}'\n\n if line.startswith(\"{\"):\n data.append(json.loads(line))\n else:\n data.append(line)\n return data\n\n\ndef mkstemp(data):\n \"\"\"\n Create a temporary file that is removed at process exit\n \"\"\"\n def rmtemp(name):\n try:\n os.remove(name)\n except OSError:\n pass\n\n f = tempfile.NamedTemporaryFile(delete=False)\n f.write(data)\n f.close()\n\n # Ensure removal at end of python session\n atexit.register(rmtemp, f.name)\n\n return f.name\n\n\ndef mkstemp_exec(data):\n \"\"\"Create a temporary 
executable file that is removed at process exit\n \"\"\"\n name = mkstemp(data)\n os.chmod(name, 0o755)\n\n return name\n\n# vim: ai sts=4 et sw=4\n",
"step-ids": [
7,
11,
13,
14,
16
]
}
|
[
7,
11,
13,
14,
16
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def main():
return random.randint(1, 6)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def main():
return random.randint(1, 6)
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
import random
def main():
return random.randint(1, 6)
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
import random
def main():
#print('You rolled a die')
return random.randint(1,6)
if __name__== "__main__":
main()
|
flexible
|
{
"blob_id": "6d92b944ab8503d3635626c0c23021fc2b40dce3",
"index": 5732,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef main():\n return random.randint(1, 6)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef main():\n return random.randint(1, 6)\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "import random\n\n\ndef main():\n return random.randint(1, 6)\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "import random\n\ndef main():\n #print('You rolled a die')\n return random.randint(1,6)\n\nif __name__== \"__main__\":\n main()\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
'''
The while statement allows you to repeatedly execute a block of statements as long as a condition is true.
A while statement is an example of what is called a looping statement. A while statement can have an optional else clause.
'''
#Modifying the values using while loop in a list
l1: list = [1,2,3,4,5,6,7,8,9,10]
print("The original list: " , l1)
i=0
while (i < len(l1)):
l1[i] = l1[i] + 100
i=i+1
print("The modified new list is: ", l1)
#Guessing game using while-else loop
number = 23
while True:
guess = int(input('Enter an integer : ')) #input statement to enter data from console
if guess == number:
print('Congratulations, you guessed it.')
break
elif guess < number:
print('No, it is a little higher than that.')
continue
else:
print('No, it is a little lower than that.')
continue
# Do anything else you want to do here
print('Done')
|
normal
|
{
"blob_id": "6a3fd3323ed8792853afdf5af76161f3e20d4896",
"index": 4443,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nl1: list = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]\nprint('The original list: ', l1)\n<mask token>\nwhile i < len(l1):\n l1[i] = l1[i] + 100\n i = i + 1\nprint('The modified new list is: ', l1)\n<mask token>\nwhile True:\n guess = int(input('Enter an integer : '))\n if guess == number:\n print('Congratulations, you guessed it.')\n break\n elif guess < number:\n print('No, it is a little higher than that.')\n continue\n else:\n print('No, it is a little lower than that.')\n continue\nprint('Done')\n",
"step-3": "<mask token>\nl1: list = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]\nprint('The original list: ', l1)\ni = 0\nwhile i < len(l1):\n l1[i] = l1[i] + 100\n i = i + 1\nprint('The modified new list is: ', l1)\nnumber = 23\nwhile True:\n guess = int(input('Enter an integer : '))\n if guess == number:\n print('Congratulations, you guessed it.')\n break\n elif guess < number:\n print('No, it is a little higher than that.')\n continue\n else:\n print('No, it is a little lower than that.')\n continue\nprint('Done')\n",
"step-4": "'''\nThe while statement allows you to repeatedly execute a block of statements as long as a condition is true.\nA while statement is an example of what is called a looping statement. A while statement can have an optional else clause.\n'''\n\n#Modifying the values using while loop in a list\nl1: list = [1,2,3,4,5,6,7,8,9,10]\nprint(\"The original list: \" , l1)\n\ni=0\nwhile (i < len(l1)):\n l1[i] = l1[i] + 100\n i=i+1\nprint(\"The modified new list is: \", l1)\n\n#Guessing game using while-else loop\nnumber = 23\n\nwhile True:\n guess = int(input('Enter an integer : ')) #input statement to enter data from console\n if guess == number:\n print('Congratulations, you guessed it.')\n break\n elif guess < number:\n print('No, it is a little higher than that.')\n continue\n else:\n print('No, it is a little lower than that.')\n continue\n\n# Do anything else you want to do here\nprint('Done')\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
def fun(st,n):
suffix=[0 for i in range(n)]
prefix=[0 for i in range(n)]
count=0
for i,val in enumerate(st):
if(val=='*'):
if(i==0):
prefix[i]=0
count+=1
else:
prefix[i]=prefix[i-1]
count+=1
else:
if(i==0):
prefix[i]=0
count+=0
else:
prefix[i]=prefix[i-1]+count
count+=0
count=0
for i in range(n-1,-1,-1):
val=st[i]
if(val=='*'):
if(i==n-1):
suffix[i]=0
count+=1
else:
suffix[i]=suffix[i+1]
count+=1
else:
if(i==n-1):
suffix[i]=0
count+=0
else:
suffix[i]=suffix[i+1]+count
count+=0
ans=10**12
for i in range(n):
if(i!=n-1):
ans=min(ans,prefix[i]+suffix[i+1])
else:
ans=min(ans,prefix[i])
print(ans)
T = int(input())
for _ in range(T):
n=int(input())
st=input()
fun(st,n)
|
normal
|
{
"blob_id": "77c7ca3391426d1e56e15a93ef3e6227a45140fc",
"index": 2829,
"step-1": "<mask token>\n",
"step-2": "def fun(st, n):\n suffix = [(0) for i in range(n)]\n prefix = [(0) for i in range(n)]\n count = 0\n for i, val in enumerate(st):\n if val == '*':\n if i == 0:\n prefix[i] = 0\n count += 1\n else:\n prefix[i] = prefix[i - 1]\n count += 1\n elif i == 0:\n prefix[i] = 0\n count += 0\n else:\n prefix[i] = prefix[i - 1] + count\n count += 0\n count = 0\n for i in range(n - 1, -1, -1):\n val = st[i]\n if val == '*':\n if i == n - 1:\n suffix[i] = 0\n count += 1\n else:\n suffix[i] = suffix[i + 1]\n count += 1\n elif i == n - 1:\n suffix[i] = 0\n count += 0\n else:\n suffix[i] = suffix[i + 1] + count\n count += 0\n ans = 10 ** 12\n for i in range(n):\n if i != n - 1:\n ans = min(ans, prefix[i] + suffix[i + 1])\n else:\n ans = min(ans, prefix[i])\n print(ans)\n\n\n<mask token>\n",
"step-3": "def fun(st, n):\n suffix = [(0) for i in range(n)]\n prefix = [(0) for i in range(n)]\n count = 0\n for i, val in enumerate(st):\n if val == '*':\n if i == 0:\n prefix[i] = 0\n count += 1\n else:\n prefix[i] = prefix[i - 1]\n count += 1\n elif i == 0:\n prefix[i] = 0\n count += 0\n else:\n prefix[i] = prefix[i - 1] + count\n count += 0\n count = 0\n for i in range(n - 1, -1, -1):\n val = st[i]\n if val == '*':\n if i == n - 1:\n suffix[i] = 0\n count += 1\n else:\n suffix[i] = suffix[i + 1]\n count += 1\n elif i == n - 1:\n suffix[i] = 0\n count += 0\n else:\n suffix[i] = suffix[i + 1] + count\n count += 0\n ans = 10 ** 12\n for i in range(n):\n if i != n - 1:\n ans = min(ans, prefix[i] + suffix[i + 1])\n else:\n ans = min(ans, prefix[i])\n print(ans)\n\n\n<mask token>\nfor _ in range(T):\n n = int(input())\n st = input()\n fun(st, n)\n",
"step-4": "def fun(st, n):\n suffix = [(0) for i in range(n)]\n prefix = [(0) for i in range(n)]\n count = 0\n for i, val in enumerate(st):\n if val == '*':\n if i == 0:\n prefix[i] = 0\n count += 1\n else:\n prefix[i] = prefix[i - 1]\n count += 1\n elif i == 0:\n prefix[i] = 0\n count += 0\n else:\n prefix[i] = prefix[i - 1] + count\n count += 0\n count = 0\n for i in range(n - 1, -1, -1):\n val = st[i]\n if val == '*':\n if i == n - 1:\n suffix[i] = 0\n count += 1\n else:\n suffix[i] = suffix[i + 1]\n count += 1\n elif i == n - 1:\n suffix[i] = 0\n count += 0\n else:\n suffix[i] = suffix[i + 1] + count\n count += 0\n ans = 10 ** 12\n for i in range(n):\n if i != n - 1:\n ans = min(ans, prefix[i] + suffix[i + 1])\n else:\n ans = min(ans, prefix[i])\n print(ans)\n\n\nT = int(input())\nfor _ in range(T):\n n = int(input())\n st = input()\n fun(st, n)\n",
"step-5": "def fun(st,n):\n suffix=[0 for i in range(n)]\n prefix=[0 for i in range(n)]\n count=0\n for i,val in enumerate(st):\n if(val=='*'):\n if(i==0):\n prefix[i]=0\n count+=1\n else:\n prefix[i]=prefix[i-1]\n count+=1\n else:\n if(i==0):\n prefix[i]=0\n count+=0\n else:\n prefix[i]=prefix[i-1]+count\n count+=0\n count=0\n for i in range(n-1,-1,-1):\n val=st[i]\n if(val=='*'):\n if(i==n-1):\n suffix[i]=0\n count+=1\n else:\n suffix[i]=suffix[i+1]\n count+=1\n else:\n if(i==n-1):\n suffix[i]=0\n count+=0\n else:\n suffix[i]=suffix[i+1]+count\n count+=0\n ans=10**12\n for i in range(n):\n if(i!=n-1):\n ans=min(ans,prefix[i]+suffix[i+1])\n else:\n ans=min(ans,prefix[i])\n print(ans)\n\nT = int(input())\nfor _ in range(T):\n n=int(input())\n st=input()\n fun(st,n)",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class TestGetExamTickets(ApiTestCase):
get_exams: ApiClient
session: ExamSession
student_session: UserSession
questions: List[Question]
tickets: List[ExamTicket]
ticket_map: Dict[str, ExamTicket]
def setUp(self):
super().setUp()
self.get_exam_questions = ApiClient('/api/tickets', student=self.
student)
self.setup_exam_objects()
def tearDown(self):
self.teardown_exam_objects()
super().tearDown()
def test_get_exam_questions(self):
self.assertFalse(self.student_session.check_in)
result = self.assertResponseSuccess(self.get_exam_questions.post(
session_id=self.student_session.id))
self.assertEqual(result['status'], ExamStatus.available)
self.assertEqual(result['score'], None)
self.student_session.refresh_from_db()
self.assertTrue(self.student_session.check_in)
questions = result['questions']
self.assertIsInstance(questions, list)
self.assertEqual(len(questions), len(self.tickets))
self.assertEqual([x['id'] for x in questions], [x.id for x in
sorted(self.tickets, key=lambda x: x.question.stage)])
for question in questions:
ticket = self.ticket_map[question['id']]
ticket_question = ticket.question
self.assertEqual(question.pop('id'), ticket.id)
view = ticket_question.as_dict
view.pop('id')
self.assertEqual(question, view)
def test_get_exam_questions_already_checked_in(self):
self.student_session.check_in = True
checkin_date = self.student_session.started_at
result = self.assertResponseSuccess(self.get_exam_questions.post(
session_id=self.student_session.id))
self.assertEqual(result['status'], ExamStatus.available)
self.assertEqual(result['score'], None)
self.student_session.refresh_from_db()
self.assertTrue(self.student_session.check_in)
self.assertEqual(self.student_session.started_at, checkin_date)
questions = result['questions']
self.assertIsInstance(questions, list)
self.assertEqual(len(questions), len(self.tickets))
<|reserved_special_token_0|>
def test_get_exam_questions_submitted(self):
self.student_session.finished_at = timezone.now()
self.student_session.save()
ANSWER = 'answer'
for ticket in self.tickets:
ticket.answer = ANSWER
ticket.save()
result = self.assertResponseSuccess(self.get_exam_questions.post(
session_id=self.student_session.id))
self.assertEqual(result['status'], ExamStatus.submitted)
self.assertEqual(result['score'], None)
questions = result['questions']
self.assertIsInstance(questions, list)
self.assertEqual(len(questions), len(self.tickets))
for question in questions:
ticket = self.ticket_map[question['id']]
ticket.refresh_from_db()
answer = question.pop('answer')
self.assertEqual(answer, ticket.answer)
self.assertEqual(question['score'], None)
def test_get_exam_questions_submitted_and_scored(self):
self.student_session.finished_at = timezone.now()
self.student_session.save()
ANSWER = 'answer'
for ticket in self.tickets:
ticket.answer = ANSWER
ticket.score = 1.0
ticket.save()
result = self.assertResponseSuccess(self.get_exam_questions.post(
session_id=self.student_session.id))
self.assertEqual(result['status'], ExamStatus.submitted)
self.assertEqual(result['score'], sum(t.score for t in self.tickets))
questions = result['questions']
self.assertIsInstance(questions, list)
self.assertEqual(len(questions), len(self.tickets))
for question in questions:
ticket = self.ticket_map[question['id']]
ticket.refresh_from_db()
self.assertEqual(question['score'], ticket.score)
def test_get_exam_questions_invalid_params(self):
self.assertResponseError(self.get_exam_questions.post(), errors.
InvalidParameter('session_id'))
self.assertResponseError(self.get_exam_questions.post(session_id=
uuid_str()), errors.ExamNotFound)
self.get_exam_questions.cookies = {}
self.assertResponseError(self.get_exam_questions.post(session_id=
self.student_session.id), errors.Unauthorized)
response = self.get_exam_questions.get()
self.assertEqual(response.status_code, 405)
class TestSubmitExam(ApiTestCase):
def setUp(self):
super().setUp()
self.submit_exam = ApiClient('/api/submit', student=self.student)
self.setup_exam_objects()
def tearDown(self):
self.teardown_exam_objects()
super().tearDown()
def test_submit_exam(self):
answers = {}
ANSWER = 'answer'
for ticket in self.tickets:
if ticket.question.type == QuestionType.single:
answers[ticket.id] = random.randint(0, len(ticket.question.
options) - 1)
elif ticket.question.type == QuestionType.multi:
answers[ticket.id] = random.sample(list(range(0, len(ticket
.question.options))), k=random.randint(0, len(ticket.
question.options)))
else:
answers[ticket.id] = ANSWER
result = self.assertResponseSuccess(self.submit_exam.post(
session_id=self.student_session.id, answers=answers))
self.assertEqual(result, True)
self.student_session.refresh_from_db()
self.assertEqual(self.student_session.status, ExamStatus.submitted)
for ticket in self.tickets:
ticket.refresh_from_db()
if ticket.question.type == QuestionType.single:
self.assertEqual(ticket.answer, ticket.question.options[
answers[ticket.id]])
elif ticket.question.type == QuestionType.multi:
self.assertEqual(ticket.answer, ';'.join([ticket.question.
options[x] for x in sorted(answers[ticket.id])]))
self.assertIsNotNone(ticket.answered_at)
def test_submit_without_any_answer(self):
result = self.assertResponseSuccess(self.submit_exam.post(
session_id=self.student_session.id, answers={}))
self.assertEqual(result, True)
self.student_session.refresh_from_db()
self.assertEqual(self.student_session.status, ExamStatus.submitted)
for ticket in self.tickets:
ticket.refresh_from_db()
self.assertIsNone(ticket.answered_at)
self.assertIsNone(ticket.answer)
def test_submit_partial_answer_errors(self):
ANSWER = 'answer'
answers = {self.tickets[0].id: len(self.tickets[0].question.options
), self.tickets[1].id: 0, self.tickets[2].id: ANSWER, uuid_str(
): ANSWER, (self.tickets[2].id + 1): ANSWER}
result = self.assertResponseSuccess(self.submit_exam.post(
session_id=self.student_session.id, answers=answers))
self.assertEqual(result, True)
self.student_session.refresh_from_db()
self.assertEqual(self.student_session.status, ExamStatus.submitted)
for ticket in self.tickets:
ticket.refresh_from_db()
self.assertIsNone(self.tickets[0].answer)
self.assertIsNone(self.tickets[0].answered_at)
self.assertIsNone(self.tickets[1].answer)
self.assertIsNone(self.tickets[1].answered_at)
self.assertEqual(self.tickets[2].answer, ANSWER)
self.assertIsNotNone(self.tickets[2].answered_at)
def test_submit_errors(self):
self.assertResponseError(self.submit_exam.post(), errors.
InvalidParameter('session_id'))
self.assertResponseError(self.submit_exam.post(session_id=123),
errors.InvalidParameter('session_id'))
self.assertResponseError(self.submit_exam.post(session_id=uuid_str(
)), errors.InvalidParameter('answers'))
self.assertResponseError(self.submit_exam.post(session_id=uuid_str(
), answers=[]), errors.InvalidParameter('answers'))
self.assertResponseError(self.submit_exam.post(session_id=uuid_str(
), answers={}), errors.ExamNotFound)
self.session.start_time += self.session.duration
self.session.save()
self.assertResponseError(self.submit_exam.post(session_id=self.
student_session.id, answers={}), errors.ExamNotAvailable)
self.student_session.start_time = timezone.now()
self.student_session.save()
self.assertResponseError(self.submit_exam.post(session_id=self.
student_session.id, answers={}), errors.ExamNotAvailable)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ApiTestCase(TestCase):
group: AcademyGroup
student: Student
@classmethod
def setUpClass(cls):
super().setUpClass()
cls.group = AcademyGroup.objects.create(name='test_group')
cls.student = Student.objects.create(name='test user', group=cls.group)
@classmethod
def tearDownClass(cls):
cls.student.delete()
cls.group.delete()
super().tearDownClass()
<|reserved_special_token_0|>
def teardown_exam_objects(self):
for ticket in self.tickets:
ticket.delete()
for question in self.questions:
question.delete()
self.student_session.delete()
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class TestAuthorize(ApiTestCase):
authorize: ApiClient
def setUp(self):
super().setUp()
self.authorize = ApiClient('/api/authorize')
def test_authorized(self):
response = self.authorize.post(token=self.student.id)
result = self.assertResponseSuccess(response)
self.assertEqual(response.cookies['student'].value, self.student.id)
self.assertEqual(result['name'], self.student.name)
self.assertEqual(result['group'], self.group.name)
self.assertEqual(result['id'], self.student.id)
def test_authorized_unknown_token(self):
response = self.authorize.post(token=uuid_str())
self.assertResponseError(response, errors.Unauthorized)
def test_authorized_invalid_params(self):
response = self.authorize.post()
self.assertResponseError(response, errors.InvalidParameter('token'))
response = self.authorize.post(token=12345678)
self.assertResponseError(response, errors.InvalidParameter('token'))
response = self.authorize.get()
self.assertEqual(response.status_code, 405)
class TestGetExamSessions(ApiTestCase):
get_exams: ApiClient
session: ExamSession
student_session: UserSession
questions: List[Question]
tickets: List[ExamTicket]
def setUp(self):
super().setUp()
self.get_exams = ApiClient('/api/exams', student=self.student)
self.setup_exam_objects()
def tearDown(self):
self.teardown_exam_objects()
super().tearDown()
def test_get_exams_available(self):
result = self.assertResponseSuccess(self.get_exams.get())
self.assertIsInstance(result, list)
self.assertEqual(len(result), 1)
user_session = result[0]
self.assertEqual(user_session['started_at'], self.session.
start_time.isoformat())
self.assertEqual(user_session['duration'], self.session.duration.
total_seconds() / 60)
self.assertEqual(user_session['checked_in'], False)
self.assertEqual(user_session['finished_at'], None)
self.assertEqual(user_session['status'], ExamStatus.available.value)
self.assertEqual(user_session['score'], None)
def test_get_exams_check_in(self):
self.student_session.started_at = timezone.now()
self.student_session.save()
result = self.assertResponseSuccess(self.get_exams.get())
user_session = result[0]
self.assertEqual(user_session['checked_in'], True)
def test_get_exams_submitted(self):
now = timezone.now()
self.student_session.started_at = timezone.now()
self.student_session.finished_at = now
self.student_session.save()
result = self.assertResponseSuccess(self.get_exams.get())
user_session = result[0]
self.assertEqual(user_session['finished_at'], now.isoformat())
self.assertEqual(user_session['status'], ExamStatus.submitted)
self.assertEqual(user_session['score'], None)
def test_get_exams_non_available(self):
self.session.start_time = timezone.now() + self.session.duration
self.session.save()
result = self.assertResponseSuccess(self.get_exams.get())
user_session = result[0]
self.assertEqual(user_session['started_at'], self.session.
start_time.isoformat())
self.assertEqual(user_session['finished_at'], None)
self.assertEqual(user_session['status'], ExamStatus.not_available)
def test_get_exams_unauthorized(self):
self.get_exams.cookies = {}
self.assertResponseError(self.get_exams.get(), errors.Unauthorized)
response = self.get_exams.post()
self.assertEqual(response.status_code, 405)
def test_get_exams_score(self):
for ticket in self.tickets:
ticket.score = 1.0
ticket.save()
result = self.assertResponseSuccess(self.get_exams.get())
user_session = result[0]
self.assertEqual(user_session['score'], sum(t.score for t in self.
tickets))
self.tickets[0].score = None
self.tickets[0].save()
result = self.assertResponseSuccess(self.get_exams.get())
user_session = result[0]
self.assertEqual(user_session['score'], None)
class TestGetExamTickets(ApiTestCase):
get_exams: ApiClient
session: ExamSession
student_session: UserSession
questions: List[Question]
tickets: List[ExamTicket]
ticket_map: Dict[str, ExamTicket]
def setUp(self):
super().setUp()
self.get_exam_questions = ApiClient('/api/tickets', student=self.
student)
self.setup_exam_objects()
def tearDown(self):
self.teardown_exam_objects()
super().tearDown()
def test_get_exam_questions(self):
self.assertFalse(self.student_session.check_in)
result = self.assertResponseSuccess(self.get_exam_questions.post(
session_id=self.student_session.id))
self.assertEqual(result['status'], ExamStatus.available)
self.assertEqual(result['score'], None)
self.student_session.refresh_from_db()
self.assertTrue(self.student_session.check_in)
questions = result['questions']
self.assertIsInstance(questions, list)
self.assertEqual(len(questions), len(self.tickets))
self.assertEqual([x['id'] for x in questions], [x.id for x in
sorted(self.tickets, key=lambda x: x.question.stage)])
for question in questions:
ticket = self.ticket_map[question['id']]
ticket_question = ticket.question
self.assertEqual(question.pop('id'), ticket.id)
view = ticket_question.as_dict
view.pop('id')
self.assertEqual(question, view)
def test_get_exam_questions_already_checked_in(self):
self.student_session.check_in = True
checkin_date = self.student_session.started_at
result = self.assertResponseSuccess(self.get_exam_questions.post(
session_id=self.student_session.id))
self.assertEqual(result['status'], ExamStatus.available)
self.assertEqual(result['score'], None)
self.student_session.refresh_from_db()
self.assertTrue(self.student_session.check_in)
self.assertEqual(self.student_session.started_at, checkin_date)
questions = result['questions']
self.assertIsInstance(questions, list)
self.assertEqual(len(questions), len(self.tickets))
def test_get_exam_questions_not_available(self):
self.session.start_time += self.session.duration
self.session.save()
result = self.assertResponseSuccess(self.get_exam_questions.post(
session_id=self.student_session.id))
self.assertEqual(result['status'], ExamStatus.not_available)
self.assertEqual(result['score'], None)
questions = result['questions']
self.assertIsInstance(questions, list)
self.assertEqual(len(questions), 0)
def test_get_exam_questions_submitted(self):
self.student_session.finished_at = timezone.now()
self.student_session.save()
ANSWER = 'answer'
for ticket in self.tickets:
ticket.answer = ANSWER
ticket.save()
result = self.assertResponseSuccess(self.get_exam_questions.post(
session_id=self.student_session.id))
self.assertEqual(result['status'], ExamStatus.submitted)
self.assertEqual(result['score'], None)
questions = result['questions']
self.assertIsInstance(questions, list)
self.assertEqual(len(questions), len(self.tickets))
for question in questions:
ticket = self.ticket_map[question['id']]
ticket.refresh_from_db()
answer = question.pop('answer')
self.assertEqual(answer, ticket.answer)
self.assertEqual(question['score'], None)
def test_get_exam_questions_submitted_and_scored(self):
self.student_session.finished_at = timezone.now()
self.student_session.save()
ANSWER = 'answer'
for ticket in self.tickets:
ticket.answer = ANSWER
ticket.score = 1.0
ticket.save()
result = self.assertResponseSuccess(self.get_exam_questions.post(
session_id=self.student_session.id))
self.assertEqual(result['status'], ExamStatus.submitted)
self.assertEqual(result['score'], sum(t.score for t in self.tickets))
questions = result['questions']
self.assertIsInstance(questions, list)
self.assertEqual(len(questions), len(self.tickets))
for question in questions:
ticket = self.ticket_map[question['id']]
ticket.refresh_from_db()
self.assertEqual(question['score'], ticket.score)
def test_get_exam_questions_invalid_params(self):
self.assertResponseError(self.get_exam_questions.post(), errors.
InvalidParameter('session_id'))
self.assertResponseError(self.get_exam_questions.post(session_id=
uuid_str()), errors.ExamNotFound)
self.get_exam_questions.cookies = {}
self.assertResponseError(self.get_exam_questions.post(session_id=
self.student_session.id), errors.Unauthorized)
response = self.get_exam_questions.get()
self.assertEqual(response.status_code, 405)
class TestSubmitExam(ApiTestCase):
def setUp(self):
super().setUp()
self.submit_exam = ApiClient('/api/submit', student=self.student)
self.setup_exam_objects()
def tearDown(self):
self.teardown_exam_objects()
super().tearDown()
def test_submit_exam(self):
answers = {}
ANSWER = 'answer'
for ticket in self.tickets:
if ticket.question.type == QuestionType.single:
answers[ticket.id] = random.randint(0, len(ticket.question.
options) - 1)
elif ticket.question.type == QuestionType.multi:
answers[ticket.id] = random.sample(list(range(0, len(ticket
.question.options))), k=random.randint(0, len(ticket.
question.options)))
else:
answers[ticket.id] = ANSWER
result = self.assertResponseSuccess(self.submit_exam.post(
session_id=self.student_session.id, answers=answers))
self.assertEqual(result, True)
self.student_session.refresh_from_db()
self.assertEqual(self.student_session.status, ExamStatus.submitted)
for ticket in self.tickets:
ticket.refresh_from_db()
if ticket.question.type == QuestionType.single:
self.assertEqual(ticket.answer, ticket.question.options[
answers[ticket.id]])
elif ticket.question.type == QuestionType.multi:
self.assertEqual(ticket.answer, ';'.join([ticket.question.
options[x] for x in sorted(answers[ticket.id])]))
self.assertIsNotNone(ticket.answered_at)
def test_submit_without_any_answer(self):
result = self.assertResponseSuccess(self.submit_exam.post(
session_id=self.student_session.id, answers={}))
self.assertEqual(result, True)
self.student_session.refresh_from_db()
self.assertEqual(self.student_session.status, ExamStatus.submitted)
for ticket in self.tickets:
ticket.refresh_from_db()
self.assertIsNone(ticket.answered_at)
self.assertIsNone(ticket.answer)
def test_submit_partial_answer_errors(self):
ANSWER = 'answer'
answers = {self.tickets[0].id: len(self.tickets[0].question.options
), self.tickets[1].id: 0, self.tickets[2].id: ANSWER, uuid_str(
): ANSWER, (self.tickets[2].id + 1): ANSWER}
result = self.assertResponseSuccess(self.submit_exam.post(
session_id=self.student_session.id, answers=answers))
self.assertEqual(result, True)
self.student_session.refresh_from_db()
self.assertEqual(self.student_session.status, ExamStatus.submitted)
for ticket in self.tickets:
ticket.refresh_from_db()
self.assertIsNone(self.tickets[0].answer)
self.assertIsNone(self.tickets[0].answered_at)
self.assertIsNone(self.tickets[1].answer)
self.assertIsNone(self.tickets[1].answered_at)
self.assertEqual(self.tickets[2].answer, ANSWER)
self.assertIsNotNone(self.tickets[2].answered_at)
def test_submit_errors(self):
self.assertResponseError(self.submit_exam.post(), errors.
InvalidParameter('session_id'))
self.assertResponseError(self.submit_exam.post(session_id=123),
errors.InvalidParameter('session_id'))
self.assertResponseError(self.submit_exam.post(session_id=uuid_str(
)), errors.InvalidParameter('answers'))
self.assertResponseError(self.submit_exam.post(session_id=uuid_str(
), answers=[]), errors.InvalidParameter('answers'))
self.assertResponseError(self.submit_exam.post(session_id=uuid_str(
), answers={}), errors.ExamNotFound)
self.session.start_time += self.session.duration
self.session.save()
self.assertResponseError(self.submit_exam.post(session_id=self.
student_session.id, answers={}), errors.ExamNotAvailable)
self.student_session.start_time = timezone.now()
self.student_session.save()
self.assertResponseError(self.submit_exam.post(session_id=self.
student_session.id, answers={}), errors.ExamNotAvailable)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ApiClient(Client):
path: str
def __init__(self, path: str, student: Student=None, *args, **kwargs):
super().__init__(*args, **kwargs)
self.student = student
self.path = path
self.headers = {'content_type': 'application/json'}
if student:
self.cookies['student'] = student.id
def path_params(self, **params):
return ApiClient(self.path.format(**params), self.student)
def get(self, **kwargs):
return super().get(self.path, data=kwargs, **self.headers)
def post(self, **json):
return super().post(self.path, data=json, **self.headers)
<|reserved_special_token_0|>
class ApiTestCase(TestCase):
group: AcademyGroup
student: Student
@classmethod
def setUpClass(cls):
super().setUpClass()
cls.group = AcademyGroup.objects.create(name='test_group')
cls.student = Student.objects.create(name='test user', group=cls.group)
@classmethod
def tearDownClass(cls):
cls.student.delete()
cls.group.delete()
super().tearDownClass()
def setup_exam_objects(self):
self.session = ExamSession.objects.create(start_time=timezone.now(),
duration=timedelta(minutes=40))
self.student_session = UserSession.objects.create(student=self.
student, exam_session=self.session)
self.questions = [Question.objects.create(stage=Stage.first, type=
QuestionType.single, max_score=1, text='test single question',
options=['a', 'b', 'c']), Question.objects.create(stage=Stage.
first, type=QuestionType.multi, max_score=1, text=
'test multi question', options=['a', 'b', 'c']), Question.
objects.create(stage=Stage.second, type=QuestionType.open,
max_score=1, text='test open question', options=None)]
self.tickets = [ExamTicket.objects.create(student=self.student,
session=self.student_session, question=question) for question in
self.questions]
self.ticket_map = {x.id: x for x in self.tickets}
def teardown_exam_objects(self):
for ticket in self.tickets:
ticket.delete()
for question in self.questions:
question.delete()
self.student_session.delete()
def assertResponseSuccess(self, response: http.HttpResponse):
content = response.content.decode()
self.assertEqual(response.status_code, 200, (response.status_code,
content))
content = response.json()
self.assertIn('result', content, content)
return content['result']
def assertResponseError(self, response: http.JsonResponse, error: Union
[errors.APIError, Type[errors.APIError]]=None) ->Tuple[int, str]:
content = response.json()
self.assertGreaterEqual(response.status_code, 400, (response.
status_code, content))
self.assertIn('error', content, content)
if error is not None:
if isinstance(error, type):
error = error()
self.assertEqual(response.status_code, error.status, (response.
status_code, content))
self.assertEqual(content['error'], error.message, (response.
status_code, content))
return response.status_code, content['error']
class TestAuthorize(ApiTestCase):
authorize: ApiClient
def setUp(self):
super().setUp()
self.authorize = ApiClient('/api/authorize')
def test_authorized(self):
response = self.authorize.post(token=self.student.id)
result = self.assertResponseSuccess(response)
self.assertEqual(response.cookies['student'].value, self.student.id)
self.assertEqual(result['name'], self.student.name)
self.assertEqual(result['group'], self.group.name)
self.assertEqual(result['id'], self.student.id)
def test_authorized_unknown_token(self):
response = self.authorize.post(token=uuid_str())
self.assertResponseError(response, errors.Unauthorized)
def test_authorized_invalid_params(self):
response = self.authorize.post()
self.assertResponseError(response, errors.InvalidParameter('token'))
response = self.authorize.post(token=12345678)
self.assertResponseError(response, errors.InvalidParameter('token'))
response = self.authorize.get()
self.assertEqual(response.status_code, 405)
class TestGetExamSessions(ApiTestCase):
    """Tests for the /api/exams listing endpoint."""

    get_exams: ApiClient
    session: ExamSession
    student_session: UserSession
    questions: List[Question]
    tickets: List[ExamTicket]

    def setUp(self):
        super().setUp()
        self.get_exams = ApiClient('/api/exams', student=self.student)
        self.setup_exam_objects()

    def tearDown(self):
        self.teardown_exam_objects()
        super().tearDown()

    def _fetch_single_session(self):
        """Fetch the exam list and return its first (only) session view."""
        payload = self.assertResponseSuccess(self.get_exams.get())
        return payload[0]

    def test_get_exams_available(self):
        """An upcoming session is reported with all default fields."""
        payload = self.assertResponseSuccess(self.get_exams.get())
        self.assertIsInstance(payload, list)
        self.assertEqual(len(payload), 1)
        view = payload[0]
        self.assertEqual(view['started_at'],
                         self.session.start_time.isoformat())
        self.assertEqual(view['duration'],
                         self.session.duration.total_seconds() / 60)
        self.assertEqual(view['checked_in'], False)
        self.assertEqual(view['finished_at'], None)
        self.assertEqual(view['status'], ExamStatus.available.value)
        self.assertEqual(view['score'], None)

    def test_get_exams_check_in(self):
        """Setting started_at marks the session as checked in."""
        self.student_session.started_at = timezone.now()
        self.student_session.save()
        self.assertEqual(self._fetch_single_session()['checked_in'], True)

    def test_get_exams_submitted(self):
        """A finished session is reported as submitted, score pending."""
        now = timezone.now()
        self.student_session.started_at = timezone.now()
        self.student_session.finished_at = now
        self.student_session.save()
        view = self._fetch_single_session()
        self.assertEqual(view['finished_at'], now.isoformat())
        self.assertEqual(view['status'], ExamStatus.submitted)
        self.assertEqual(view['score'], None)

    def test_get_exams_non_available(self):
        """A session whose start time is in the future is not available."""
        self.session.start_time = timezone.now() + self.session.duration
        self.session.save()
        view = self._fetch_single_session()
        self.assertEqual(view['started_at'],
                         self.session.start_time.isoformat())
        self.assertEqual(view['finished_at'], None)
        self.assertEqual(view['status'], ExamStatus.not_available)

    def test_get_exams_unauthorized(self):
        """No auth cookie -> Unauthorized; POST is not allowed."""
        self.get_exams.cookies = {}
        self.assertResponseError(self.get_exams.get(), errors.Unauthorized)
        self.assertEqual(self.get_exams.post().status_code, 405)

    def test_get_exams_score(self):
        """The total score appears only when every ticket is scored."""
        for ticket in self.tickets:
            ticket.score = 1.0
            ticket.save()
        expected_total = sum(t.score for t in self.tickets)
        self.assertEqual(self._fetch_single_session()['score'],
                         expected_total)
        # One unscored ticket makes the whole score undefined again.
        self.tickets[0].score = None
        self.tickets[0].save()
        self.assertEqual(self._fetch_single_session()['score'], None)
class TestGetExamTickets(ApiTestCase):
    """Tests for the /api/tickets endpoint (fetching exam questions)."""

    # Fixed: this annotation previously said ``get_exams`` while the
    # attribute actually assigned in setUp is ``get_exam_questions``.
    get_exam_questions: ApiClient
    session: ExamSession
    student_session: UserSession
    questions: List[Question]
    tickets: List[ExamTicket]
    ticket_map: Dict[str, ExamTicket]

    def setUp(self):
        super().setUp()
        self.get_exam_questions = ApiClient(
            '/api/tickets', student=self.student)
        self.setup_exam_objects()

    def tearDown(self):
        self.teardown_exam_objects()
        super().tearDown()

    def test_get_exam_questions(self):
        """Fetching tickets checks the student in and returns questions
        ordered by stage, each matching its Question's dict view."""
        self.assertFalse(self.student_session.check_in)
        result = self.assertResponseSuccess(
            self.get_exam_questions.post(session_id=self.student_session.id))
        self.assertEqual(result['status'], ExamStatus.available)
        self.assertEqual(result['score'], None)
        self.student_session.refresh_from_db()
        self.assertTrue(self.student_session.check_in)
        questions = result['questions']
        self.assertIsInstance(questions, list)
        self.assertEqual(len(questions), len(self.tickets))
        self.assertEqual(
            [x['id'] for x in questions],
            [x.id for x in sorted(self.tickets,
                                  key=lambda x: x.question.stage)])
        for question in questions:
            ticket = self.ticket_map[question['id']]
            ticket_question = ticket.question
            self.assertEqual(question.pop('id'), ticket.id)
            view = ticket_question.as_dict
            view.pop('id')
            self.assertEqual(question, view)

    def test_get_exam_questions_already_checked_in(self):
        """A second fetch must not move the original check-in timestamp."""
        self.student_session.check_in = True
        checkin_date = self.student_session.started_at
        result = self.assertResponseSuccess(
            self.get_exam_questions.post(session_id=self.student_session.id))
        self.assertEqual(result['status'], ExamStatus.available)
        self.assertEqual(result['score'], None)
        self.student_session.refresh_from_db()
        self.assertTrue(self.student_session.check_in)
        self.assertEqual(self.student_session.started_at, checkin_date)
        questions = result['questions']
        self.assertIsInstance(questions, list)
        self.assertEqual(len(questions), len(self.tickets))

    def test_get_exam_questions_not_available(self):
        """A not-yet-started exam returns an empty question list."""
        self.session.start_time += self.session.duration
        self.session.save()
        result = self.assertResponseSuccess(
            self.get_exam_questions.post(session_id=self.student_session.id))
        self.assertEqual(result['status'], ExamStatus.not_available)
        self.assertEqual(result['score'], None)
        questions = result['questions']
        self.assertIsInstance(questions, list)
        self.assertEqual(len(questions), 0)

    def test_get_exam_questions_submitted(self):
        """After submission the stored answers are echoed, score pending."""
        self.student_session.finished_at = timezone.now()
        self.student_session.save()
        ANSWER = 'answer'
        for ticket in self.tickets:
            ticket.answer = ANSWER
            ticket.save()
        result = self.assertResponseSuccess(
            self.get_exam_questions.post(session_id=self.student_session.id))
        self.assertEqual(result['status'], ExamStatus.submitted)
        self.assertEqual(result['score'], None)
        questions = result['questions']
        self.assertIsInstance(questions, list)
        self.assertEqual(len(questions), len(self.tickets))
        for question in questions:
            ticket = self.ticket_map[question['id']]
            ticket.refresh_from_db()
            answer = question.pop('answer')
            self.assertEqual(answer, ticket.answer)
            self.assertEqual(question['score'], None)

    def test_get_exam_questions_submitted_and_scored(self):
        """Scored submissions report both total and per-question scores."""
        self.student_session.finished_at = timezone.now()
        self.student_session.save()
        ANSWER = 'answer'
        for ticket in self.tickets:
            ticket.answer = ANSWER
            ticket.score = 1.0
            ticket.save()
        result = self.assertResponseSuccess(
            self.get_exam_questions.post(session_id=self.student_session.id))
        self.assertEqual(result['status'], ExamStatus.submitted)
        self.assertEqual(result['score'], sum(t.score for t in self.tickets))
        questions = result['questions']
        self.assertIsInstance(questions, list)
        self.assertEqual(len(questions), len(self.tickets))
        for question in questions:
            ticket = self.ticket_map[question['id']]
            ticket.refresh_from_db()
            self.assertEqual(question['score'], ticket.score)

    def test_get_exam_questions_invalid_params(self):
        """Bad or missing parameters and wrong methods are rejected."""
        self.assertResponseError(
            self.get_exam_questions.post(),
            errors.InvalidParameter('session_id'))
        self.assertResponseError(
            self.get_exam_questions.post(session_id=uuid_str()),
            errors.ExamNotFound)
        self.get_exam_questions.cookies = {}
        self.assertResponseError(
            self.get_exam_questions.post(session_id=self.student_session.id),
            errors.Unauthorized)
        response = self.get_exam_questions.get()
        self.assertEqual(response.status_code, 405)
class TestSubmitExam(ApiTestCase):
    """Tests for the /api/submit endpoint."""

    def setUp(self):
        super().setUp()
        self.submit_exam = ApiClient('/api/submit', student=self.student)
        self.setup_exam_objects()

    def tearDown(self):
        self.teardown_exam_objects()
        super().tearDown()

    def test_submit_exam(self):
        """Answers of every question type end up stored on the tickets."""
        ANSWER = 'answer'
        answers = {}
        for ticket in self.tickets:
            qtype = ticket.question.type
            options = ticket.question.options
            if qtype == QuestionType.single:
                # Single choice: one option index.
                answers[ticket.id] = random.randint(0, len(options) - 1)
            elif qtype == QuestionType.multi:
                # Multi choice: a random subset of option indices.
                answers[ticket.id] = random.sample(
                    list(range(0, len(options))),
                    k=random.randint(0, len(options)))
            else:
                # Open question: free-form text.
                answers[ticket.id] = ANSWER
        result = self.assertResponseSuccess(self.submit_exam.post(
            session_id=self.student_session.id, answers=answers))
        self.assertEqual(result, True)
        self.student_session.refresh_from_db()
        self.assertEqual(self.student_session.status, ExamStatus.submitted)
        for ticket in self.tickets:
            ticket.refresh_from_db()
            if ticket.question.type == QuestionType.single:
                self.assertEqual(
                    ticket.answer,
                    ticket.question.options[answers[ticket.id]])
            elif ticket.question.type == QuestionType.multi:
                chosen = sorted(answers[ticket.id])
                self.assertEqual(
                    ticket.answer,
                    ';'.join([ticket.question.options[x] for x in chosen]))
            self.assertIsNotNone(ticket.answered_at)

    def test_submit_without_any_answer(self):
        """An empty answer map still submits the session."""
        result = self.assertResponseSuccess(self.submit_exam.post(
            session_id=self.student_session.id, answers={}))
        self.assertEqual(result, True)
        self.student_session.refresh_from_db()
        self.assertEqual(self.student_session.status, ExamStatus.submitted)
        for ticket in self.tickets:
            ticket.refresh_from_db()
            self.assertIsNone(ticket.answered_at)
            self.assertIsNone(ticket.answer)

    def test_submit_partial_answer_errors(self):
        """Invalid entries are skipped; valid ones are still recorded."""
        ANSWER = 'answer'
        answers = {
            # Option index out of range -> ignored.
            self.tickets[0].id: len(self.tickets[0].question.options),
            # Wrong answer shape for a multi question -> ignored.
            self.tickets[1].id: 0,
            # Valid free-form answer for the open question.
            self.tickets[2].id: ANSWER,
            # Unknown / malformed ticket ids -> ignored.
            uuid_str(): ANSWER,
            (self.tickets[2].id + 1): ANSWER,
        }
        result = self.assertResponseSuccess(self.submit_exam.post(
            session_id=self.student_session.id, answers=answers))
        self.assertEqual(result, True)
        self.student_session.refresh_from_db()
        self.assertEqual(self.student_session.status, ExamStatus.submitted)
        for ticket in self.tickets:
            ticket.refresh_from_db()
        self.assertIsNone(self.tickets[0].answer)
        self.assertIsNone(self.tickets[0].answered_at)
        self.assertIsNone(self.tickets[1].answer)
        self.assertIsNone(self.tickets[1].answered_at)
        self.assertEqual(self.tickets[2].answer, ANSWER)
        self.assertIsNotNone(self.tickets[2].answered_at)

    def test_submit_errors(self):
        """Parameter validation and availability checks on submit."""
        self.assertResponseError(
            self.submit_exam.post(), errors.InvalidParameter('session_id'))
        self.assertResponseError(
            self.submit_exam.post(session_id=123),
            errors.InvalidParameter('session_id'))
        self.assertResponseError(
            self.submit_exam.post(session_id=uuid_str()),
            errors.InvalidParameter('answers'))
        self.assertResponseError(
            self.submit_exam.post(session_id=uuid_str(), answers=[]),
            errors.InvalidParameter('answers'))
        self.assertResponseError(
            self.submit_exam.post(session_id=uuid_str(), answers={}),
            errors.ExamNotFound)
        self.session.start_time += self.session.duration
        self.session.save()
        self.assertResponseError(
            self.submit_exam.post(
                session_id=self.student_session.id, answers={}),
            errors.ExamNotAvailable)
        # NOTE(review): other tests set ``started_at`` on UserSession, not
        # ``start_time`` — confirm this assignment actually influences the
        # outcome (the session is still in the future here either way).
        self.student_session.start_time = timezone.now()
        self.student_session.save()
        self.assertResponseError(
            self.submit_exam.post(
                session_id=self.student_session.id, answers={}),
            errors.ExamNotAvailable)
<|reserved_special_token_1|>
import random
from datetime import timedelta
from typing import Union, Type, Tuple, List, Dict
from django import http
from django.test import TestCase, Client
from django.utils import timezone
from exam_web import errors
from exam_web.models import Student, AcademyGroup, uuid_str, ExamSession, UserSession, Question, Stage, QuestionType, ExamTicket, ExamStatus
class ApiClient(Client):
    """Django test client bound to a single API endpoint path.

    Requests are always sent as JSON; when a ``student`` is supplied,
    its id is placed in the ``student`` cookie so the endpoint sees an
    authorized session.
    """

    path: str

    def __init__(self, path: str, student: Student = None, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.path = path
        self.student = student
        # Every request body/query is JSON.
        self.headers = {'content_type': 'application/json'}
        if student:
            self.cookies['student'] = student.id

    def path_params(self, **params):
        """Return a new client with ``params`` substituted into the path."""
        resolved = self.path.format(**params)
        return ApiClient(resolved, self.student)

    def get(self, **kwargs):
        """Issue a GET to the bound path with ``kwargs`` as query data."""
        return super().get(self.path, data=kwargs, **self.headers)

    def post(self, **json):
        """Issue a POST to the bound path with ``json`` as the body."""
        return super().post(self.path, data=json, **self.headers)

    def __call__(self, **kwargs):
        # Guard against accidentally invoking the client directly.
        raise AttributeError('Use `get` or `post` methods instead')
class ApiTestCase(TestCase):
    """Base test case providing a shared group/student and exam fixtures.

    ``setup_exam_objects`` / ``teardown_exam_objects`` build and dispose
    of a full exam fixture (exam session, user session, one question of
    each type, and their tickets); subclasses call them from
    ``setUp`` / ``tearDown``.
    """

    group: AcademyGroup
    student: Student

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        cls.group = AcademyGroup.objects.create(name='test_group')
        cls.student = Student.objects.create(name='test user', group=cls.group)

    @classmethod
    def tearDownClass(cls):
        cls.student.delete()
        cls.group.delete()
        super().tearDownClass()

    def setup_exam_objects(self):
        """Create an exam session with one question of each type."""
        self.session = ExamSession.objects.create(
            start_time=timezone.now(), duration=timedelta(minutes=40))
        self.student_session = UserSession.objects.create(
            student=self.student, exam_session=self.session)
        self.questions = [
            Question.objects.create(
                stage=Stage.first, type=QuestionType.single, max_score=1,
                text='test single question', options=['a', 'b', 'c'],
            ),
            Question.objects.create(
                stage=Stage.first, type=QuestionType.multi, max_score=1,
                text='test multi question', options=['a', 'b', 'c'],
            ),
            Question.objects.create(
                stage=Stage.second, type=QuestionType.open, max_score=1,
                text='test open question', options=None,
            ),
        ]
        self.tickets = [
            ExamTicket.objects.create(
                student=self.student, session=self.student_session,
                question=question)
            for question in self.questions
        ]
        self.ticket_map = {x.id: x for x in self.tickets}

    def teardown_exam_objects(self):
        """Delete everything created by ``setup_exam_objects``."""
        for ticket in self.tickets:
            ticket.delete()
        for question in self.questions:
            question.delete()
        self.student_session.delete()
        # Fix: the ExamSession row itself was previously never deleted,
        # leaking one session per test into the shared database.
        self.session.delete()

    def assertResponseSuccess(self, response: http.HttpResponse):
        """Assert a 200 JSON response containing ``result``; return it."""
        content = response.content.decode()
        self.assertEqual(response.status_code, 200,
                         (response.status_code, content))
        content = response.json()
        self.assertIn('result', content, content)
        return content['result']

    def assertResponseError(
        self, response: http.JsonResponse,
        error: Union[errors.APIError, Type[errors.APIError]] = None,
    ) -> Tuple[int, str]:
        """Assert an error (>= 400) JSON response containing ``error``.

        When ``error`` is given (instance or class), also assert the
        status code and message match it. Returns (status, message).
        """
        content = response.json()
        self.assertGreaterEqual(response.status_code, 400,
                                (response.status_code, content))
        self.assertIn('error', content, content)
        if error is not None:
            if isinstance(error, type):
                # Allow passing the exception class; use its defaults.
                error = error()
            self.assertEqual(response.status_code, error.status,
                             (response.status_code, content))
            self.assertEqual(content['error'], error.message,
                             (response.status_code, content))
        return response.status_code, content['error']
class TestAuthorize(ApiTestCase):
    """Tests for the /api/authorize endpoint."""

    authorize: ApiClient

    def setUp(self):
        super().setUp()
        self.authorize = ApiClient('/api/authorize')

    def test_authorized(self):
        """A valid token sets the auth cookie and returns the profile."""
        resp = self.authorize.post(token=self.student.id)
        profile = self.assertResponseSuccess(resp)
        self.assertEqual(resp.cookies['student'].value, self.student.id)
        self.assertEqual(profile['id'], self.student.id)
        self.assertEqual(profile['name'], self.student.name)
        self.assertEqual(profile['group'], self.group.name)

    def test_authorized_unknown_token(self):
        """A well-formed but unknown token is rejected."""
        self.assertResponseError(
            self.authorize.post(token=uuid_str()), errors.Unauthorized)

    def test_authorized_invalid_params(self):
        """Missing or malformed tokens fail validation; GET is not allowed."""
        self.assertResponseError(
            self.authorize.post(), errors.InvalidParameter('token'))
        self.assertResponseError(
            self.authorize.post(token=12345678),
            errors.InvalidParameter('token'))
        self.assertEqual(self.authorize.get().status_code, 405)
class TestGetExamSessions(ApiTestCase):
    """Tests for the /api/exams listing endpoint."""

    get_exams: ApiClient
    session: ExamSession
    student_session: UserSession
    questions: List[Question]
    tickets: List[ExamTicket]

    def setUp(self):
        super().setUp()
        self.get_exams = ApiClient('/api/exams', student=self.student)
        self.setup_exam_objects()

    def tearDown(self):
        self.teardown_exam_objects()
        super().tearDown()

    def _fetch_single_session(self):
        """Fetch the exam list and return its first (only) session view."""
        payload = self.assertResponseSuccess(self.get_exams.get())
        return payload[0]

    def test_get_exams_available(self):
        """An upcoming session is reported with all default fields."""
        payload = self.assertResponseSuccess(self.get_exams.get())
        self.assertIsInstance(payload, list)
        self.assertEqual(len(payload), 1)
        view = payload[0]
        self.assertEqual(view['started_at'],
                         self.session.start_time.isoformat())
        self.assertEqual(view['duration'],
                         self.session.duration.total_seconds() / 60)
        self.assertEqual(view['checked_in'], False)
        self.assertEqual(view['finished_at'], None)
        self.assertEqual(view['status'], ExamStatus.available.value)
        self.assertEqual(view['score'], None)

    def test_get_exams_check_in(self):
        """Setting started_at marks the session as checked in."""
        self.student_session.started_at = timezone.now()
        self.student_session.save()
        self.assertEqual(self._fetch_single_session()['checked_in'], True)

    def test_get_exams_submitted(self):
        """A finished session is reported as submitted, score pending."""
        now = timezone.now()
        self.student_session.started_at = timezone.now()
        self.student_session.finished_at = now
        self.student_session.save()
        view = self._fetch_single_session()
        self.assertEqual(view['finished_at'], now.isoformat())
        self.assertEqual(view['status'], ExamStatus.submitted)
        self.assertEqual(view['score'], None)

    def test_get_exams_non_available(self):
        """A session whose start time is in the future is not available."""
        self.session.start_time = timezone.now() + self.session.duration
        self.session.save()
        view = self._fetch_single_session()
        self.assertEqual(view['started_at'],
                         self.session.start_time.isoformat())
        self.assertEqual(view['finished_at'], None)
        self.assertEqual(view['status'], ExamStatus.not_available)

    def test_get_exams_unauthorized(self):
        """No auth cookie -> Unauthorized; POST is not allowed."""
        self.get_exams.cookies = {}
        self.assertResponseError(self.get_exams.get(), errors.Unauthorized)
        self.assertEqual(self.get_exams.post().status_code, 405)

    def test_get_exams_score(self):
        """The total score appears only when every ticket is scored."""
        for ticket in self.tickets:
            ticket.score = 1.0
            ticket.save()
        expected_total = sum(t.score for t in self.tickets)
        self.assertEqual(self._fetch_single_session()['score'],
                         expected_total)
        # One unscored ticket makes the whole score undefined again.
        self.tickets[0].score = None
        self.tickets[0].save()
        self.assertEqual(self._fetch_single_session()['score'], None)
class TestGetExamTickets(ApiTestCase):
    """Tests for the /api/tickets endpoint (fetching exam questions)."""

    # Fixed: this annotation previously said ``get_exams`` while the
    # attribute actually assigned in setUp is ``get_exam_questions``.
    get_exam_questions: ApiClient
    session: ExamSession
    student_session: UserSession
    questions: List[Question]
    tickets: List[ExamTicket]
    ticket_map: Dict[str, ExamTicket]

    def setUp(self):
        super().setUp()
        self.get_exam_questions = ApiClient(
            '/api/tickets', student=self.student)
        self.setup_exam_objects()

    def tearDown(self):
        self.teardown_exam_objects()
        super().tearDown()

    def test_get_exam_questions(self):
        """Fetching tickets checks the student in and returns questions
        ordered by stage, each matching its Question's dict view."""
        self.assertFalse(self.student_session.check_in)
        result = self.assertResponseSuccess(
            self.get_exam_questions.post(session_id=self.student_session.id))
        self.assertEqual(result['status'], ExamStatus.available)
        self.assertEqual(result['score'], None)
        self.student_session.refresh_from_db()
        self.assertTrue(self.student_session.check_in)
        questions = result['questions']
        self.assertIsInstance(questions, list)
        self.assertEqual(len(questions), len(self.tickets))
        self.assertEqual(
            [x['id'] for x in questions],
            [x.id for x in sorted(self.tickets,
                                  key=lambda x: x.question.stage)])
        for question in questions:
            ticket = self.ticket_map[question['id']]
            ticket_question = ticket.question
            self.assertEqual(question.pop('id'), ticket.id)
            view = ticket_question.as_dict
            view.pop('id')
            self.assertEqual(question, view)

    def test_get_exam_questions_already_checked_in(self):
        """A second fetch must not move the original check-in timestamp."""
        self.student_session.check_in = True
        checkin_date = self.student_session.started_at
        result = self.assertResponseSuccess(
            self.get_exam_questions.post(session_id=self.student_session.id))
        self.assertEqual(result['status'], ExamStatus.available)
        self.assertEqual(result['score'], None)
        self.student_session.refresh_from_db()
        self.assertTrue(self.student_session.check_in)
        self.assertEqual(self.student_session.started_at, checkin_date)
        questions = result['questions']
        self.assertIsInstance(questions, list)
        self.assertEqual(len(questions), len(self.tickets))

    def test_get_exam_questions_not_available(self):
        """A not-yet-started exam returns an empty question list."""
        self.session.start_time += self.session.duration
        self.session.save()
        result = self.assertResponseSuccess(
            self.get_exam_questions.post(session_id=self.student_session.id))
        self.assertEqual(result['status'], ExamStatus.not_available)
        self.assertEqual(result['score'], None)
        questions = result['questions']
        self.assertIsInstance(questions, list)
        self.assertEqual(len(questions), 0)

    def test_get_exam_questions_submitted(self):
        """After submission the stored answers are echoed, score pending."""
        self.student_session.finished_at = timezone.now()
        self.student_session.save()
        ANSWER = 'answer'
        for ticket in self.tickets:
            ticket.answer = ANSWER
            ticket.save()
        result = self.assertResponseSuccess(
            self.get_exam_questions.post(session_id=self.student_session.id))
        self.assertEqual(result['status'], ExamStatus.submitted)
        self.assertEqual(result['score'], None)
        questions = result['questions']
        self.assertIsInstance(questions, list)
        self.assertEqual(len(questions), len(self.tickets))
        for question in questions:
            ticket = self.ticket_map[question['id']]
            ticket.refresh_from_db()
            answer = question.pop('answer')
            self.assertEqual(answer, ticket.answer)
            self.assertEqual(question['score'], None)

    def test_get_exam_questions_submitted_and_scored(self):
        """Scored submissions report both total and per-question scores."""
        self.student_session.finished_at = timezone.now()
        self.student_session.save()
        ANSWER = 'answer'
        for ticket in self.tickets:
            ticket.answer = ANSWER
            ticket.score = 1.0
            ticket.save()
        result = self.assertResponseSuccess(
            self.get_exam_questions.post(session_id=self.student_session.id))
        self.assertEqual(result['status'], ExamStatus.submitted)
        self.assertEqual(result['score'], sum(t.score for t in self.tickets))
        questions = result['questions']
        self.assertIsInstance(questions, list)
        self.assertEqual(len(questions), len(self.tickets))
        for question in questions:
            ticket = self.ticket_map[question['id']]
            ticket.refresh_from_db()
            self.assertEqual(question['score'], ticket.score)

    def test_get_exam_questions_invalid_params(self):
        """Bad or missing parameters and wrong methods are rejected."""
        self.assertResponseError(
            self.get_exam_questions.post(),
            errors.InvalidParameter('session_id'))
        self.assertResponseError(
            self.get_exam_questions.post(session_id=uuid_str()),
            errors.ExamNotFound)
        self.get_exam_questions.cookies = {}
        self.assertResponseError(
            self.get_exam_questions.post(session_id=self.student_session.id),
            errors.Unauthorized)
        response = self.get_exam_questions.get()
        self.assertEqual(response.status_code, 405)
class TestSubmitExam(ApiTestCase):
    """Tests for the /api/submit endpoint."""

    def setUp(self):
        super().setUp()
        self.submit_exam = ApiClient('/api/submit', student=self.student)
        self.setup_exam_objects()

    def tearDown(self):
        self.teardown_exam_objects()
        super().tearDown()

    def test_submit_exam(self):
        """Answers of every question type end up stored on the tickets."""
        ANSWER = 'answer'
        answers = {}
        for ticket in self.tickets:
            qtype = ticket.question.type
            options = ticket.question.options
            if qtype == QuestionType.single:
                # Single choice: one option index.
                answers[ticket.id] = random.randint(0, len(options) - 1)
            elif qtype == QuestionType.multi:
                # Multi choice: a random subset of option indices.
                answers[ticket.id] = random.sample(
                    list(range(0, len(options))),
                    k=random.randint(0, len(options)))
            else:
                # Open question: free-form text.
                answers[ticket.id] = ANSWER
        result = self.assertResponseSuccess(self.submit_exam.post(
            session_id=self.student_session.id, answers=answers))
        self.assertEqual(result, True)
        self.student_session.refresh_from_db()
        self.assertEqual(self.student_session.status, ExamStatus.submitted)
        for ticket in self.tickets:
            ticket.refresh_from_db()
            if ticket.question.type == QuestionType.single:
                self.assertEqual(
                    ticket.answer,
                    ticket.question.options[answers[ticket.id]])
            elif ticket.question.type == QuestionType.multi:
                chosen = sorted(answers[ticket.id])
                self.assertEqual(
                    ticket.answer,
                    ';'.join([ticket.question.options[x] for x in chosen]))
            self.assertIsNotNone(ticket.answered_at)

    def test_submit_without_any_answer(self):
        """An empty answer map still submits the session."""
        result = self.assertResponseSuccess(self.submit_exam.post(
            session_id=self.student_session.id, answers={}))
        self.assertEqual(result, True)
        self.student_session.refresh_from_db()
        self.assertEqual(self.student_session.status, ExamStatus.submitted)
        for ticket in self.tickets:
            ticket.refresh_from_db()
            self.assertIsNone(ticket.answered_at)
            self.assertIsNone(ticket.answer)

    def test_submit_partial_answer_errors(self):
        """Invalid entries are skipped; valid ones are still recorded."""
        ANSWER = 'answer'
        answers = {
            # Option index out of range -> ignored.
            self.tickets[0].id: len(self.tickets[0].question.options),
            # Wrong answer shape for a multi question -> ignored.
            self.tickets[1].id: 0,
            # Valid free-form answer for the open question.
            self.tickets[2].id: ANSWER,
            # Unknown / malformed ticket ids -> ignored.
            uuid_str(): ANSWER,
            (self.tickets[2].id + 1): ANSWER,
        }
        result = self.assertResponseSuccess(self.submit_exam.post(
            session_id=self.student_session.id, answers=answers))
        self.assertEqual(result, True)
        self.student_session.refresh_from_db()
        self.assertEqual(self.student_session.status, ExamStatus.submitted)
        for ticket in self.tickets:
            ticket.refresh_from_db()
        self.assertIsNone(self.tickets[0].answer)
        self.assertIsNone(self.tickets[0].answered_at)
        self.assertIsNone(self.tickets[1].answer)
        self.assertIsNone(self.tickets[1].answered_at)
        self.assertEqual(self.tickets[2].answer, ANSWER)
        self.assertIsNotNone(self.tickets[2].answered_at)

    def test_submit_errors(self):
        """Parameter validation and availability checks on submit."""
        self.assertResponseError(
            self.submit_exam.post(), errors.InvalidParameter('session_id'))
        self.assertResponseError(
            self.submit_exam.post(session_id=123),
            errors.InvalidParameter('session_id'))
        self.assertResponseError(
            self.submit_exam.post(session_id=uuid_str()),
            errors.InvalidParameter('answers'))
        self.assertResponseError(
            self.submit_exam.post(session_id=uuid_str(), answers=[]),
            errors.InvalidParameter('answers'))
        self.assertResponseError(
            self.submit_exam.post(session_id=uuid_str(), answers={}),
            errors.ExamNotFound)
        self.session.start_time += self.session.duration
        self.session.save()
        self.assertResponseError(
            self.submit_exam.post(
                session_id=self.student_session.id, answers={}),
            errors.ExamNotAvailable)
        # NOTE(review): other tests set ``started_at`` on UserSession, not
        # ``start_time`` — confirm this assignment actually influences the
        # outcome (the session is still in the future here either way).
        self.student_session.start_time = timezone.now()
        self.student_session.save()
        self.assertResponseError(
            self.submit_exam.post(
                session_id=self.student_session.id, answers={}),
            errors.ExamNotAvailable)
<|reserved_special_token_1|>
import random
from datetime import timedelta
from typing import Union, Type, Tuple, List, Dict
from django import http
from django.test import TestCase, Client
from django.utils import timezone
from exam_web import errors
from exam_web.models import Student, AcademyGroup, uuid_str, ExamSession, \
UserSession, Question, Stage, QuestionType, ExamTicket, ExamStatus
class ApiClient(Client):
    """Django test client bound to a single API endpoint path.

    Requests are always sent as JSON; when a ``student`` is supplied,
    its id is placed in the ``student`` cookie so the endpoint sees an
    authorized session.
    """

    path: str

    def __init__(self, path: str, student: Student = None, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.path = path
        self.student = student
        # Every request body/query is JSON.
        self.headers = {'content_type': 'application/json'}
        if student:
            self.cookies['student'] = student.id

    def path_params(self, **params):
        """Return a new client with ``params`` substituted into the path."""
        resolved = self.path.format(**params)
        return ApiClient(resolved, self.student)

    def get(self, **kwargs):
        """Issue a GET to the bound path with ``kwargs`` as query data."""
        return super().get(self.path, data=kwargs, **self.headers)

    def post(self, **json):
        """Issue a POST to the bound path with ``json`` as the body."""
        return super().post(self.path, data=json, **self.headers)

    def __call__(self, **kwargs):
        # Guard against accidentally invoking the client directly.
        raise AttributeError('Use `get` or `post` methods instead')
class ApiTestCase(TestCase):
    """Base test case providing a shared group/student and exam fixtures.

    ``setup_exam_objects`` / ``teardown_exam_objects`` build and dispose
    of a full exam fixture (exam session, user session, one question of
    each type, and their tickets); subclasses call them from
    ``setUp`` / ``tearDown``.
    """

    group: AcademyGroup
    student: Student

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        cls.group = AcademyGroup.objects.create(name='test_group')
        cls.student = Student.objects.create(name='test user', group=cls.group)

    @classmethod
    def tearDownClass(cls):
        cls.student.delete()
        cls.group.delete()
        super().tearDownClass()

    def setup_exam_objects(self):
        """Create an exam session with one question of each type."""
        self.session = ExamSession.objects.create(
            start_time=timezone.now(), duration=timedelta(minutes=40))
        self.student_session = UserSession.objects.create(
            student=self.student, exam_session=self.session)
        self.questions = [
            Question.objects.create(
                stage=Stage.first, type=QuestionType.single, max_score=1,
                text='test single question', options=['a', 'b', 'c'],
            ),
            Question.objects.create(
                stage=Stage.first, type=QuestionType.multi, max_score=1,
                text='test multi question', options=['a', 'b', 'c'],
            ),
            Question.objects.create(
                stage=Stage.second, type=QuestionType.open, max_score=1,
                text='test open question', options=None,
            ),
        ]
        self.tickets = [
            ExamTicket.objects.create(
                student=self.student, session=self.student_session,
                question=question)
            for question in self.questions
        ]
        self.ticket_map = {x.id: x for x in self.tickets}

    def teardown_exam_objects(self):
        """Delete everything created by ``setup_exam_objects``."""
        for ticket in self.tickets:
            ticket.delete()
        for question in self.questions:
            question.delete()
        self.student_session.delete()
        # Fix: the ExamSession row itself was previously never deleted,
        # leaking one session per test into the shared database.
        self.session.delete()

    def assertResponseSuccess(self, response: http.HttpResponse):
        """Assert a 200 JSON response containing ``result``; return it."""
        content = response.content.decode()
        self.assertEqual(response.status_code, 200,
                         (response.status_code, content))
        content = response.json()
        self.assertIn('result', content, content)
        return content['result']

    def assertResponseError(
        self, response: http.JsonResponse,
        error: Union[errors.APIError, Type[errors.APIError]] = None,
    ) -> Tuple[int, str]:
        """Assert an error (>= 400) JSON response containing ``error``.

        When ``error`` is given (instance or class), also assert the
        status code and message match it. Returns (status, message).
        """
        content = response.json()
        self.assertGreaterEqual(response.status_code, 400,
                                (response.status_code, content))
        self.assertIn('error', content, content)
        if error is not None:
            if isinstance(error, type):
                # Allow passing the exception class; use its defaults.
                error = error()
            self.assertEqual(response.status_code, error.status,
                             (response.status_code, content))
            self.assertEqual(content['error'], error.message,
                             (response.status_code, content))
        return response.status_code, content['error']
class TestAuthorize(ApiTestCase):
    """Tests for the /api/authorize endpoint."""

    authorize: ApiClient

    def setUp(self):
        super().setUp()
        self.authorize = ApiClient('/api/authorize')

    def test_authorized(self):
        """A valid token sets the auth cookie and returns the profile."""
        resp = self.authorize.post(token=self.student.id)
        profile = self.assertResponseSuccess(resp)
        self.assertEqual(resp.cookies['student'].value, self.student.id)
        self.assertEqual(profile['id'], self.student.id)
        self.assertEqual(profile['name'], self.student.name)
        self.assertEqual(profile['group'], self.group.name)

    def test_authorized_unknown_token(self):
        """A well-formed but unknown token is rejected."""
        self.assertResponseError(
            self.authorize.post(token=uuid_str()), errors.Unauthorized)

    def test_authorized_invalid_params(self):
        """Missing or malformed tokens fail validation; GET is not allowed."""
        self.assertResponseError(
            self.authorize.post(), errors.InvalidParameter('token'))
        self.assertResponseError(
            self.authorize.post(token=12345678),
            errors.InvalidParameter('token'))
        self.assertEqual(self.authorize.get().status_code, 405)
class TestGetExamSessions(ApiTestCase):
    """Tests for the /api/exams listing endpoint."""

    get_exams: ApiClient
    session: ExamSession
    student_session: UserSession
    questions: List[Question]
    tickets: List[ExamTicket]

    def setUp(self):
        super().setUp()
        self.get_exams = ApiClient('/api/exams', student=self.student)
        self.setup_exam_objects()

    def tearDown(self):
        self.teardown_exam_objects()
        super().tearDown()

    def _fetch_single_session(self):
        """Fetch the exam list and return its first (only) session view."""
        payload = self.assertResponseSuccess(self.get_exams.get())
        return payload[0]

    def test_get_exams_available(self):
        """An upcoming session is reported with all default fields."""
        payload = self.assertResponseSuccess(self.get_exams.get())
        self.assertIsInstance(payload, list)
        self.assertEqual(len(payload), 1)
        view = payload[0]
        self.assertEqual(view['started_at'],
                         self.session.start_time.isoformat())
        self.assertEqual(view['duration'],
                         self.session.duration.total_seconds() / 60)
        self.assertEqual(view['checked_in'], False)
        self.assertEqual(view['finished_at'], None)
        self.assertEqual(view['status'], ExamStatus.available.value)
        self.assertEqual(view['score'], None)

    def test_get_exams_check_in(self):
        """Setting started_at marks the session as checked in."""
        self.student_session.started_at = timezone.now()
        self.student_session.save()
        self.assertEqual(self._fetch_single_session()['checked_in'], True)

    def test_get_exams_submitted(self):
        """A finished session is reported as submitted, score pending."""
        now = timezone.now()
        self.student_session.started_at = timezone.now()
        self.student_session.finished_at = now
        self.student_session.save()
        view = self._fetch_single_session()
        self.assertEqual(view['finished_at'], now.isoformat())
        self.assertEqual(view['status'], ExamStatus.submitted)
        self.assertEqual(view['score'], None)

    def test_get_exams_non_available(self):
        """A session whose start time is in the future is not available."""
        self.session.start_time = timezone.now() + self.session.duration
        self.session.save()
        view = self._fetch_single_session()
        self.assertEqual(view['started_at'],
                         self.session.start_time.isoformat())
        self.assertEqual(view['finished_at'], None)
        self.assertEqual(view['status'], ExamStatus.not_available)

    def test_get_exams_unauthorized(self):
        """No auth cookie -> Unauthorized; POST is not allowed."""
        self.get_exams.cookies = {}
        self.assertResponseError(self.get_exams.get(), errors.Unauthorized)
        self.assertEqual(self.get_exams.post().status_code, 405)

    def test_get_exams_score(self):
        """The total score appears only when every ticket is scored."""
        for ticket in self.tickets:
            ticket.score = 1.0
            ticket.save()
        expected_total = sum(t.score for t in self.tickets)
        self.assertEqual(self._fetch_single_session()['score'],
                         expected_total)
        # One unscored ticket makes the whole score undefined again.
        self.tickets[0].score = None
        self.tickets[0].save()
        self.assertEqual(self._fetch_single_session()['score'], None)
class TestGetExamTickets(ApiTestCase):
    """Tests for POST /api/tickets: listing a student's exam questions.

    Covers the check-in side effect of the first fetch, the availability
    window, the submitted/scored views, and parameter validation.
    """

    # Client attribute assigned in setUp (annotation previously mis-declared
    # as ``get_exams``).
    get_exam_questions: ApiClient
    session: ExamSession
    student_session: UserSession
    questions: List[Question]
    tickets: List[ExamTicket]
    ticket_map: Dict[str, ExamTicket]

    def setUp(self):
        super().setUp()
        self.get_exam_questions = \
            ApiClient('/api/tickets', student=self.student)
        self.setup_exam_objects()

    def tearDown(self):
        self.teardown_exam_objects()
        super().tearDown()

    def test_get_exam_questions(self):
        """First fetch checks the student in and returns questions in stage order."""
        self.assertFalse(self.student_session.check_in)
        result = self.assertResponseSuccess(
            self.get_exam_questions.post(session_id=self.student_session.id))
        self.assertEqual(result['status'], ExamStatus.available)
        self.assertEqual(result['score'], None)
        # Fetching the questions is the check-in event: the flag must be set.
        self.student_session.refresh_from_db()
        self.assertTrue(self.student_session.check_in)
        questions = result['questions']
        self.assertIsInstance(questions, list)
        self.assertEqual(len(questions), len(self.tickets))
        # Tickets come back ordered by their question's stage.
        self.assertEqual([x['id'] for x in questions], [
            x.id for x in sorted(self.tickets, key=lambda x: x.question.stage)
        ])
        for question in questions:
            ticket = self.ticket_map[question['id']]
            ticket_question = ticket.question
            self.assertEqual(question.pop('id'), ticket.id)
            # Apart from the id, the payload must match the question's view.
            view = ticket_question.as_dict
            view.pop('id')
            self.assertEqual(question, view)

    def test_get_exam_questions_already_checked_in(self):
        """A repeated fetch must not reset the original check-in timestamp."""
        self.student_session.check_in = True
        # Fix: persist the flag so the endpoint actually sees a checked-in
        # session (sibling tests save after mutating session state; without
        # this the in-memory change never reaches the database).
        self.student_session.save()
        checkin_date = self.student_session.started_at
        result = self.assertResponseSuccess(
            self.get_exam_questions.post(session_id=self.student_session.id))
        self.assertEqual(result['status'], ExamStatus.available)
        self.assertEqual(result['score'], None)
        self.student_session.refresh_from_db()
        self.assertTrue(self.student_session.check_in)
        # started_at must be unchanged by the second fetch.
        self.assertEqual(self.student_session.started_at, checkin_date)
        questions = result['questions']
        self.assertIsInstance(questions, list)
        self.assertEqual(len(questions), len(self.tickets))

    def test_get_exam_questions_not_available(self):
        """Outside the exam window the endpoint returns no questions."""
        # Shift the window into the future so the exam is not yet available.
        self.session.start_time += self.session.duration
        self.session.save()
        result = self.assertResponseSuccess(
            self.get_exam_questions.post(session_id=self.student_session.id))
        self.assertEqual(result['status'], ExamStatus.not_available)
        self.assertEqual(result['score'], None)
        questions = result['questions']
        self.assertIsInstance(questions, list)
        self.assertEqual(len(questions), 0)

    def test_get_exam_questions_submitted(self):
        """After submission the questions expose answers but no scores yet."""
        self.student_session.finished_at = timezone.now()
        self.student_session.save()
        ANSWER = 'answer'
        for ticket in self.tickets:
            ticket.answer = ANSWER
            ticket.save()
        result = self.assertResponseSuccess(
            self.get_exam_questions.post(session_id=self.student_session.id))
        self.assertEqual(result['status'], ExamStatus.submitted)
        self.assertEqual(result['score'], None)
        questions = result['questions']
        self.assertIsInstance(questions, list)
        self.assertEqual(len(questions), len(self.tickets))
        for question in questions:
            ticket = self.ticket_map[question['id']]
            ticket.refresh_from_db()
            answer = question.pop('answer')
            self.assertEqual(answer, ticket.answer)
            self.assertEqual(question['score'], None)

    def test_get_exam_questions_submitted_and_scored(self):
        """Once tickets are scored, per-question and total scores are exposed."""
        self.student_session.finished_at = timezone.now()
        self.student_session.save()
        ANSWER = 'answer'
        for ticket in self.tickets:
            ticket.answer = ANSWER
            ticket.score = 1.0
            ticket.save()
        result = self.assertResponseSuccess(
            self.get_exam_questions.post(session_id=self.student_session.id))
        self.assertEqual(result['status'], ExamStatus.submitted)
        # The session score is the sum of all ticket scores.
        self.assertEqual(result['score'], sum(t.score for t in self.tickets))
        questions = result['questions']
        self.assertIsInstance(questions, list)
        self.assertEqual(len(questions), len(self.tickets))
        for question in questions:
            ticket = self.ticket_map[question['id']]
            ticket.refresh_from_db()
            self.assertEqual(question['score'], ticket.score)

    def test_get_exam_questions_invalid_params(self):
        """Missing/unknown session ids, missing auth, and wrong verbs are rejected."""
        self.assertResponseError(self.get_exam_questions.post(),
                                 errors.InvalidParameter('session_id'))
        self.assertResponseError(
            self.get_exam_questions.post(session_id=uuid_str()),
            errors.ExamNotFound)
        # Without cookies the request is anonymous.
        self.get_exam_questions.cookies = {}
        self.assertResponseError(
            self.get_exam_questions.post(session_id=self.student_session.id),
            errors.Unauthorized)
        # Only POST is supported by this endpoint.
        response = self.get_exam_questions.get()
        self.assertEqual(response.status_code, 405)
class TestSubmitExam(ApiTestCase):
    """Tests for POST /api/submit: submitting a student's exam answers."""
    def setUp(self):
        super().setUp()
        self.submit_exam = ApiClient('/api/submit', student=self.student)
        self.setup_exam_objects()
    def tearDown(self):
        self.teardown_exam_objects()
        super().tearDown()
    def test_submit_exam(self):
        """Answers of every question type are stored as option text / free text."""
        answers = {}
        ANSWER = 'answer'
        for ticket in self.tickets:
            if ticket.question.type == QuestionType.single:
                # Single-choice: the payload is one option index.
                answers[ticket.id] = \
                    random.randint(0, len(ticket.question.options)-1)
            elif ticket.question.type == QuestionType.multi:
                # Multi-choice: a (possibly empty) sample of option indices.
                answers[ticket.id] = random.sample(
                    list(range(0, len(ticket.question.options))),
                    k=random.randint(0, len(ticket.question.options))
                )
            else:
                # Free-text question: the payload is the answer string itself.
                answers[ticket.id] = ANSWER
        result = self.assertResponseSuccess(self.submit_exam.post(
            session_id=self.student_session.id, answers=answers))
        self.assertEqual(result, True)
        self.student_session.refresh_from_db()
        self.assertEqual(self.student_session.status, ExamStatus.submitted)
        for ticket in self.tickets:
            ticket.refresh_from_db()
            if ticket.question.type == QuestionType.single:
                # Stored answer is the option's text, not the index.
                self.assertEqual(
                    ticket.answer, ticket.question.options[answers[ticket.id]])
            elif ticket.question.type == QuestionType.multi:
                # Multi answers are stored as ';'-joined option texts,
                # in ascending index order.
                self.assertEqual(ticket.answer, ';'.join([
                    ticket.question.options[x]
                    for x in sorted(answers[ticket.id])
                ]))
            self.assertIsNotNone(ticket.answered_at)
    def test_submit_without_any_answer(self):
        """Submitting an empty answer map still finalizes the session."""
        result = self.assertResponseSuccess(self.submit_exam.post(
            session_id=self.student_session.id, answers={}))
        self.assertEqual(result, True)
        self.student_session.refresh_from_db()
        self.assertEqual(self.student_session.status, ExamStatus.submitted)
        # No ticket may have been marked answered.
        for ticket in self.tickets:
            ticket.refresh_from_db()
            self.assertIsNone(ticket.answered_at)
            self.assertIsNone(ticket.answer)
    def test_submit_partial_answer_errors(self):
        """Invalid per-ticket answers are skipped; valid ones are still stored."""
        ANSWER = 'answer'
        answers = {
            # option index out of range for a choice question
            self.tickets[0].id: len(self.tickets[0].question.options),
            # answer of the wrong type
            self.tickets[1].id: 0,
            # a valid answer
            self.tickets[2].id: ANSWER,
            # unknown ticket id
            uuid_str(): ANSWER,
            # non-existent ticket
            self.tickets[2].id + 1: ANSWER,
        }
        result = self.assertResponseSuccess(self.submit_exam.post(
            session_id=self.student_session.id, answers=answers))
        self.assertEqual(result, True)
        self.student_session.refresh_from_db()
        self.assertEqual(self.student_session.status, ExamStatus.submitted)
        for ticket in self.tickets:
            ticket.refresh_from_db()
        # The two invalid answers were dropped; only the valid one landed.
        self.assertIsNone(self.tickets[0].answer)
        self.assertIsNone(self.tickets[0].answered_at)
        self.assertIsNone(self.tickets[1].answer)
        self.assertIsNone(self.tickets[1].answered_at)
        self.assertEqual(self.tickets[2].answer, ANSWER)
        self.assertIsNotNone(self.tickets[2].answered_at)
    def test_submit_errors(self):
        """Parameter validation and availability-window errors for submit."""
        self.assertResponseError(self.submit_exam.post(),
                                 errors.InvalidParameter('session_id'))
        self.assertResponseError(self.submit_exam.post(session_id=123),
                                 errors.InvalidParameter('session_id'))
        self.assertResponseError(self.submit_exam.post(session_id=uuid_str()),
                                 errors.InvalidParameter('answers'))
        self.assertResponseError(
            self.submit_exam.post(session_id=uuid_str(), answers=[]),
            errors.InvalidParameter('answers'))
        self.assertResponseError(
            self.submit_exam.post(session_id=uuid_str(), answers={}),
            errors.ExamNotFound)
        # Move the exam window into the future: submission is not available.
        self.session.start_time += self.session.duration
        self.session.save()
        self.assertResponseError(self.submit_exam.post(
            session_id=self.student_session.id, answers={}),
            errors.ExamNotAvailable)
        # NOTE(review): the user session elsewhere uses ``started_at``;
        # ``start_time`` here may just set an ad-hoc attribute that save()
        # never persists — confirm the intended field name.
        self.student_session.start_time = timezone.now()
        self.student_session.save()
        self.assertResponseError(self.submit_exam.post(
            session_id=self.student_session.id, answers={}),
            errors.ExamNotAvailable)
|
flexible
|
{
"blob_id": "44e4151279884ce7c5d5a9e5c82916ce2d3ccbc2",
"index": 9789,
"step-1": "<mask token>\n\n\nclass TestGetExamTickets(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n ticket_map: Dict[str, ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exam_questions = ApiClient('/api/tickets', student=self.\n student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exam_questions(self):\n self.assertFalse(self.student_session.check_in)\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n self.assertEqual([x['id'] for x in questions], [x.id for x in\n sorted(self.tickets, key=lambda x: x.question.stage)])\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket_question = ticket.question\n self.assertEqual(question.pop('id'), ticket.id)\n view = ticket_question.as_dict\n view.pop('id')\n self.assertEqual(question, view)\n\n def test_get_exam_questions_already_checked_in(self):\n self.student_session.check_in = True\n checkin_date = self.student_session.started_at\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n self.assertEqual(self.student_session.started_at, checkin_date)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n <mask token>\n\n 
def test_get_exam_questions_submitted(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n answer = question.pop('answer')\n self.assertEqual(answer, ticket.answer)\n self.assertEqual(question['score'], None)\n\n def test_get_exam_questions_submitted_and_scored(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], sum(t.score for t in self.tickets))\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n self.assertEqual(question['score'], ticket.score)\n\n def test_get_exam_questions_invalid_params(self):\n self.assertResponseError(self.get_exam_questions.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.get_exam_questions.post(session_id=\n uuid_str()), errors.ExamNotFound)\n self.get_exam_questions.cookies = {}\n self.assertResponseError(self.get_exam_questions.post(session_id=\n self.student_session.id), errors.Unauthorized)\n response = 
self.get_exam_questions.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestSubmitExam(ApiTestCase):\n\n def setUp(self):\n super().setUp()\n self.submit_exam = ApiClient('/api/submit', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_submit_exam(self):\n answers = {}\n ANSWER = 'answer'\n for ticket in self.tickets:\n if ticket.question.type == QuestionType.single:\n answers[ticket.id] = random.randint(0, len(ticket.question.\n options) - 1)\n elif ticket.question.type == QuestionType.multi:\n answers[ticket.id] = random.sample(list(range(0, len(ticket\n .question.options))), k=random.randint(0, len(ticket.\n question.options)))\n else:\n answers[ticket.id] = ANSWER\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n if ticket.question.type == QuestionType.single:\n self.assertEqual(ticket.answer, ticket.question.options[\n answers[ticket.id]])\n elif ticket.question.type == QuestionType.multi:\n self.assertEqual(ticket.answer, ';'.join([ticket.question.\n options[x] for x in sorted(answers[ticket.id])]))\n self.assertIsNotNone(ticket.answered_at)\n\n def test_submit_without_any_answer(self):\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers={}))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(ticket.answered_at)\n self.assertIsNone(ticket.answer)\n\n def test_submit_partial_answer_errors(self):\n ANSWER = 'answer'\n answers = {self.tickets[0].id: 
len(self.tickets[0].question.options\n ), self.tickets[1].id: 0, self.tickets[2].id: ANSWER, uuid_str(\n ): ANSWER, (self.tickets[2].id + 1): ANSWER}\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(self.tickets[0].answer)\n self.assertIsNone(self.tickets[0].answered_at)\n self.assertIsNone(self.tickets[1].answer)\n self.assertIsNone(self.tickets[1].answered_at)\n self.assertEqual(self.tickets[2].answer, ANSWER)\n self.assertIsNotNone(self.tickets[2].answered_at)\n\n def test_submit_errors(self):\n self.assertResponseError(self.submit_exam.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=123),\n errors.InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n )), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers=[]), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers={}), errors.ExamNotFound)\n self.session.start_time += self.session.duration\n self.session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n self.student_session.start_time = timezone.now()\n self.student_session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n",
"step-2": "<mask token>\n\n\nclass ApiTestCase(TestCase):\n group: AcademyGroup\n student: Student\n\n @classmethod\n def setUpClass(cls):\n super().setUpClass()\n cls.group = AcademyGroup.objects.create(name='test_group')\n cls.student = Student.objects.create(name='test user', group=cls.group)\n\n @classmethod\n def tearDownClass(cls):\n cls.student.delete()\n cls.group.delete()\n super().tearDownClass()\n <mask token>\n\n def teardown_exam_objects(self):\n for ticket in self.tickets:\n ticket.delete()\n for question in self.questions:\n question.delete()\n self.student_session.delete()\n <mask token>\n <mask token>\n\n\nclass TestAuthorize(ApiTestCase):\n authorize: ApiClient\n\n def setUp(self):\n super().setUp()\n self.authorize = ApiClient('/api/authorize')\n\n def test_authorized(self):\n response = self.authorize.post(token=self.student.id)\n result = self.assertResponseSuccess(response)\n self.assertEqual(response.cookies['student'].value, self.student.id)\n self.assertEqual(result['name'], self.student.name)\n self.assertEqual(result['group'], self.group.name)\n self.assertEqual(result['id'], self.student.id)\n\n def test_authorized_unknown_token(self):\n response = self.authorize.post(token=uuid_str())\n self.assertResponseError(response, errors.Unauthorized)\n\n def test_authorized_invalid_params(self):\n response = self.authorize.post()\n self.assertResponseError(response, errors.InvalidParameter('token'))\n response = self.authorize.post(token=12345678)\n self.assertResponseError(response, errors.InvalidParameter('token'))\n response = self.authorize.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestGetExamSessions(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exams = ApiClient('/api/exams', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n 
self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exams_available(self):\n result = self.assertResponseSuccess(self.get_exams.get())\n self.assertIsInstance(result, list)\n self.assertEqual(len(result), 1)\n user_session = result[0]\n self.assertEqual(user_session['started_at'], self.session.\n start_time.isoformat())\n self.assertEqual(user_session['duration'], self.session.duration.\n total_seconds() / 60)\n self.assertEqual(user_session['checked_in'], False)\n self.assertEqual(user_session['finished_at'], None)\n self.assertEqual(user_session['status'], ExamStatus.available.value)\n self.assertEqual(user_session['score'], None)\n\n def test_get_exams_check_in(self):\n self.student_session.started_at = timezone.now()\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['checked_in'], True)\n\n def test_get_exams_submitted(self):\n now = timezone.now()\n self.student_session.started_at = timezone.now()\n self.student_session.finished_at = now\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['finished_at'], now.isoformat())\n self.assertEqual(user_session['status'], ExamStatus.submitted)\n self.assertEqual(user_session['score'], None)\n\n def test_get_exams_non_available(self):\n self.session.start_time = timezone.now() + self.session.duration\n self.session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['started_at'], self.session.\n start_time.isoformat())\n self.assertEqual(user_session['finished_at'], None)\n self.assertEqual(user_session['status'], ExamStatus.not_available)\n\n def test_get_exams_unauthorized(self):\n self.get_exams.cookies = {}\n self.assertResponseError(self.get_exams.get(), errors.Unauthorized)\n response = self.get_exams.post()\n 
self.assertEqual(response.status_code, 405)\n\n def test_get_exams_score(self):\n for ticket in self.tickets:\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], sum(t.score for t in self.\n tickets))\n self.tickets[0].score = None\n self.tickets[0].save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], None)\n\n\nclass TestGetExamTickets(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n ticket_map: Dict[str, ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exam_questions = ApiClient('/api/tickets', student=self.\n student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exam_questions(self):\n self.assertFalse(self.student_session.check_in)\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n self.assertEqual([x['id'] for x in questions], [x.id for x in\n sorted(self.tickets, key=lambda x: x.question.stage)])\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket_question = ticket.question\n self.assertEqual(question.pop('id'), ticket.id)\n view = ticket_question.as_dict\n view.pop('id')\n self.assertEqual(question, view)\n\n def test_get_exam_questions_already_checked_in(self):\n self.student_session.check_in = True\n checkin_date = self.student_session.started_at\n result = 
self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n self.assertEqual(self.student_session.started_at, checkin_date)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n\n def test_get_exam_questions_not_available(self):\n self.session.start_time += self.session.duration\n self.session.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.not_available)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), 0)\n\n def test_get_exam_questions_submitted(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n answer = question.pop('answer')\n self.assertEqual(answer, ticket.answer)\n self.assertEqual(question['score'], None)\n\n def test_get_exam_questions_submitted_and_scored(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.score = 1.0\n ticket.save()\n result = 
self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], sum(t.score for t in self.tickets))\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n self.assertEqual(question['score'], ticket.score)\n\n def test_get_exam_questions_invalid_params(self):\n self.assertResponseError(self.get_exam_questions.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.get_exam_questions.post(session_id=\n uuid_str()), errors.ExamNotFound)\n self.get_exam_questions.cookies = {}\n self.assertResponseError(self.get_exam_questions.post(session_id=\n self.student_session.id), errors.Unauthorized)\n response = self.get_exam_questions.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestSubmitExam(ApiTestCase):\n\n def setUp(self):\n super().setUp()\n self.submit_exam = ApiClient('/api/submit', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_submit_exam(self):\n answers = {}\n ANSWER = 'answer'\n for ticket in self.tickets:\n if ticket.question.type == QuestionType.single:\n answers[ticket.id] = random.randint(0, len(ticket.question.\n options) - 1)\n elif ticket.question.type == QuestionType.multi:\n answers[ticket.id] = random.sample(list(range(0, len(ticket\n .question.options))), k=random.randint(0, len(ticket.\n question.options)))\n else:\n answers[ticket.id] = ANSWER\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in 
self.tickets:\n ticket.refresh_from_db()\n if ticket.question.type == QuestionType.single:\n self.assertEqual(ticket.answer, ticket.question.options[\n answers[ticket.id]])\n elif ticket.question.type == QuestionType.multi:\n self.assertEqual(ticket.answer, ';'.join([ticket.question.\n options[x] for x in sorted(answers[ticket.id])]))\n self.assertIsNotNone(ticket.answered_at)\n\n def test_submit_without_any_answer(self):\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers={}))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(ticket.answered_at)\n self.assertIsNone(ticket.answer)\n\n def test_submit_partial_answer_errors(self):\n ANSWER = 'answer'\n answers = {self.tickets[0].id: len(self.tickets[0].question.options\n ), self.tickets[1].id: 0, self.tickets[2].id: ANSWER, uuid_str(\n ): ANSWER, (self.tickets[2].id + 1): ANSWER}\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(self.tickets[0].answer)\n self.assertIsNone(self.tickets[0].answered_at)\n self.assertIsNone(self.tickets[1].answer)\n self.assertIsNone(self.tickets[1].answered_at)\n self.assertEqual(self.tickets[2].answer, ANSWER)\n self.assertIsNotNone(self.tickets[2].answered_at)\n\n def test_submit_errors(self):\n self.assertResponseError(self.submit_exam.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=123),\n errors.InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n )), 
errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers=[]), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers={}), errors.ExamNotFound)\n self.session.start_time += self.session.duration\n self.session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n self.student_session.start_time = timezone.now()\n self.student_session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n",
"step-3": "<mask token>\n\n\nclass ApiClient(Client):\n path: str\n\n def __init__(self, path: str, student: Student=None, *args, **kwargs):\n super().__init__(*args, **kwargs)\n self.student = student\n self.path = path\n self.headers = {'content_type': 'application/json'}\n if student:\n self.cookies['student'] = student.id\n\n def path_params(self, **params):\n return ApiClient(self.path.format(**params), self.student)\n\n def get(self, **kwargs):\n return super().get(self.path, data=kwargs, **self.headers)\n\n def post(self, **json):\n return super().post(self.path, data=json, **self.headers)\n <mask token>\n\n\nclass ApiTestCase(TestCase):\n group: AcademyGroup\n student: Student\n\n @classmethod\n def setUpClass(cls):\n super().setUpClass()\n cls.group = AcademyGroup.objects.create(name='test_group')\n cls.student = Student.objects.create(name='test user', group=cls.group)\n\n @classmethod\n def tearDownClass(cls):\n cls.student.delete()\n cls.group.delete()\n super().tearDownClass()\n\n def setup_exam_objects(self):\n self.session = ExamSession.objects.create(start_time=timezone.now(),\n duration=timedelta(minutes=40))\n self.student_session = UserSession.objects.create(student=self.\n student, exam_session=self.session)\n self.questions = [Question.objects.create(stage=Stage.first, type=\n QuestionType.single, max_score=1, text='test single question',\n options=['a', 'b', 'c']), Question.objects.create(stage=Stage.\n first, type=QuestionType.multi, max_score=1, text=\n 'test multi question', options=['a', 'b', 'c']), Question.\n objects.create(stage=Stage.second, type=QuestionType.open,\n max_score=1, text='test open question', options=None)]\n self.tickets = [ExamTicket.objects.create(student=self.student,\n session=self.student_session, question=question) for question in\n self.questions]\n self.ticket_map = {x.id: x for x in self.tickets}\n\n def teardown_exam_objects(self):\n for ticket in self.tickets:\n ticket.delete()\n for question in 
self.questions:\n question.delete()\n self.student_session.delete()\n\n def assertResponseSuccess(self, response: http.HttpResponse):\n content = response.content.decode()\n self.assertEqual(response.status_code, 200, (response.status_code,\n content))\n content = response.json()\n self.assertIn('result', content, content)\n return content['result']\n\n def assertResponseError(self, response: http.JsonResponse, error: Union\n [errors.APIError, Type[errors.APIError]]=None) ->Tuple[int, str]:\n content = response.json()\n self.assertGreaterEqual(response.status_code, 400, (response.\n status_code, content))\n self.assertIn('error', content, content)\n if error is not None:\n if isinstance(error, type):\n error = error()\n self.assertEqual(response.status_code, error.status, (response.\n status_code, content))\n self.assertEqual(content['error'], error.message, (response.\n status_code, content))\n return response.status_code, content['error']\n\n\nclass TestAuthorize(ApiTestCase):\n authorize: ApiClient\n\n def setUp(self):\n super().setUp()\n self.authorize = ApiClient('/api/authorize')\n\n def test_authorized(self):\n response = self.authorize.post(token=self.student.id)\n result = self.assertResponseSuccess(response)\n self.assertEqual(response.cookies['student'].value, self.student.id)\n self.assertEqual(result['name'], self.student.name)\n self.assertEqual(result['group'], self.group.name)\n self.assertEqual(result['id'], self.student.id)\n\n def test_authorized_unknown_token(self):\n response = self.authorize.post(token=uuid_str())\n self.assertResponseError(response, errors.Unauthorized)\n\n def test_authorized_invalid_params(self):\n response = self.authorize.post()\n self.assertResponseError(response, errors.InvalidParameter('token'))\n response = self.authorize.post(token=12345678)\n self.assertResponseError(response, errors.InvalidParameter('token'))\n response = self.authorize.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass 
TestGetExamSessions(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exams = ApiClient('/api/exams', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exams_available(self):\n result = self.assertResponseSuccess(self.get_exams.get())\n self.assertIsInstance(result, list)\n self.assertEqual(len(result), 1)\n user_session = result[0]\n self.assertEqual(user_session['started_at'], self.session.\n start_time.isoformat())\n self.assertEqual(user_session['duration'], self.session.duration.\n total_seconds() / 60)\n self.assertEqual(user_session['checked_in'], False)\n self.assertEqual(user_session['finished_at'], None)\n self.assertEqual(user_session['status'], ExamStatus.available.value)\n self.assertEqual(user_session['score'], None)\n\n def test_get_exams_check_in(self):\n self.student_session.started_at = timezone.now()\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['checked_in'], True)\n\n def test_get_exams_submitted(self):\n now = timezone.now()\n self.student_session.started_at = timezone.now()\n self.student_session.finished_at = now\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['finished_at'], now.isoformat())\n self.assertEqual(user_session['status'], ExamStatus.submitted)\n self.assertEqual(user_session['score'], None)\n\n def test_get_exams_non_available(self):\n self.session.start_time = timezone.now() + self.session.duration\n self.session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['started_at'], self.session.\n start_time.isoformat())\n 
self.assertEqual(user_session['finished_at'], None)\n self.assertEqual(user_session['status'], ExamStatus.not_available)\n\n def test_get_exams_unauthorized(self):\n self.get_exams.cookies = {}\n self.assertResponseError(self.get_exams.get(), errors.Unauthorized)\n response = self.get_exams.post()\n self.assertEqual(response.status_code, 405)\n\n def test_get_exams_score(self):\n for ticket in self.tickets:\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], sum(t.score for t in self.\n tickets))\n self.tickets[0].score = None\n self.tickets[0].save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], None)\n\n\nclass TestGetExamTickets(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n ticket_map: Dict[str, ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exam_questions = ApiClient('/api/tickets', student=self.\n student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exam_questions(self):\n self.assertFalse(self.student_session.check_in)\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n self.assertEqual([x['id'] for x in questions], [x.id for x in\n sorted(self.tickets, key=lambda x: x.question.stage)])\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket_question = ticket.question\n 
self.assertEqual(question.pop('id'), ticket.id)\n view = ticket_question.as_dict\n view.pop('id')\n self.assertEqual(question, view)\n\n def test_get_exam_questions_already_checked_in(self):\n self.student_session.check_in = True\n checkin_date = self.student_session.started_at\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n self.assertEqual(self.student_session.started_at, checkin_date)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n\n def test_get_exam_questions_not_available(self):\n self.session.start_time += self.session.duration\n self.session.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.not_available)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), 0)\n\n def test_get_exam_questions_submitted(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n answer = question.pop('answer')\n self.assertEqual(answer, ticket.answer)\n self.assertEqual(question['score'], 
None)\n\n def test_get_exam_questions_submitted_and_scored(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], sum(t.score for t in self.tickets))\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n self.assertEqual(question['score'], ticket.score)\n\n def test_get_exam_questions_invalid_params(self):\n self.assertResponseError(self.get_exam_questions.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.get_exam_questions.post(session_id=\n uuid_str()), errors.ExamNotFound)\n self.get_exam_questions.cookies = {}\n self.assertResponseError(self.get_exam_questions.post(session_id=\n self.student_session.id), errors.Unauthorized)\n response = self.get_exam_questions.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestSubmitExam(ApiTestCase):\n\n def setUp(self):\n super().setUp()\n self.submit_exam = ApiClient('/api/submit', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_submit_exam(self):\n answers = {}\n ANSWER = 'answer'\n for ticket in self.tickets:\n if ticket.question.type == QuestionType.single:\n answers[ticket.id] = random.randint(0, len(ticket.question.\n options) - 1)\n elif ticket.question.type == QuestionType.multi:\n answers[ticket.id] = random.sample(list(range(0, len(ticket\n .question.options))), k=random.randint(0, len(ticket.\n question.options)))\n else:\n answers[ticket.id] = ANSWER\n result = 
self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n if ticket.question.type == QuestionType.single:\n self.assertEqual(ticket.answer, ticket.question.options[\n answers[ticket.id]])\n elif ticket.question.type == QuestionType.multi:\n self.assertEqual(ticket.answer, ';'.join([ticket.question.\n options[x] for x in sorted(answers[ticket.id])]))\n self.assertIsNotNone(ticket.answered_at)\n\n def test_submit_without_any_answer(self):\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers={}))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(ticket.answered_at)\n self.assertIsNone(ticket.answer)\n\n def test_submit_partial_answer_errors(self):\n ANSWER = 'answer'\n answers = {self.tickets[0].id: len(self.tickets[0].question.options\n ), self.tickets[1].id: 0, self.tickets[2].id: ANSWER, uuid_str(\n ): ANSWER, (self.tickets[2].id + 1): ANSWER}\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(self.tickets[0].answer)\n self.assertIsNone(self.tickets[0].answered_at)\n self.assertIsNone(self.tickets[1].answer)\n self.assertIsNone(self.tickets[1].answered_at)\n self.assertEqual(self.tickets[2].answer, ANSWER)\n self.assertIsNotNone(self.tickets[2].answered_at)\n\n def test_submit_errors(self):\n 
self.assertResponseError(self.submit_exam.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=123),\n errors.InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n )), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers=[]), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers={}), errors.ExamNotFound)\n self.session.start_time += self.session.duration\n self.session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n self.student_session.start_time = timezone.now()\n self.student_session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n",
"step-4": "import random\nfrom datetime import timedelta\nfrom typing import Union, Type, Tuple, List, Dict\nfrom django import http\nfrom django.test import TestCase, Client\nfrom django.utils import timezone\nfrom exam_web import errors\nfrom exam_web.models import Student, AcademyGroup, uuid_str, ExamSession, UserSession, Question, Stage, QuestionType, ExamTicket, ExamStatus\n\n\nclass ApiClient(Client):\n path: str\n\n def __init__(self, path: str, student: Student=None, *args, **kwargs):\n super().__init__(*args, **kwargs)\n self.student = student\n self.path = path\n self.headers = {'content_type': 'application/json'}\n if student:\n self.cookies['student'] = student.id\n\n def path_params(self, **params):\n return ApiClient(self.path.format(**params), self.student)\n\n def get(self, **kwargs):\n return super().get(self.path, data=kwargs, **self.headers)\n\n def post(self, **json):\n return super().post(self.path, data=json, **self.headers)\n\n def __call__(self, **kwargs):\n raise AttributeError('Use `get` or `post` methods instead')\n\n\nclass ApiTestCase(TestCase):\n group: AcademyGroup\n student: Student\n\n @classmethod\n def setUpClass(cls):\n super().setUpClass()\n cls.group = AcademyGroup.objects.create(name='test_group')\n cls.student = Student.objects.create(name='test user', group=cls.group)\n\n @classmethod\n def tearDownClass(cls):\n cls.student.delete()\n cls.group.delete()\n super().tearDownClass()\n\n def setup_exam_objects(self):\n self.session = ExamSession.objects.create(start_time=timezone.now(),\n duration=timedelta(minutes=40))\n self.student_session = UserSession.objects.create(student=self.\n student, exam_session=self.session)\n self.questions = [Question.objects.create(stage=Stage.first, type=\n QuestionType.single, max_score=1, text='test single question',\n options=['a', 'b', 'c']), Question.objects.create(stage=Stage.\n first, type=QuestionType.multi, max_score=1, text=\n 'test multi question', options=['a', 'b', 'c']), 
Question.\n objects.create(stage=Stage.second, type=QuestionType.open,\n max_score=1, text='test open question', options=None)]\n self.tickets = [ExamTicket.objects.create(student=self.student,\n session=self.student_session, question=question) for question in\n self.questions]\n self.ticket_map = {x.id: x for x in self.tickets}\n\n def teardown_exam_objects(self):\n for ticket in self.tickets:\n ticket.delete()\n for question in self.questions:\n question.delete()\n self.student_session.delete()\n\n def assertResponseSuccess(self, response: http.HttpResponse):\n content = response.content.decode()\n self.assertEqual(response.status_code, 200, (response.status_code,\n content))\n content = response.json()\n self.assertIn('result', content, content)\n return content['result']\n\n def assertResponseError(self, response: http.JsonResponse, error: Union\n [errors.APIError, Type[errors.APIError]]=None) ->Tuple[int, str]:\n content = response.json()\n self.assertGreaterEqual(response.status_code, 400, (response.\n status_code, content))\n self.assertIn('error', content, content)\n if error is not None:\n if isinstance(error, type):\n error = error()\n self.assertEqual(response.status_code, error.status, (response.\n status_code, content))\n self.assertEqual(content['error'], error.message, (response.\n status_code, content))\n return response.status_code, content['error']\n\n\nclass TestAuthorize(ApiTestCase):\n authorize: ApiClient\n\n def setUp(self):\n super().setUp()\n self.authorize = ApiClient('/api/authorize')\n\n def test_authorized(self):\n response = self.authorize.post(token=self.student.id)\n result = self.assertResponseSuccess(response)\n self.assertEqual(response.cookies['student'].value, self.student.id)\n self.assertEqual(result['name'], self.student.name)\n self.assertEqual(result['group'], self.group.name)\n self.assertEqual(result['id'], self.student.id)\n\n def test_authorized_unknown_token(self):\n response = self.authorize.post(token=uuid_str())\n 
self.assertResponseError(response, errors.Unauthorized)\n\n def test_authorized_invalid_params(self):\n response = self.authorize.post()\n self.assertResponseError(response, errors.InvalidParameter('token'))\n response = self.authorize.post(token=12345678)\n self.assertResponseError(response, errors.InvalidParameter('token'))\n response = self.authorize.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestGetExamSessions(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exams = ApiClient('/api/exams', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exams_available(self):\n result = self.assertResponseSuccess(self.get_exams.get())\n self.assertIsInstance(result, list)\n self.assertEqual(len(result), 1)\n user_session = result[0]\n self.assertEqual(user_session['started_at'], self.session.\n start_time.isoformat())\n self.assertEqual(user_session['duration'], self.session.duration.\n total_seconds() / 60)\n self.assertEqual(user_session['checked_in'], False)\n self.assertEqual(user_session['finished_at'], None)\n self.assertEqual(user_session['status'], ExamStatus.available.value)\n self.assertEqual(user_session['score'], None)\n\n def test_get_exams_check_in(self):\n self.student_session.started_at = timezone.now()\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['checked_in'], True)\n\n def test_get_exams_submitted(self):\n now = timezone.now()\n self.student_session.started_at = timezone.now()\n self.student_session.finished_at = now\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['finished_at'], now.isoformat())\n 
self.assertEqual(user_session['status'], ExamStatus.submitted)\n self.assertEqual(user_session['score'], None)\n\n def test_get_exams_non_available(self):\n self.session.start_time = timezone.now() + self.session.duration\n self.session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['started_at'], self.session.\n start_time.isoformat())\n self.assertEqual(user_session['finished_at'], None)\n self.assertEqual(user_session['status'], ExamStatus.not_available)\n\n def test_get_exams_unauthorized(self):\n self.get_exams.cookies = {}\n self.assertResponseError(self.get_exams.get(), errors.Unauthorized)\n response = self.get_exams.post()\n self.assertEqual(response.status_code, 405)\n\n def test_get_exams_score(self):\n for ticket in self.tickets:\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], sum(t.score for t in self.\n tickets))\n self.tickets[0].score = None\n self.tickets[0].save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], None)\n\n\nclass TestGetExamTickets(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n ticket_map: Dict[str, ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exam_questions = ApiClient('/api/tickets', student=self.\n student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exam_questions(self):\n self.assertFalse(self.student_session.check_in)\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n 
self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n self.assertEqual([x['id'] for x in questions], [x.id for x in\n sorted(self.tickets, key=lambda x: x.question.stage)])\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket_question = ticket.question\n self.assertEqual(question.pop('id'), ticket.id)\n view = ticket_question.as_dict\n view.pop('id')\n self.assertEqual(question, view)\n\n def test_get_exam_questions_already_checked_in(self):\n self.student_session.check_in = True\n checkin_date = self.student_session.started_at\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n self.assertEqual(self.student_session.started_at, checkin_date)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n\n def test_get_exam_questions_not_available(self):\n self.session.start_time += self.session.duration\n self.session.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.not_available)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), 0)\n\n def test_get_exam_questions_submitted(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n 
session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n answer = question.pop('answer')\n self.assertEqual(answer, ticket.answer)\n self.assertEqual(question['score'], None)\n\n def test_get_exam_questions_submitted_and_scored(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], sum(t.score for t in self.tickets))\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n self.assertEqual(question['score'], ticket.score)\n\n def test_get_exam_questions_invalid_params(self):\n self.assertResponseError(self.get_exam_questions.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.get_exam_questions.post(session_id=\n uuid_str()), errors.ExamNotFound)\n self.get_exam_questions.cookies = {}\n self.assertResponseError(self.get_exam_questions.post(session_id=\n self.student_session.id), errors.Unauthorized)\n response = self.get_exam_questions.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestSubmitExam(ApiTestCase):\n\n def setUp(self):\n super().setUp()\n self.submit_exam = ApiClient('/api/submit', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n 
super().tearDown()\n\n def test_submit_exam(self):\n answers = {}\n ANSWER = 'answer'\n for ticket in self.tickets:\n if ticket.question.type == QuestionType.single:\n answers[ticket.id] = random.randint(0, len(ticket.question.\n options) - 1)\n elif ticket.question.type == QuestionType.multi:\n answers[ticket.id] = random.sample(list(range(0, len(ticket\n .question.options))), k=random.randint(0, len(ticket.\n question.options)))\n else:\n answers[ticket.id] = ANSWER\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n if ticket.question.type == QuestionType.single:\n self.assertEqual(ticket.answer, ticket.question.options[\n answers[ticket.id]])\n elif ticket.question.type == QuestionType.multi:\n self.assertEqual(ticket.answer, ';'.join([ticket.question.\n options[x] for x in sorted(answers[ticket.id])]))\n self.assertIsNotNone(ticket.answered_at)\n\n def test_submit_without_any_answer(self):\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers={}))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(ticket.answered_at)\n self.assertIsNone(ticket.answer)\n\n def test_submit_partial_answer_errors(self):\n ANSWER = 'answer'\n answers = {self.tickets[0].id: len(self.tickets[0].question.options\n ), self.tickets[1].id: 0, self.tickets[2].id: ANSWER, uuid_str(\n ): ANSWER, (self.tickets[2].id + 1): ANSWER}\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n 
self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(self.tickets[0].answer)\n self.assertIsNone(self.tickets[0].answered_at)\n self.assertIsNone(self.tickets[1].answer)\n self.assertIsNone(self.tickets[1].answered_at)\n self.assertEqual(self.tickets[2].answer, ANSWER)\n self.assertIsNotNone(self.tickets[2].answered_at)\n\n def test_submit_errors(self):\n self.assertResponseError(self.submit_exam.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=123),\n errors.InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n )), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers=[]), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers={}), errors.ExamNotFound)\n self.session.start_time += self.session.duration\n self.session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n self.student_session.start_time = timezone.now()\n self.student_session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n",
"step-5": "import random\nfrom datetime import timedelta\nfrom typing import Union, Type, Tuple, List, Dict\n\nfrom django import http\nfrom django.test import TestCase, Client\nfrom django.utils import timezone\n\nfrom exam_web import errors\nfrom exam_web.models import Student, AcademyGroup, uuid_str, ExamSession, \\\n UserSession, Question, Stage, QuestionType, ExamTicket, ExamStatus\n\n\nclass ApiClient(Client):\n path: str\n\n def __init__(self, path: str, student: Student = None, *args, **kwargs):\n super().__init__(*args, **kwargs)\n self.student = student\n self.path = path\n self.headers = {'content_type': 'application/json'}\n if student:\n self.cookies['student'] = student.id\n\n def path_params(self, **params):\n return ApiClient(self.path.format(**params), self.student)\n\n def get(self, **kwargs):\n return super().get(self.path, data=kwargs, **self.headers)\n\n def post(self, **json):\n return super().post(self.path, data=json, **self.headers)\n\n def __call__(self, **kwargs):\n raise AttributeError('Use `get` or `post` methods instead')\n\n\nclass ApiTestCase(TestCase):\n group: AcademyGroup\n student: Student\n\n @classmethod\n def setUpClass(cls):\n super().setUpClass()\n cls.group = AcademyGroup.objects.create(name='test_group')\n cls.student = Student.objects.create(name='test user', group=cls.group)\n\n @classmethod\n def tearDownClass(cls):\n cls.student.delete()\n cls.group.delete()\n super().tearDownClass()\n\n def setup_exam_objects(self):\n self.session = ExamSession.objects.create(\n start_time=timezone.now(), duration=timedelta(minutes=40))\n self.student_session = UserSession.objects.create(\n student=self.student, exam_session=self.session)\n self.questions = [\n Question.objects.create(\n stage=Stage.first, type=QuestionType.single, max_score=1,\n text='test single question', options=['a', 'b', 'c']\n ),\n Question.objects.create(\n stage=Stage.first, type=QuestionType.multi, max_score=1,\n text='test multi question', options=['a', 
'b', 'c']\n ),\n Question.objects.create(\n stage=Stage.second, type=QuestionType.open, max_score=1,\n text='test open question', options=None,\n ),\n ]\n self.tickets = [\n ExamTicket.objects.create(\n student=self.student, session=self.student_session,\n question=question) for question in self.questions\n ]\n self.ticket_map = {x.id: x for x in self.tickets}\n\n def teardown_exam_objects(self):\n for ticket in self.tickets:\n ticket.delete()\n for question in self.questions:\n question.delete()\n self.student_session.delete()\n\n def assertResponseSuccess(self, response: http.HttpResponse):\n content = response.content.decode()\n self.assertEqual(response.status_code, 200,\n (response.status_code, content))\n content = response.json()\n self.assertIn('result', content, content)\n return content['result']\n\n def assertResponseError(\n self, response: http.JsonResponse,\n error: Union[errors.APIError, Type[errors.APIError]] = None\n ) -> Tuple[int, str]:\n content = response.json()\n self.assertGreaterEqual(response.status_code, 400,\n (response.status_code, content))\n self.assertIn('error', content, content)\n if error is not None:\n if isinstance(error, type):\n error = error()\n self.assertEqual(response.status_code, error.status,\n (response.status_code, content))\n self.assertEqual(content['error'], error.message,\n (response.status_code, content))\n return response.status_code, content['error']\n\n\nclass TestAuthorize(ApiTestCase):\n authorize: ApiClient\n\n def setUp(self):\n super().setUp()\n self.authorize = ApiClient('/api/authorize')\n\n def test_authorized(self):\n response = self.authorize.post(token=self.student.id)\n result = self.assertResponseSuccess(response)\n\n self.assertEqual(response.cookies['student'].value, self.student.id)\n\n self.assertEqual(result['name'], self.student.name)\n self.assertEqual(result['group'], self.group.name)\n self.assertEqual(result['id'], self.student.id)\n\n def test_authorized_unknown_token(self):\n response = 
self.authorize.post(token=uuid_str())\n self.assertResponseError(response, errors.Unauthorized)\n\n def test_authorized_invalid_params(self):\n response = self.authorize.post()\n self.assertResponseError(response, errors.InvalidParameter('token'))\n\n response = self.authorize.post(token=12345678)\n self.assertResponseError(response, errors.InvalidParameter('token'))\n\n response = self.authorize.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestGetExamSessions(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exams = ApiClient('/api/exams', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exams_available(self):\n result = self.assertResponseSuccess(self.get_exams.get())\n self.assertIsInstance(result, list)\n self.assertEqual(len(result), 1)\n user_session = result[0]\n self.assertEqual(\n user_session['started_at'], self.session.start_time.isoformat())\n self.assertEqual(user_session['duration'],\n self.session.duration.total_seconds() / 60)\n self.assertEqual(user_session['checked_in'], False)\n self.assertEqual(user_session['finished_at'], None)\n self.assertEqual(user_session['status'], ExamStatus.available.value)\n self.assertEqual(user_session['score'], None)\n\n def test_get_exams_check_in(self):\n self.student_session.started_at = timezone.now()\n self.student_session.save()\n\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['checked_in'], True)\n\n def test_get_exams_submitted(self):\n now = timezone.now()\n self.student_session.started_at = timezone.now()\n self.student_session.finished_at = now\n self.student_session.save()\n\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n 
self.assertEqual(user_session['finished_at'], now.isoformat())\n self.assertEqual(user_session['status'], ExamStatus.submitted)\n self.assertEqual(user_session['score'], None)\n\n def test_get_exams_non_available(self):\n self.session.start_time = timezone.now() + self.session.duration\n self.session.save()\n\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['started_at'],\n self.session.start_time.isoformat())\n self.assertEqual(user_session['finished_at'], None)\n self.assertEqual(user_session['status'], ExamStatus.not_available)\n\n def test_get_exams_unauthorized(self):\n self.get_exams.cookies = {}\n self.assertResponseError(self.get_exams.get(), errors.Unauthorized)\n\n response = self.get_exams.post()\n self.assertEqual(response.status_code, 405)\n\n def test_get_exams_score(self):\n for ticket in self.tickets:\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'],\n sum(t.score for t in self.tickets))\n\n self.tickets[0].score = None\n self.tickets[0].save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], None)\n\n\nclass TestGetExamTickets(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n ticket_map: Dict[str, ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exam_questions = \\\n ApiClient('/api/tickets', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exam_questions(self):\n self.assertFalse(self.student_session.check_in)\n\n result = self.assertResponseSuccess(\n self.get_exam_questions.post(session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n 
self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n self.assertEqual([x['id'] for x in questions], [\n x.id for x in sorted(self.tickets, key=lambda x: x.question.stage)\n ])\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket_question = ticket.question\n self.assertEqual(question.pop('id'), ticket.id)\n view = ticket_question.as_dict\n view.pop('id')\n self.assertEqual(question, view)\n\n def test_get_exam_questions_already_checked_in(self):\n self.student_session.check_in = True\n checkin_date = self.student_session.started_at\n\n result = self.assertResponseSuccess(\n self.get_exam_questions.post(session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n self.assertEqual(self.student_session.started_at, checkin_date)\n\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n\n def test_get_exam_questions_not_available(self):\n self.session.start_time += self.session.duration\n self.session.save()\n\n result = self.assertResponseSuccess(\n self.get_exam_questions.post(session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.not_available)\n self.assertEqual(result['score'], None)\n\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), 0)\n\n def test_get_exam_questions_submitted(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.save()\n\n result = self.assertResponseSuccess(\n 
self.get_exam_questions.post(session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], None)\n\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n answer = question.pop('answer')\n self.assertEqual(answer, ticket.answer)\n self.assertEqual(question['score'], None)\n\n def test_get_exam_questions_submitted_and_scored(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.score = 1.0\n ticket.save()\n\n result = self.assertResponseSuccess(\n self.get_exam_questions.post(session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], sum(t.score for t in self.tickets))\n\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n self.assertEqual(question['score'], ticket.score)\n\n def test_get_exam_questions_invalid_params(self):\n self.assertResponseError(self.get_exam_questions.post(),\n errors.InvalidParameter('session_id'))\n self.assertResponseError(\n self.get_exam_questions.post(session_id=uuid_str()),\n errors.ExamNotFound)\n self.get_exam_questions.cookies = {}\n self.assertResponseError(\n self.get_exam_questions.post(session_id=self.student_session.id),\n errors.Unauthorized)\n\n response = self.get_exam_questions.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestSubmitExam(ApiTestCase):\n def setUp(self):\n super().setUp()\n self.submit_exam = ApiClient('/api/submit', student=self.student)\n self.setup_exam_objects()\n\n def 
tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_submit_exam(self):\n answers = {}\n ANSWER = 'answer'\n for ticket in self.tickets:\n if ticket.question.type == QuestionType.single:\n answers[ticket.id] = \\\n random.randint(0, len(ticket.question.options)-1)\n elif ticket.question.type == QuestionType.multi:\n answers[ticket.id] = random.sample(\n list(range(0, len(ticket.question.options))),\n k=random.randint(0, len(ticket.question.options))\n )\n else:\n answers[ticket.id] = ANSWER\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n if ticket.question.type == QuestionType.single:\n self.assertEqual(\n ticket.answer, ticket.question.options[answers[ticket.id]])\n elif ticket.question.type == QuestionType.multi:\n self.assertEqual(ticket.answer, ';'.join([\n ticket.question.options[x]\n for x in sorted(answers[ticket.id])\n ]))\n self.assertIsNotNone(ticket.answered_at)\n\n def test_submit_without_any_answer(self):\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers={}))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(ticket.answered_at)\n self.assertIsNone(ticket.answer)\n\n def test_submit_partial_answer_errors(self):\n ANSWER = 'answer'\n answers = {\n # неверный порядковый индекс ответа\n self.tickets[0].id: len(self.tickets[0].question.options),\n # неверный тип ответа\n self.tickets[1].id: 0,\n # корректный ответ\n self.tickets[2].id: ANSWER,\n # неверный ид билета\n uuid_str(): ANSWER,\n # несуществующий тикет\n self.tickets[2].id + 1: 
ANSWER,\n }\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(self.tickets[0].answer)\n self.assertIsNone(self.tickets[0].answered_at)\n self.assertIsNone(self.tickets[1].answer)\n self.assertIsNone(self.tickets[1].answered_at)\n self.assertEqual(self.tickets[2].answer, ANSWER)\n self.assertIsNotNone(self.tickets[2].answered_at)\n\n def test_submit_errors(self):\n self.assertResponseError(self.submit_exam.post(),\n errors.InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=123),\n errors.InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str()),\n errors.InvalidParameter('answers'))\n self.assertResponseError(\n self.submit_exam.post(session_id=uuid_str(), answers=[]),\n errors.InvalidParameter('answers'))\n self.assertResponseError(\n self.submit_exam.post(session_id=uuid_str(), answers={}),\n errors.ExamNotFound)\n\n self.session.start_time += self.session.duration\n self.session.save()\n self.assertResponseError(self.submit_exam.post(\n session_id=self.student_session.id, answers={}),\n errors.ExamNotAvailable)\n self.student_session.start_time = timezone.now()\n self.student_session.save()\n self.assertResponseError(self.submit_exam.post(\n session_id=self.student_session.id, answers={}),\n errors.ExamNotAvailable)\n",
"step-ids": [
15,
34,
42,
44,
45
]
}
|
[
15,
34,
42,
44,
45
] |
<|reserved_special_token_0|>
def convert2numeral(rom):
cur = 0
num = 0
while cur < len(rom):
if cur + 1 == len(rom):
num += e[rom[cur]]
elif e[rom[cur]] > e[rom[cur + 1]]:
num += e[rom[cur]]
cur += 1
elif e[rom[cur]] < e[rom[cur + 1]]:
num += e[rom[cur + 1]] - e[rom[cur]]
cur += 2
return num
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def convert2roman(num, rom=''):
if num == 0:
return rom
digits = len(str(num))
multiple = 10 ** (digits - 1)
cur = int(num / multiple)
cur = cur * multiple
num = num % multiple
halfway = 5 * multiple
fullway = 10 * multiple
if cur + multiple == halfway:
rom += d[multiple] + d[halfway]
elif cur + multiple == fullway:
rom += d[multiple] + d[fullway]
else:
if cur >= halfway:
cur -= halfway
rom += d[halfway]
if cur > 0:
rom += d[multiple] * int(cur / multiple)
return convert2roman(num, rom)
def convert2numeral(rom):
cur = 0
num = 0
while cur < len(rom):
if cur + 1 == len(rom):
num += e[rom[cur]]
elif e[rom[cur]] > e[rom[cur + 1]]:
num += e[rom[cur]]
cur += 1
elif e[rom[cur]] < e[rom[cur + 1]]:
num += e[rom[cur + 1]] - e[rom[cur]]
cur += 2
return num
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def convert2roman(num, rom=''):
if num == 0:
return rom
digits = len(str(num))
multiple = 10 ** (digits - 1)
cur = int(num / multiple)
cur = cur * multiple
num = num % multiple
halfway = 5 * multiple
fullway = 10 * multiple
if cur + multiple == halfway:
rom += d[multiple] + d[halfway]
elif cur + multiple == fullway:
rom += d[multiple] + d[fullway]
else:
if cur >= halfway:
cur -= halfway
rom += d[halfway]
if cur > 0:
rom += d[multiple] * int(cur / multiple)
return convert2roman(num, rom)
def convert2numeral(rom):
cur = 0
num = 0
while cur < len(rom):
if cur + 1 == len(rom):
num += e[rom[cur]]
elif e[rom[cur]] > e[rom[cur + 1]]:
num += e[rom[cur]]
cur += 1
elif e[rom[cur]] < e[rom[cur + 1]]:
num += e[rom[cur + 1]] - e[rom[cur]]
cur += 2
return num
<|reserved_special_token_0|>
print(a)
<|reserved_special_token_0|>
print(a)
<|reserved_special_token_1|>
d = {(1): 'I', (5): 'V', (10): 'X', (50): 'L', (100): 'C', (500): 'D', (
1000): 'M'}
e = {'I': 1, 'V': 5, 'X': 10, 'L': 50, 'C': 100, 'D': 500, 'M': 1000}
def convert2roman(num, rom=''):
if num == 0:
return rom
digits = len(str(num))
multiple = 10 ** (digits - 1)
cur = int(num / multiple)
cur = cur * multiple
num = num % multiple
halfway = 5 * multiple
fullway = 10 * multiple
if cur + multiple == halfway:
rom += d[multiple] + d[halfway]
elif cur + multiple == fullway:
rom += d[multiple] + d[fullway]
else:
if cur >= halfway:
cur -= halfway
rom += d[halfway]
if cur > 0:
rom += d[multiple] * int(cur / multiple)
return convert2roman(num, rom)
def convert2numeral(rom):
cur = 0
num = 0
while cur < len(rom):
if cur + 1 == len(rom):
num += e[rom[cur]]
elif e[rom[cur]] > e[rom[cur + 1]]:
num += e[rom[cur]]
cur += 1
elif e[rom[cur]] < e[rom[cur + 1]]:
num += e[rom[cur + 1]] - e[rom[cur]]
cur += 2
return num
a = convert2roman(499)
print(a)
a = convert2numeral(a)
print(a)
<|reserved_special_token_1|>
# Mapping from decimal value to its Roman numeral symbol.
d = {
    1: 'I',
    5: 'V',
    10: 'X',
    50: 'L',
    100: 'C',
    500: 'D',
    1000: 'M'
}

# Inverse mapping: Roman numeral symbol to its decimal value.
e = {
    'I': 1,
    'V': 5,
    'X': 10,
    'L': 50,
    'C': 100,
    'D': 500,
    'M': 1000
}


def convert2roman(num, rom=''):
    """Convert a non-negative integer to its Roman numeral string.

    Processes one decimal digit at a time, from the most significant
    place down, recursing on the remainder. `rom` is an accumulator
    and should normally be left at its default.
    """
    if num == 0:
        return rom

    digits = len(str(num))
    multiple = 10 ** (digits - 1)            # weight of the leading digit

    cur = int(num / multiple) * multiple     # value contributed by that digit
    num = num % multiple                     # remainder for the recursive call

    halfway = 5 * multiple                   # V, L or D for this place
    fullway = 10 * multiple                  # X, C or M for this place

    if cur + multiple == halfway:
        # Subtractive form such as IV, XL, CD.
        rom += d[multiple] + d[halfway]
    elif cur + multiple == fullway:
        # Subtractive form such as IX, XC, CM.
        rom += d[multiple] + d[fullway]
    else:
        if cur >= halfway:
            cur -= halfway
            rom += d[halfway]
        if cur > 0:
            # Additive repetition such as III or XXX.
            rom += d[multiple] * int(cur / multiple)
    return convert2roman(num, rom)


def convert2numeral(rom):
    """Convert a Roman numeral string back to its integer value.

    Scans left to right; a symbol strictly smaller than its successor
    forms a subtractive pair (e.g. IX = 9), otherwise the symbol is
    added on its own.
    """
    cur = 0
    num = 0
    while cur < len(rom):
        if cur + 1 == len(rom):
            num += e[rom[cur]]
            # BUG FIX: the index was never advanced here, so any input
            # whose scan landed exactly on the last symbol (e.g. 'VI')
            # looped forever.
            cur += 1
        elif e[rom[cur]] >= e[rom[cur + 1]]:
            # BUG FIX: was '>'; equal neighbours (e.g. 'III', 'XX')
            # matched neither branch and the loop never advanced.
            num += e[rom[cur]]
            cur += 1
        else:
            num += e[rom[cur + 1]] - e[rom[cur]]
            cur += 2
    return num


a = convert2roman(499)
print(a)
a = convert2numeral(a)
print(a)
|
flexible
|
{
"blob_id": "1a29b3138f6a33fbe2781f044c1bcccd03ecd48d",
"index": 7590,
"step-1": "<mask token>\n\n\ndef convert2numeral(rom):\n cur = 0\n num = 0\n while cur < len(rom):\n if cur + 1 == len(rom):\n num += e[rom[cur]]\n elif e[rom[cur]] > e[rom[cur + 1]]:\n num += e[rom[cur]]\n cur += 1\n elif e[rom[cur]] < e[rom[cur + 1]]:\n num += e[rom[cur + 1]] - e[rom[cur]]\n cur += 2\n return num\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef convert2roman(num, rom=''):\n if num == 0:\n return rom\n digits = len(str(num))\n multiple = 10 ** (digits - 1)\n cur = int(num / multiple)\n cur = cur * multiple\n num = num % multiple\n halfway = 5 * multiple\n fullway = 10 * multiple\n if cur + multiple == halfway:\n rom += d[multiple] + d[halfway]\n elif cur + multiple == fullway:\n rom += d[multiple] + d[fullway]\n else:\n if cur >= halfway:\n cur -= halfway\n rom += d[halfway]\n if cur > 0:\n rom += d[multiple] * int(cur / multiple)\n return convert2roman(num, rom)\n\n\ndef convert2numeral(rom):\n cur = 0\n num = 0\n while cur < len(rom):\n if cur + 1 == len(rom):\n num += e[rom[cur]]\n elif e[rom[cur]] > e[rom[cur + 1]]:\n num += e[rom[cur]]\n cur += 1\n elif e[rom[cur]] < e[rom[cur + 1]]:\n num += e[rom[cur + 1]] - e[rom[cur]]\n cur += 2\n return num\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef convert2roman(num, rom=''):\n if num == 0:\n return rom\n digits = len(str(num))\n multiple = 10 ** (digits - 1)\n cur = int(num / multiple)\n cur = cur * multiple\n num = num % multiple\n halfway = 5 * multiple\n fullway = 10 * multiple\n if cur + multiple == halfway:\n rom += d[multiple] + d[halfway]\n elif cur + multiple == fullway:\n rom += d[multiple] + d[fullway]\n else:\n if cur >= halfway:\n cur -= halfway\n rom += d[halfway]\n if cur > 0:\n rom += d[multiple] * int(cur / multiple)\n return convert2roman(num, rom)\n\n\ndef convert2numeral(rom):\n cur = 0\n num = 0\n while cur < len(rom):\n if cur + 1 == len(rom):\n num += e[rom[cur]]\n elif e[rom[cur]] > e[rom[cur + 1]]:\n num += e[rom[cur]]\n cur += 1\n elif e[rom[cur]] < e[rom[cur + 1]]:\n num += e[rom[cur + 1]] - e[rom[cur]]\n cur += 2\n return num\n\n\n<mask token>\nprint(a)\n<mask token>\nprint(a)\n",
"step-4": "d = {(1): 'I', (5): 'V', (10): 'X', (50): 'L', (100): 'C', (500): 'D', (\n 1000): 'M'}\ne = {'I': 1, 'V': 5, 'X': 10, 'L': 50, 'C': 100, 'D': 500, 'M': 1000}\n\n\ndef convert2roman(num, rom=''):\n if num == 0:\n return rom\n digits = len(str(num))\n multiple = 10 ** (digits - 1)\n cur = int(num / multiple)\n cur = cur * multiple\n num = num % multiple\n halfway = 5 * multiple\n fullway = 10 * multiple\n if cur + multiple == halfway:\n rom += d[multiple] + d[halfway]\n elif cur + multiple == fullway:\n rom += d[multiple] + d[fullway]\n else:\n if cur >= halfway:\n cur -= halfway\n rom += d[halfway]\n if cur > 0:\n rom += d[multiple] * int(cur / multiple)\n return convert2roman(num, rom)\n\n\ndef convert2numeral(rom):\n cur = 0\n num = 0\n while cur < len(rom):\n if cur + 1 == len(rom):\n num += e[rom[cur]]\n elif e[rom[cur]] > e[rom[cur + 1]]:\n num += e[rom[cur]]\n cur += 1\n elif e[rom[cur]] < e[rom[cur + 1]]:\n num += e[rom[cur + 1]] - e[rom[cur]]\n cur += 2\n return num\n\n\na = convert2roman(499)\nprint(a)\na = convert2numeral(a)\nprint(a)\n",
"step-5": "\nd = {\n 1 : 'I',\n 5 : 'V',\n 10: 'X',\n 50: 'L',\n 100: 'C',\n 500: 'D',\n 1000: 'M' \n\n }\n\n\ne = {\n 'I': 1,\n 'V': 5,\n 'X': 10,\n 'L': 50,\n 'C': 100,\n 'D': 500,\n 'M': 1000\n} \n\ndef convert2roman(num,rom = \"\"):\n \n if num == 0:\n return rom\n\n digits = len(str(num))\n multiple = 10 ** (digits -1)\n \n cur = int(num / multiple)\n cur = cur * multiple\n num = num % multiple\n\n halfway = 5 * multiple\n fullway = 10 * multiple\n\n if cur + multiple == halfway:\n rom += d[multiple] + d[halfway]\n elif cur + multiple == fullway:\n rom += d[multiple] + d[fullway]\n else:\n if cur >= halfway:\n cur -= halfway\n rom += d[halfway]\n if cur > 0:\n rom += d[multiple] * int(cur / multiple)\n return convert2roman(num,rom)\n \n\ndef convert2numeral(rom):\n\n cur = 0\n num = 0\n \n while cur < len(rom):\n if cur+1 == len(rom):\n num += e[rom[cur]]\n elif e[rom[cur]] > e[rom[cur+1]]:\n num += e[rom[cur]]\n cur += 1\n elif e[rom[cur]] < e[rom[cur+1]]:\n num += (e[rom[cur + 1]] - e[rom[cur]])\n cur += 2\n return num\n\n\na = convert2roman(499)\nprint(a)\na = convert2numeral(a)\nprint(a) \n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def get_week(year, month, day):
str_time = '%d-%d-%d' % (year, month, day)
time_tuple = time.strptime(str_time, '%Y-%m-%d')
tuple_week = '星期一', '星期二', '星期三', '星期四', '星期五', '星期六', '星期日'
return tuple_week[time_tuple[6]]
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def get_week(year, month, day):
str_time = '%d-%d-%d' % (year, month, day)
time_tuple = time.strptime(str_time, '%Y-%m-%d')
tuple_week = '星期一', '星期二', '星期三', '星期四', '星期五', '星期六', '星期日'
return tuple_week[time_tuple[6]]
print(get_week(2020, 1, 16))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import time
def get_week(year, month, day):
str_time = '%d-%d-%d' % (year, month, day)
time_tuple = time.strptime(str_time, '%Y-%m-%d')
tuple_week = '星期一', '星期二', '星期三', '星期四', '星期五', '星期六', '星期日'
return tuple_week[time_tuple[6]]
print(get_week(2020, 1, 16))
<|reserved_special_token_1|>
"""
Define a function that computes the weekday name from a year, month and day.
0 -> Monday (星期一)
1 -> Tuesday (星期二)
....
"""
import time

# Weekday names indexed by tm_wday (0 = Monday ... 6 = Sunday).
_WEEKDAYS = ("星期一", "星期二", "星期三", "星期四", "星期五", "星期六", "星期日")


def get_week(year, month, day):
    """Return the Chinese weekday name for the given calendar date."""
    parsed = time.strptime("%d-%d-%d" % (year, month, day), "%Y-%m-%d")
    return _WEEKDAYS[parsed.tm_wday]


print(get_week(2020, 1, 16))
|
flexible
|
{
"blob_id": "012d9b5aa13c557ad958343cadf935b73c808a56",
"index": 4535,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_week(year, month, day):\n str_time = '%d-%d-%d' % (year, month, day)\n time_tuple = time.strptime(str_time, '%Y-%m-%d')\n tuple_week = '星期一', '星期二', '星期三', '星期四', '星期五', '星期六', '星期日'\n return tuple_week[time_tuple[6]]\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef get_week(year, month, day):\n str_time = '%d-%d-%d' % (year, month, day)\n time_tuple = time.strptime(str_time, '%Y-%m-%d')\n tuple_week = '星期一', '星期二', '星期三', '星期四', '星期五', '星期六', '星期日'\n return tuple_week[time_tuple[6]]\n\n\nprint(get_week(2020, 1, 16))\n",
"step-4": "<mask token>\nimport time\n\n\ndef get_week(year, month, day):\n str_time = '%d-%d-%d' % (year, month, day)\n time_tuple = time.strptime(str_time, '%Y-%m-%d')\n tuple_week = '星期一', '星期二', '星期三', '星期四', '星期五', '星期六', '星期日'\n return tuple_week[time_tuple[6]]\n\n\nprint(get_week(2020, 1, 16))\n",
"step-5": "\"\"\"\n 定义函数,根据年、月、日计算星期。\n 0 星期一\n 1 星期二\n ....\n\"\"\"\nimport time\n\n\ndef get_week(year, month, day):\n str_time = \"%d-%d-%d\" % (year, month, day)\n time_tuple = time.strptime(str_time, \"%Y-%m-%d\")\n tuple_week = (\"星期一\", \"星期二\", \"星期三\", \"星期四\", \"星期五\", \"星期六\", \"星期日\")\n return tuple_week[time_tuple[6]]\n\n\nprint(get_week(2020, 1, 16))\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class ThalesSpider(scrapy.Spider):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ThalesSpider(scrapy.Spider):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def parse(self, response):
for entry in response.css('div.big__list__item__info'):
item = ScrapyItem()
item['source'] = 'thales'
item['date'] = 'NotAvalaible'
item['brief'] = entry.css('div.field__item even::text'
).extract_first()
item['url'] = entry.css('a::attr(href)').extract_first()
item['title'] = entry.css('a::text').extract_first()
now = datetime.datetime.now()
item['tstamp'] = now
print(item)
yield item
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ThalesSpider(scrapy.Spider):
name = 'thales'
allowed_domains = ['https://www.thalesgroup.com']
start_urls = [(
'https://www.thalesgroup.com/fr/search-everything/all/propulsion?page=%d'
% i) for i in range(0, 30)]
def parse(self, response):
for entry in response.css('div.big__list__item__info'):
item = ScrapyItem()
item['source'] = 'thales'
item['date'] = 'NotAvalaible'
item['brief'] = entry.css('div.field__item even::text'
).extract_first()
item['url'] = entry.css('a::attr(href)').extract_first()
item['title'] = entry.css('a::text').extract_first()
now = datetime.datetime.now()
item['tstamp'] = now
print(item)
yield item
<|reserved_special_token_1|>
import datetime
import scrapy
from ScrapyProject.items import ScrapyItem
class ThalesSpider(scrapy.Spider):
name = 'thales'
allowed_domains = ['https://www.thalesgroup.com']
start_urls = [(
'https://www.thalesgroup.com/fr/search-everything/all/propulsion?page=%d'
% i) for i in range(0, 30)]
def parse(self, response):
for entry in response.css('div.big__list__item__info'):
item = ScrapyItem()
item['source'] = 'thales'
item['date'] = 'NotAvalaible'
item['brief'] = entry.css('div.field__item even::text'
).extract_first()
item['url'] = entry.css('a::attr(href)').extract_first()
item['title'] = entry.css('a::text').extract_first()
now = datetime.datetime.now()
item['tstamp'] = now
print(item)
yield item
<|reserved_special_token_1|>
# This package will contain the spiders of your Scrapy project
#
# Please refer to the documentation for information on how to create and manage
# your spiders.
import datetime
import scrapy
from ScrapyProject.items import ScrapyItem
class ThalesSpider(scrapy.Spider):
    """Crawl Thales search-result pages and yield one item per result entry."""

    name = 'thales'
    allowed_domains = ['https://www.thalesgroup.com']
    # One search-result page per request, pages 0..29.
    start_urls = [
        'https://www.thalesgroup.com/fr/search-everything/all/propulsion?page=%d' % page
        for page in range(0, 30)
    ]

    def parse(self, response):
        """Extract title, url and brief from every result on the page."""
        for info in response.css('div.big__list__item__info'):
            item = ScrapyItem()
            item['source'] = 'thales'
            item['date'] = 'NotAvalaible'
            # NOTE(review): 'div.field__item even' selects an <even> tag
            # *inside* div.field__item — possibly meant the class selector
            # 'div.field__item.even'; confirm against the live page markup.
            item['brief'] = info.css('div.field__item even::text').extract_first()
            item['url'] = info.css('a::attr(href)').extract_first()
            item['title'] = info.css('a::text').extract_first()
            # Record the scrape time on the item itself.
            item['tstamp'] = datetime.datetime.now()
            print(item)
            yield item
|
flexible
|
{
"blob_id": "fd1b871c5cf79874acf8d5c4f1f73f7a381e23f7",
"index": 8278,
"step-1": "<mask token>\n\n\nclass ThalesSpider(scrapy.Spider):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass ThalesSpider(scrapy.Spider):\n <mask token>\n <mask token>\n <mask token>\n\n def parse(self, response):\n for entry in response.css('div.big__list__item__info'):\n item = ScrapyItem()\n item['source'] = 'thales'\n item['date'] = 'NotAvalaible'\n item['brief'] = entry.css('div.field__item even::text'\n ).extract_first()\n item['url'] = entry.css('a::attr(href)').extract_first()\n item['title'] = entry.css('a::text').extract_first()\n now = datetime.datetime.now()\n item['tstamp'] = now\n print(item)\n yield item\n",
"step-3": "<mask token>\n\n\nclass ThalesSpider(scrapy.Spider):\n name = 'thales'\n allowed_domains = ['https://www.thalesgroup.com']\n start_urls = [(\n 'https://www.thalesgroup.com/fr/search-everything/all/propulsion?page=%d'\n % i) for i in range(0, 30)]\n\n def parse(self, response):\n for entry in response.css('div.big__list__item__info'):\n item = ScrapyItem()\n item['source'] = 'thales'\n item['date'] = 'NotAvalaible'\n item['brief'] = entry.css('div.field__item even::text'\n ).extract_first()\n item['url'] = entry.css('a::attr(href)').extract_first()\n item['title'] = entry.css('a::text').extract_first()\n now = datetime.datetime.now()\n item['tstamp'] = now\n print(item)\n yield item\n",
"step-4": "import datetime\nimport scrapy\nfrom ScrapyProject.items import ScrapyItem\n\n\nclass ThalesSpider(scrapy.Spider):\n name = 'thales'\n allowed_domains = ['https://www.thalesgroup.com']\n start_urls = [(\n 'https://www.thalesgroup.com/fr/search-everything/all/propulsion?page=%d'\n % i) for i in range(0, 30)]\n\n def parse(self, response):\n for entry in response.css('div.big__list__item__info'):\n item = ScrapyItem()\n item['source'] = 'thales'\n item['date'] = 'NotAvalaible'\n item['brief'] = entry.css('div.field__item even::text'\n ).extract_first()\n item['url'] = entry.css('a::attr(href)').extract_first()\n item['title'] = entry.css('a::text').extract_first()\n now = datetime.datetime.now()\n item['tstamp'] = now\n print(item)\n yield item\n",
"step-5": "# This package will contain the spiders of your Scrapy project\n#\n# Please refer to the documentation for information on how to create and manage\n# your spiders.\n\nimport datetime\nimport scrapy\nfrom ScrapyProject.items import ScrapyItem\n\nclass ThalesSpider(scrapy.Spider):\n\t#item_id = ScrapyItem()\n\tname = 'thales'\n\n\n\tallowed_domains = ['https://www.thalesgroup.com']\n\n\tstart_urls = [('https://www.thalesgroup.com/fr/search-everything/all/propulsion?page=%d' %i ) for i in range(0,30)]\n\n\tdef parse(self, response):\n # iterate entries\n\n\n\t\tfor entry in response.css('div.big__list__item__info'):\n\n #retrieve info for our current post\n\t\t\titem = ScrapyItem()\n\n\t\t\titem['source'] = 'thales'\n\t\t\titem['date'] = 'NotAvalaible'\n\t\t\titem['brief'] = entry.css('div.field__item even::text').extract_first()\n\t\t\titem['url'] = entry.css('a::attr(href)').extract_first()\n\t\t\titem['title'] = entry.css('a::text').extract_first()\n\n\t\t\t# check time\n\t\t\tnow = datetime.datetime.now()\n\t\t\titem['tstamp'] = now\n\n\t\t\tprint(item)\n\n\t\t\tyield item\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
if n % 2 == 0:
for i in range(0, n // 2):
a[i], a[n // 2 + i] = a[n // 2 + i], a[i]
print('after swap:', a)
else:
for i in range(0, n // 2):
a[i], a[n // 2 + i + 1] = a[n // 2 + i + 1], a[i]
print('after swap:', a)
<|reserved_special_token_1|>
a = eval(input('enter a list: '))
n = len(a)
if n % 2 == 0:
for i in range(0, n // 2):
a[i], a[n // 2 + i] = a[n // 2 + i], a[i]
print('after swap:', a)
else:
for i in range(0, n // 2):
a[i], a[n // 2 + i + 1] = a[n // 2 + i + 1], a[i]
print('after swap:', a)
<|reserved_special_token_1|>
# Swap the first and second halves of a user-supplied list in place.
# SECURITY NOTE: eval() executes arbitrary expressions; acceptable for a
# local exercise, but use ast.literal_eval for anything untrusted.
a = eval(input('enter a list: '))
n = len(a)
# For even n the second half starts at n // 2; for odd n the middle
# element stays put, so the second half starts one position later.
# (n + 1) // 2 covers both cases, replacing the two duplicated branches.
offset = (n + 1) // 2
for i in range(n // 2):
    a[i], a[offset + i] = a[offset + i], a[i]
print('after swap:', a)
|
flexible
|
{
"blob_id": "18435f43e2f52e3d2e9ff6411f8dd0510d2da54d",
"index": 656,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif n % 2 == 0:\n for i in range(0, n // 2):\n a[i], a[n // 2 + i] = a[n // 2 + i], a[i]\n print('after swap:', a)\nelse:\n for i in range(0, n // 2):\n a[i], a[n // 2 + i + 1] = a[n // 2 + i + 1], a[i]\n print('after swap:', a)\n",
"step-3": "a = eval(input('enter a list: '))\nn = len(a)\nif n % 2 == 0:\n for i in range(0, n // 2):\n a[i], a[n // 2 + i] = a[n // 2 + i], a[i]\n print('after swap:', a)\nelse:\n for i in range(0, n // 2):\n a[i], a[n // 2 + i + 1] = a[n // 2 + i + 1], a[i]\n print('after swap:', a)\n",
"step-4": "a=eval(input('enter a list: '))\r\nn=len(a)\r\nif (n%2==0):\r\n for i in range(0,n//2):\r\n a[i],a[n//2+i]=a[n//2+i],a[i]\r\n print('after swap:',a)\r\nelse:\r\n for i in range(0,n//2):\r\n a[i],a[n//2+i+1]=a[n//2+i+1],a[i]\r\n print('after swap:',a)\r\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# addtwo_run-py
"""
Train and test a TCN on the add two dataset.
Trying to reproduce https://arxiv.org/abs/1803.01271.
"""
print('Importing modules')
import torch
import torch.nn as nn
import torch.optim as optim
import torch.nn.functional as F
from torch.utils.tensorboard import SummaryWriter
from torch.utils.data import DataLoader
import argparse
import sys
sys.path.append('')
sys.path.append("../../")
from data import AddTwoDataSet
from model import TCN
print('modules imported')
def _str2bool(value):
    """Parse a command-line boolean ('true'/'false', case-insensitive).

    argparse's ``type=bool`` is a trap: ``bool('False')`` is ``True``
    because every non-empty string is truthy. This helper parses the
    text properly and rejects anything that is not clearly boolean.
    """
    if isinstance(value, bool):
        return value
    lowered = value.lower()
    if lowered in ('true', 't', 'yes', '1'):
        return True
    if lowered in ('false', 'f', 'no', '0'):
        return False
    raise argparse.ArgumentTypeError('expected a boolean, got %r' % value)


def parse():
    """Build and parse the command-line arguments for the adding problem.

    Returns:
        argparse.Namespace with dataset sizes, TCN hyper-parameters,
        training options and logging paths. Defaults are unchanged.
    """
    parser = argparse.ArgumentParser(description='Adding Problem')
    # Dataset.
    parser.add_argument(
        '--N_train', type=int, default=50000, metavar='N_train')
    parser.add_argument(
        '--N_test', type=int, default=1000, metavar='N_test')
    parser.add_argument(
        '--seq_length', type=int, default=200, metavar='seq_length')
    parser.add_argument(
        '--batch_size', type=int, default=32, metavar='batch_size')
    # Model architecture.
    parser.add_argument(
        '--num_layers', type=int, default=8, metavar='num_layers')
    parser.add_argument(
        '--in_channels', type=int, default=2, metavar='in_channels')
    parser.add_argument(
        '--out_channels', type=int, default=1, metavar='out_channels')
    parser.add_argument(
        '--kernel_size', type=int, default=7, metavar='kernel_size')
    parser.add_argument(
        '--res_block_size', type=int, default=30, metavar='res_block_size')
    # BUG FIX: boolean flags previously used type=bool, which treats any
    # non-empty string (including 'False') as True; _str2bool parses them.
    parser.add_argument(
        '--bias', type=_str2bool, default=True, metavar='bias')
    parser.add_argument(
        '--dropout', type=float, default=0.0, metavar='dropout')
    parser.add_argument(
        '--stride', type=int, default=1, metavar='stride')
    parser.add_argument(
        '--leveledinit', type=_str2bool, default=False, metavar='leveledinit')
    # Training / logging.
    parser.add_argument(
        '--model_save_path', type=str, default='adding_problem/models/tcn_addtwo.pt',
        metavar='model_save_path')
    parser.add_argument(
        '--epochs', type=int, default=10, metavar='epochs')
    parser.add_argument(
        '--lr', type=float, default=2e-3, metavar='lr')
    parser.add_argument(
        '--clip', type=_str2bool, default=False, metavar='clip')
    parser.add_argument(
        '--log_interval', type=int, default=100, metavar='log_interval')
    parser.add_argument(
        '--writer_path', type=str, default='adding_problem/sruns/add_two1',
        metavar='writer_path')
    parser.add_argument(
        '--print', type=_str2bool, default=False, metavar='print')
    parser.add_argument(
        '--num_workers', type=int, default=0, metavar='num_workers')
    args = parser.parse_args()
    return args
def run():
    """Train a TCN on the add-two dataset and evaluate it each epoch.

    Reads all hyper-parameters from the command line (see ``parse``),
    logs train/test loss to TensorBoard, and saves the trained weights
    to ``args.model_save_path``.

    Returns:
        0 on completion.
    """
    torch.manual_seed(1729)

    # --- Setup -------------------------------------------------------
    args = parse()
    device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
    print(device)

    # --- Dataset -----------------------------------------------------
    train_dataset = AddTwoDataSet(N=args.N_train, seq_length=args.seq_length)
    test_dataset = AddTwoDataSet(N=args.N_test, seq_length=args.seq_length)
    train_loader = DataLoader(
        dataset=train_dataset, batch_size=args.batch_size, shuffle=True,
        num_workers=args.num_workers)
    test_loader = DataLoader(
        dataset=test_dataset, batch_size=args.batch_size, shuffle=True,
        num_workers=args.num_workers)

    # --- Model -------------------------------------------------------
    tcn = TCN(
        num_layers=args.num_layers,
        in_channels=args.in_channels,
        out_channels=args.out_channels,
        kernel_size=args.kernel_size,
        residual_blocks_channel_size=[args.res_block_size] * args.num_layers,
        bias=args.bias,
        dropout=args.dropout,
        stride=args.stride,
        dilations=None,
        leveledinit=args.leveledinit)
    tcn.to(device)
    if args.print:
        n_params = sum(p.numel() for p in tcn.parameters() if p.requires_grad)
        print(f"Number of learnable parameters : {n_params}")

    # --- Training objects --------------------------------------------
    criterion = nn.MSELoss()
    optimizer = optim.Adam(tcn.parameters(), lr=args.lr)
    writer = SummaryWriter(args.writer_path)

    for ep in range(1, args.epochs + 1):
        # --- Train one epoch -----------------------------------------
        tcn.train()
        total_loss = 0
        for i, data in enumerate(train_loader):
            x, y = data[0].to(device), data[1].to(device)
            optimizer.zero_grad()
            output = tcn(x)
            loss = F.mse_loss(output, y)
            loss.backward()
            if args.clip > 0:
                # BUG FIX: previously referenced the undefined names
                # `model` and `clip`, raising NameError whenever
                # gradient clipping was enabled.
                torch.nn.utils.clip_grad_norm_(tcn.parameters(), args.clip)
            optimizer.step()
            total_loss += loss.item()

            if i % args.log_interval == 0:
                cur_loss = total_loss / args.log_interval
                processed = min(i * args.batch_size, args.N_train)
                writer.add_scalar('training_loss', cur_loss, processed)
                if args.print:
                    print(
                        (f"Train Epoch: {ep:2d}"
                         f"[{processed:6d}/{args.N_train:6d}"
                         f"({100.*processed/args.N_train:.0f}%)]"
                         f"\tLearning rate: {args.lr:.4f}\tLoss: {cur_loss:.6f}"))
                total_loss = 0

        # --- Evaluate ------------------------------------------------
        tcn.eval()
        with torch.no_grad():
            for data in test_loader:
                x, y = data[0].to(device), data[1].to(device)
                output = tcn(x)
                test_loss = criterion(output, y)
            # NOTE(review): this logs only the final batch's loss under
            # the label "Average loss", not a true mean over the test
            # set — kept as-is to preserve existing logged metrics.
            if args.print:
                print(f'\nTest set: Average loss: {test_loss.item():.6f}\n')
            writer.add_scalar('test_loss', test_loss.item(), ep)

    writer.close()
    torch.save(tcn.state_dict(), args.model_save_path)
    print('Finished Training')
    return 0
if __name__ == "__main__":
run()
|
normal
|
{
"blob_id": "fe1a9804862942491b11b9baceecd37bf628fbb8",
"index": 8732,
"step-1": "<mask token>\n\n\ndef run():\n torch.manual_seed(1729)\n \"\"\" Setup \"\"\"\n args = parse()\n device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')\n print(device)\n \"\"\" Dataset \"\"\"\n train_dataset = AddTwoDataSet(N=args.N_train, seq_length=args.seq_length)\n test_dataset = AddTwoDataSet(N=args.N_test, seq_length=args.seq_length)\n train_loader = DataLoader(dataset=train_dataset, batch_size=args.\n batch_size, shuffle=True, num_workers=args.num_workers)\n test_loader = DataLoader(dataset=test_dataset, batch_size=args.\n batch_size, shuffle=True, num_workers=args.num_workers)\n \"\"\" TCN \"\"\"\n tcn = TCN(num_layers=args.num_layers, in_channels=args.in_channels,\n out_channels=args.out_channels, kernel_size=args.kernel_size,\n residual_blocks_channel_size=[args.res_block_size] * args.\n num_layers, bias=args.bias, dropout=args.dropout, stride=args.\n stride, dilations=None, leveledinit=args.leveledinit)\n tcn.to(device)\n if args.print:\n print(\n f'Number of learnable parameters : {sum(p.numel() for p in tcn.parameters() if p.requires_grad)}'\n )\n \"\"\" Training parameters\"\"\"\n criterion = nn.MSELoss()\n optimizer = optim.Adam(tcn.parameters(), lr=args.lr)\n \"\"\" Tensorboard \"\"\"\n writer = SummaryWriter(args.writer_path)\n for ep in range(1, args.epochs + 1):\n \"\"\" TRAIN \"\"\"\n tcn.train()\n total_loss = 0\n for i, data in enumerate(train_loader):\n x, y = data[0].to(device), data[1].to(device)\n optimizer.zero_grad()\n output = tcn(x)\n loss = F.mse_loss(output, y)\n loss.backward()\n if args.clip > 0:\n torch.nn.utils.clip_grad_norm_(model.parameters(), clip)\n optimizer.step()\n total_loss += loss.item()\n if i % args.log_interval == 0:\n cur_loss = total_loss / args.log_interval\n processed = min(i * args.batch_size, args.N_train)\n writer.add_scalar('training_loss', cur_loss, processed)\n if args.print:\n print(\n f'Train Epoch: {ep:2d}[{processed:6d}/{args.N_train:6d}({100.0 * processed / 
args.N_train:.0f}%)]\\tLearning rate: {args.lr:.4f}\\tLoss: {cur_loss:.6f}'\n )\n total_loss = 0\n \"\"\" EVALUATE \"\"\"\n tcn.eval()\n with torch.no_grad():\n for data in test_loader:\n x, y = data[0].to(device), data[1].to(device)\n output = tcn(x)\n test_loss = criterion(output, y)\n if args.print:\n print(f'\\nTest set: Average loss: {test_loss.item():.6f}\\n'\n )\n writer.add_scalar('test_loss', test_loss.item(), ep)\n writer.close()\n torch.save(tcn.state_dict(), args.model_save_path)\n print('Finished Training')\n return 0\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef parse():\n parser = argparse.ArgumentParser(description='Adding Problem')\n parser.add_argument('--N_train', type=int, default=50000, metavar='N_train'\n )\n parser.add_argument('--N_test', type=int, default=1000, metavar='N_test')\n parser.add_argument('--seq_length', type=int, default=200, metavar=\n 'seq_length')\n parser.add_argument('--batch_size', type=int, default=32, metavar=\n 'batch_size')\n parser.add_argument('--num_layers', type=int, default=8, metavar=\n 'num_layers')\n parser.add_argument('--in_channels', type=int, default=2, metavar=\n 'in_channels')\n parser.add_argument('--out_channels', type=int, default=1, metavar=\n 'out_channels')\n parser.add_argument('--kernel_size', type=int, default=7, metavar=\n 'kernel_size')\n parser.add_argument('--res_block_size', type=int, default=30, metavar=\n 'res_block_size')\n parser.add_argument('--bias', type=bool, default=True, metavar='bias')\n parser.add_argument('--dropout', type=float, default=0.0, metavar='dropout'\n )\n parser.add_argument('--stride', type=int, default=1, metavar='stride')\n parser.add_argument('--leveledinit', type=bool, default=False, metavar=\n 'leveledinit')\n parser.add_argument('--model_save_path', type=str, default=\n 'adding_problem/models/tcn_addtwo.pt', metavar='model_save_path')\n parser.add_argument('--epochs', type=int, default=10, metavar='epochs')\n parser.add_argument('--lr', type=float, default=0.002, metavar='lr')\n parser.add_argument('--clip', type=bool, default=False, metavar='clip')\n parser.add_argument('--log_interval', type=int, default=100, metavar=\n 'log_interval')\n parser.add_argument('--writer_path', type=str, default=\n 'adding_problem/sruns/add_two1', metavar='writer_path')\n parser.add_argument('--print', type=bool, default=False, metavar='print')\n parser.add_argument('--num_workers', type=int, default=0, metavar=\n 'num_workers')\n args = parser.parse_args()\n return args\n\n\ndef run():\n torch.manual_seed(1729)\n 
\"\"\" Setup \"\"\"\n args = parse()\n device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')\n print(device)\n \"\"\" Dataset \"\"\"\n train_dataset = AddTwoDataSet(N=args.N_train, seq_length=args.seq_length)\n test_dataset = AddTwoDataSet(N=args.N_test, seq_length=args.seq_length)\n train_loader = DataLoader(dataset=train_dataset, batch_size=args.\n batch_size, shuffle=True, num_workers=args.num_workers)\n test_loader = DataLoader(dataset=test_dataset, batch_size=args.\n batch_size, shuffle=True, num_workers=args.num_workers)\n \"\"\" TCN \"\"\"\n tcn = TCN(num_layers=args.num_layers, in_channels=args.in_channels,\n out_channels=args.out_channels, kernel_size=args.kernel_size,\n residual_blocks_channel_size=[args.res_block_size] * args.\n num_layers, bias=args.bias, dropout=args.dropout, stride=args.\n stride, dilations=None, leveledinit=args.leveledinit)\n tcn.to(device)\n if args.print:\n print(\n f'Number of learnable parameters : {sum(p.numel() for p in tcn.parameters() if p.requires_grad)}'\n )\n \"\"\" Training parameters\"\"\"\n criterion = nn.MSELoss()\n optimizer = optim.Adam(tcn.parameters(), lr=args.lr)\n \"\"\" Tensorboard \"\"\"\n writer = SummaryWriter(args.writer_path)\n for ep in range(1, args.epochs + 1):\n \"\"\" TRAIN \"\"\"\n tcn.train()\n total_loss = 0\n for i, data in enumerate(train_loader):\n x, y = data[0].to(device), data[1].to(device)\n optimizer.zero_grad()\n output = tcn(x)\n loss = F.mse_loss(output, y)\n loss.backward()\n if args.clip > 0:\n torch.nn.utils.clip_grad_norm_(model.parameters(), clip)\n optimizer.step()\n total_loss += loss.item()\n if i % args.log_interval == 0:\n cur_loss = total_loss / args.log_interval\n processed = min(i * args.batch_size, args.N_train)\n writer.add_scalar('training_loss', cur_loss, processed)\n if args.print:\n print(\n f'Train Epoch: {ep:2d}[{processed:6d}/{args.N_train:6d}({100.0 * processed / args.N_train:.0f}%)]\\tLearning rate: {args.lr:.4f}\\tLoss: {cur_loss:.6f}'\n )\n 
total_loss = 0\n \"\"\" EVALUATE \"\"\"\n tcn.eval()\n with torch.no_grad():\n for data in test_loader:\n x, y = data[0].to(device), data[1].to(device)\n output = tcn(x)\n test_loss = criterion(output, y)\n if args.print:\n print(f'\\nTest set: Average loss: {test_loss.item():.6f}\\n'\n )\n writer.add_scalar('test_loss', test_loss.item(), ep)\n writer.close()\n torch.save(tcn.state_dict(), args.model_save_path)\n print('Finished Training')\n return 0\n\n\n<mask token>\n",
"step-3": "<mask token>\nprint('Importing modules')\n<mask token>\nsys.path.append('')\nsys.path.append('../../')\n<mask token>\nprint('modules imported')\n\n\ndef parse():\n parser = argparse.ArgumentParser(description='Adding Problem')\n parser.add_argument('--N_train', type=int, default=50000, metavar='N_train'\n )\n parser.add_argument('--N_test', type=int, default=1000, metavar='N_test')\n parser.add_argument('--seq_length', type=int, default=200, metavar=\n 'seq_length')\n parser.add_argument('--batch_size', type=int, default=32, metavar=\n 'batch_size')\n parser.add_argument('--num_layers', type=int, default=8, metavar=\n 'num_layers')\n parser.add_argument('--in_channels', type=int, default=2, metavar=\n 'in_channels')\n parser.add_argument('--out_channels', type=int, default=1, metavar=\n 'out_channels')\n parser.add_argument('--kernel_size', type=int, default=7, metavar=\n 'kernel_size')\n parser.add_argument('--res_block_size', type=int, default=30, metavar=\n 'res_block_size')\n parser.add_argument('--bias', type=bool, default=True, metavar='bias')\n parser.add_argument('--dropout', type=float, default=0.0, metavar='dropout'\n )\n parser.add_argument('--stride', type=int, default=1, metavar='stride')\n parser.add_argument('--leveledinit', type=bool, default=False, metavar=\n 'leveledinit')\n parser.add_argument('--model_save_path', type=str, default=\n 'adding_problem/models/tcn_addtwo.pt', metavar='model_save_path')\n parser.add_argument('--epochs', type=int, default=10, metavar='epochs')\n parser.add_argument('--lr', type=float, default=0.002, metavar='lr')\n parser.add_argument('--clip', type=bool, default=False, metavar='clip')\n parser.add_argument('--log_interval', type=int, default=100, metavar=\n 'log_interval')\n parser.add_argument('--writer_path', type=str, default=\n 'adding_problem/sruns/add_two1', metavar='writer_path')\n parser.add_argument('--print', type=bool, default=False, metavar='print')\n parser.add_argument('--num_workers', 
type=int, default=0, metavar=\n 'num_workers')\n args = parser.parse_args()\n return args\n\n\ndef run():\n torch.manual_seed(1729)\n \"\"\" Setup \"\"\"\n args = parse()\n device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')\n print(device)\n \"\"\" Dataset \"\"\"\n train_dataset = AddTwoDataSet(N=args.N_train, seq_length=args.seq_length)\n test_dataset = AddTwoDataSet(N=args.N_test, seq_length=args.seq_length)\n train_loader = DataLoader(dataset=train_dataset, batch_size=args.\n batch_size, shuffle=True, num_workers=args.num_workers)\n test_loader = DataLoader(dataset=test_dataset, batch_size=args.\n batch_size, shuffle=True, num_workers=args.num_workers)\n \"\"\" TCN \"\"\"\n tcn = TCN(num_layers=args.num_layers, in_channels=args.in_channels,\n out_channels=args.out_channels, kernel_size=args.kernel_size,\n residual_blocks_channel_size=[args.res_block_size] * args.\n num_layers, bias=args.bias, dropout=args.dropout, stride=args.\n stride, dilations=None, leveledinit=args.leveledinit)\n tcn.to(device)\n if args.print:\n print(\n f'Number of learnable parameters : {sum(p.numel() for p in tcn.parameters() if p.requires_grad)}'\n )\n \"\"\" Training parameters\"\"\"\n criterion = nn.MSELoss()\n optimizer = optim.Adam(tcn.parameters(), lr=args.lr)\n \"\"\" Tensorboard \"\"\"\n writer = SummaryWriter(args.writer_path)\n for ep in range(1, args.epochs + 1):\n \"\"\" TRAIN \"\"\"\n tcn.train()\n total_loss = 0\n for i, data in enumerate(train_loader):\n x, y = data[0].to(device), data[1].to(device)\n optimizer.zero_grad()\n output = tcn(x)\n loss = F.mse_loss(output, y)\n loss.backward()\n if args.clip > 0:\n torch.nn.utils.clip_grad_norm_(model.parameters(), clip)\n optimizer.step()\n total_loss += loss.item()\n if i % args.log_interval == 0:\n cur_loss = total_loss / args.log_interval\n processed = min(i * args.batch_size, args.N_train)\n writer.add_scalar('training_loss', cur_loss, processed)\n if args.print:\n print(\n f'Train Epoch: 
{ep:2d}[{processed:6d}/{args.N_train:6d}({100.0 * processed / args.N_train:.0f}%)]\\tLearning rate: {args.lr:.4f}\\tLoss: {cur_loss:.6f}'\n )\n total_loss = 0\n \"\"\" EVALUATE \"\"\"\n tcn.eval()\n with torch.no_grad():\n for data in test_loader:\n x, y = data[0].to(device), data[1].to(device)\n output = tcn(x)\n test_loss = criterion(output, y)\n if args.print:\n print(f'\\nTest set: Average loss: {test_loss.item():.6f}\\n'\n )\n writer.add_scalar('test_loss', test_loss.item(), ep)\n writer.close()\n torch.save(tcn.state_dict(), args.model_save_path)\n print('Finished Training')\n return 0\n\n\nif __name__ == '__main__':\n run()\n",
"step-4": "<mask token>\nprint('Importing modules')\nimport torch\nimport torch.nn as nn\nimport torch.optim as optim\nimport torch.nn.functional as F\nfrom torch.utils.tensorboard import SummaryWriter\nfrom torch.utils.data import DataLoader\nimport argparse\nimport sys\nsys.path.append('')\nsys.path.append('../../')\nfrom data import AddTwoDataSet\nfrom model import TCN\nprint('modules imported')\n\n\ndef parse():\n parser = argparse.ArgumentParser(description='Adding Problem')\n parser.add_argument('--N_train', type=int, default=50000, metavar='N_train'\n )\n parser.add_argument('--N_test', type=int, default=1000, metavar='N_test')\n parser.add_argument('--seq_length', type=int, default=200, metavar=\n 'seq_length')\n parser.add_argument('--batch_size', type=int, default=32, metavar=\n 'batch_size')\n parser.add_argument('--num_layers', type=int, default=8, metavar=\n 'num_layers')\n parser.add_argument('--in_channels', type=int, default=2, metavar=\n 'in_channels')\n parser.add_argument('--out_channels', type=int, default=1, metavar=\n 'out_channels')\n parser.add_argument('--kernel_size', type=int, default=7, metavar=\n 'kernel_size')\n parser.add_argument('--res_block_size', type=int, default=30, metavar=\n 'res_block_size')\n parser.add_argument('--bias', type=bool, default=True, metavar='bias')\n parser.add_argument('--dropout', type=float, default=0.0, metavar='dropout'\n )\n parser.add_argument('--stride', type=int, default=1, metavar='stride')\n parser.add_argument('--leveledinit', type=bool, default=False, metavar=\n 'leveledinit')\n parser.add_argument('--model_save_path', type=str, default=\n 'adding_problem/models/tcn_addtwo.pt', metavar='model_save_path')\n parser.add_argument('--epochs', type=int, default=10, metavar='epochs')\n parser.add_argument('--lr', type=float, default=0.002, metavar='lr')\n parser.add_argument('--clip', type=bool, default=False, metavar='clip')\n parser.add_argument('--log_interval', type=int, default=100, metavar=\n 
'log_interval')\n parser.add_argument('--writer_path', type=str, default=\n 'adding_problem/sruns/add_two1', metavar='writer_path')\n parser.add_argument('--print', type=bool, default=False, metavar='print')\n parser.add_argument('--num_workers', type=int, default=0, metavar=\n 'num_workers')\n args = parser.parse_args()\n return args\n\n\ndef run():\n torch.manual_seed(1729)\n \"\"\" Setup \"\"\"\n args = parse()\n device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')\n print(device)\n \"\"\" Dataset \"\"\"\n train_dataset = AddTwoDataSet(N=args.N_train, seq_length=args.seq_length)\n test_dataset = AddTwoDataSet(N=args.N_test, seq_length=args.seq_length)\n train_loader = DataLoader(dataset=train_dataset, batch_size=args.\n batch_size, shuffle=True, num_workers=args.num_workers)\n test_loader = DataLoader(dataset=test_dataset, batch_size=args.\n batch_size, shuffle=True, num_workers=args.num_workers)\n \"\"\" TCN \"\"\"\n tcn = TCN(num_layers=args.num_layers, in_channels=args.in_channels,\n out_channels=args.out_channels, kernel_size=args.kernel_size,\n residual_blocks_channel_size=[args.res_block_size] * args.\n num_layers, bias=args.bias, dropout=args.dropout, stride=args.\n stride, dilations=None, leveledinit=args.leveledinit)\n tcn.to(device)\n if args.print:\n print(\n f'Number of learnable parameters : {sum(p.numel() for p in tcn.parameters() if p.requires_grad)}'\n )\n \"\"\" Training parameters\"\"\"\n criterion = nn.MSELoss()\n optimizer = optim.Adam(tcn.parameters(), lr=args.lr)\n \"\"\" Tensorboard \"\"\"\n writer = SummaryWriter(args.writer_path)\n for ep in range(1, args.epochs + 1):\n \"\"\" TRAIN \"\"\"\n tcn.train()\n total_loss = 0\n for i, data in enumerate(train_loader):\n x, y = data[0].to(device), data[1].to(device)\n optimizer.zero_grad()\n output = tcn(x)\n loss = F.mse_loss(output, y)\n loss.backward()\n if args.clip > 0:\n torch.nn.utils.clip_grad_norm_(model.parameters(), clip)\n optimizer.step()\n total_loss += 
loss.item()\n if i % args.log_interval == 0:\n cur_loss = total_loss / args.log_interval\n processed = min(i * args.batch_size, args.N_train)\n writer.add_scalar('training_loss', cur_loss, processed)\n if args.print:\n print(\n f'Train Epoch: {ep:2d}[{processed:6d}/{args.N_train:6d}({100.0 * processed / args.N_train:.0f}%)]\\tLearning rate: {args.lr:.4f}\\tLoss: {cur_loss:.6f}'\n )\n total_loss = 0\n \"\"\" EVALUATE \"\"\"\n tcn.eval()\n with torch.no_grad():\n for data in test_loader:\n x, y = data[0].to(device), data[1].to(device)\n output = tcn(x)\n test_loss = criterion(output, y)\n if args.print:\n print(f'\\nTest set: Average loss: {test_loss.item():.6f}\\n'\n )\n writer.add_scalar('test_loss', test_loss.item(), ep)\n writer.close()\n torch.save(tcn.state_dict(), args.model_save_path)\n print('Finished Training')\n return 0\n\n\nif __name__ == '__main__':\n run()\n",
"step-5": "# addtwo_run-py\r\n\"\"\"\r\nTrain and test a TCN on the add two dataset.\r\nTrying to reproduce https://arxiv.org/abs/1803.01271.\r\n\"\"\"\r\nprint('Importing modules')\r\nimport torch\r\nimport torch.nn as nn\r\nimport torch.optim as optim\r\nimport torch.nn.functional as F\r\nfrom torch.utils.tensorboard import SummaryWriter\r\nfrom torch.utils.data import DataLoader\r\n\r\nimport argparse\r\nimport sys\r\nsys.path.append('')\r\nsys.path.append(\"../../\")\r\n\r\nfrom data import AddTwoDataSet\r\nfrom model import TCN\r\nprint('modules imported')\r\n\r\ndef parse():\r\n parser = argparse.ArgumentParser(description='Adding Problem')\r\n parser.add_argument(\r\n '--N_train', type=int, default=50000, metavar='N_train')\r\n parser.add_argument(\r\n '--N_test', type=int, default=1000, metavar='N_test')\r\n parser.add_argument(\r\n '--seq_length', type=int, default=200, metavar='seq_length')\r\n parser.add_argument(\r\n '--batch_size', type=int, default=32, metavar='batch_size')\r\n parser.add_argument(\r\n '--num_layers', type=int, default=8, metavar='num_layers')\r\n parser.add_argument(\r\n '--in_channels', type=int, default=2, metavar='in_channels')\r\n parser.add_argument(\r\n '--out_channels', type=int, default=1, metavar='out_channels')\r\n parser.add_argument(\r\n '--kernel_size', type=int, default=7, metavar='kernel_size')\r\n parser.add_argument(\r\n '--res_block_size', type=int, default=30, metavar='res_block_size')\r\n parser.add_argument(\r\n '--bias', type=bool, default=True, metavar='bias')\r\n parser.add_argument(\r\n '--dropout', type=float, default=0.0, metavar='dropout')\r\n parser.add_argument(\r\n '--stride', type=int, default=1, metavar='stride')\r\n parser.add_argument(\r\n '--leveledinit', type=bool, default=False, metavar='leveledinit')\r\n parser.add_argument(\r\n '--model_save_path', type=str, default='adding_problem/models/tcn_addtwo.pt', \r\n metavar='model_save_path')\r\n parser.add_argument(\r\n '--epochs', type=int, 
default=10, metavar='epochs')\r\n parser.add_argument(\r\n '--lr', type=float, default=2e-3, metavar='lr')\r\n parser.add_argument(\r\n '--clip', type=bool, default=False, metavar='clip')\r\n parser.add_argument(\r\n '--log_interval', type=int, default=100, metavar='log_interval')\r\n parser.add_argument(\r\n '--writer_path', type=str, default='adding_problem/sruns/add_two1', \r\n metavar='writer_path')\r\n parser.add_argument(\r\n '--print', type=bool, default=False, metavar='print')\r\n parser.add_argument(\r\n '--num_workers', type=int, default=0, metavar='num_workers')\r\n args = parser.parse_args()\r\n return args\r\n\r\ndef run():\r\n torch.manual_seed(1729)\r\n \r\n \"\"\" Setup \"\"\"\r\n args = parse()\r\n device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\")\r\n print(device)\r\n\r\n \"\"\" Dataset \"\"\"\r\n train_dataset = AddTwoDataSet(N=args.N_train, seq_length=args.seq_length)\r\n test_dataset = AddTwoDataSet(N=args.N_test, seq_length=args.seq_length)\r\n train_loader = DataLoader(\r\n dataset=train_dataset, batch_size=args.batch_size, shuffle=True, num_workers=args.num_workers)\r\n test_loader = DataLoader(\r\n dataset=test_dataset, batch_size=args.batch_size, shuffle=True, num_workers=args.num_workers)\r\n\r\n \"\"\" TCN \"\"\"\r\n tcn = TCN(\r\n num_layers=args.num_layers,\r\n in_channels=args.in_channels,\r\n out_channels=args.out_channels,\r\n kernel_size=args.kernel_size,\r\n residual_blocks_channel_size=[args.res_block_size] * args.num_layers,\r\n bias=args.bias,\r\n dropout=args.dropout,\r\n stride=args.stride,\r\n dilations=None,\r\n leveledinit=args.leveledinit)\r\n tcn.to(device)\r\n if args.print:\r\n print(\r\n f\"\"\"Number of learnable parameters : {\r\n sum(p.numel() for p in tcn.parameters() if p.requires_grad)}\"\"\")\r\n\r\n \"\"\" Training parameters\"\"\"\r\n criterion = nn.MSELoss()\r\n optimizer = optim.Adam(tcn.parameters(), lr=args.lr)\r\n\r\n \"\"\" Tensorboard \"\"\"\r\n writer = 
SummaryWriter(args.writer_path)\r\n\r\n for ep in range(1, args.epochs+1):\r\n \"\"\" TRAIN \"\"\"\r\n tcn.train()\r\n total_loss = 0\r\n for i, data in enumerate(train_loader):\r\n x, y = data[0].to(device), data[1].to(device)\r\n optimizer.zero_grad()\r\n output = tcn(x)\r\n loss = F.mse_loss(output, y)\r\n loss.backward()\r\n if args.clip > 0:\r\n torch.nn.utils.clip_grad_norm_(model.parameters(), clip)\r\n optimizer.step()\r\n total_loss += loss.item()\r\n\r\n if i % args.log_interval == 0:\r\n cur_loss = total_loss / args.log_interval\r\n processed = min(i*args.batch_size, args.N_train)\r\n writer.add_scalar('training_loss', cur_loss, processed)\r\n if args.print:\r\n print(\r\n (f\"Train Epoch: {ep:2d}\"\r\n f\"[{processed:6d}/{args.N_train:6d}\"\r\n f\"({100.*processed/args.N_train:.0f}%)]\"\r\n f\"\\tLearning rate: {args.lr:.4f}\\tLoss: {cur_loss:.6f}\"))\r\n total_loss = 0\r\n \"\"\" EVALUATE \"\"\"\r\n tcn.eval()\r\n with torch.no_grad():\r\n for data in test_loader:\r\n x, y = data[0].to(device), data[1].to(device)\r\n output = tcn(x)\r\n test_loss = criterion(output, y)\r\n if args.print:\r\n print(\r\n f'\\nTest set: Average loss: {test_loss.item():.6f}\\n')\r\n writer.add_scalar('test_loss', test_loss.item() , ep)\r\n\r\n writer.close()\r\n torch.save(tcn.state_dict(), args.model_save_path)\r\n print('Finished Training')\r\n return 0\r\n\r\nif __name__ == \"__main__\":\r\n run()\r\n\r\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
class IOString:
<|reserved_special_token_0|>
def get_String(self):
self.str1 = input()
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class IOString:
<|reserved_special_token_0|>
def get_String(self):
self.str1 = input()
def print_String(self):
print(self.str1.upper())
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class IOString:
def __init__(self):
self.str1 = ''
def get_String(self):
self.str1 = input()
def print_String(self):
print(self.str1.upper())
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class IOString:
def __init__(self):
self.str1 = ''
def get_String(self):
self.str1 = input()
def print_String(self):
print(self.str1.upper())
<|reserved_special_token_0|>
str1.get_String()
str1.print_String()
<|reserved_special_token_1|>
#Exercise 2 - Write a Python class which has two methods get_String and print_String. get_String accept a string
#from the user and print_String print the string in upper case
#string will be an input to a get_string method and whatever you put in will print when you make the print screen method
class IOString():
def __init__(self):
self.str1 = ""
def get_String(self):
self.str1 = input()
def print_String(self):
print(self.str1.upper())
str1 = IOString()
str1.get_String()
str1.print_String()
|
flexible
|
{
"blob_id": "cf2973b94f1113013fe9baa946202ec75488f7d2",
"index": 9697,
"step-1": "class IOString:\n <mask token>\n\n def get_String(self):\n self.str1 = input()\n <mask token>\n\n\n<mask token>\n",
"step-2": "class IOString:\n <mask token>\n\n def get_String(self):\n self.str1 = input()\n\n def print_String(self):\n print(self.str1.upper())\n\n\n<mask token>\n",
"step-3": "class IOString:\n\n def __init__(self):\n self.str1 = ''\n\n def get_String(self):\n self.str1 = input()\n\n def print_String(self):\n print(self.str1.upper())\n\n\n<mask token>\n",
"step-4": "class IOString:\n\n def __init__(self):\n self.str1 = ''\n\n def get_String(self):\n self.str1 = input()\n\n def print_String(self):\n print(self.str1.upper())\n\n\n<mask token>\nstr1.get_String()\nstr1.print_String()\n",
"step-5": "#Exercise 2 - Write a Python class which has two methods get_String and print_String. get_String accept a string\n#from the user and print_String print the string in upper case\n#string will be an input to a get_string method and whatever you put in will print when you make the print screen method\n\nclass IOString():\n def __init__(self):\n self.str1 = \"\"\n \n def get_String(self):\n self.str1 = input()\n \n def print_String(self):\n print(self.str1.upper())\n \nstr1 = IOString()\nstr1.get_String()\nstr1.print_String()",
"step-ids": [
2,
3,
4,
5,
7
]
}
|
[
2,
3,
4,
5,
7
] |
class Solution:
def minWindow(self, s: str, t: str) -> str:
char_cnt = {}
for character in t:
if character not in char_cnt:
char_cnt[character] = 1
else:
char_cnt[character] += 1
dq = [] # add index & character
min_substring = None
for i in range(len(s)):
if s[i] in t:
char_cnt[s[i]] -= 1
dq.append((i, s[i]))
while len(dq) > 0 and char_cnt[dq[0][1]] < 0:
char_cnt[dq[0][1]] += 1
del dq[0]
containAll = True
for char in char_cnt:
if char_cnt[char] > 0:
containAll = False
break
if containAll:
substring = s[dq[0][0]:dq[-1][0]+1]
if min_substring is None or len(substring) < len(min_substring):
min_substring = substring
return min_substring if min_substring else ""
|
normal
|
{
"blob_id": "22706d7d9c04bb660c9bf0df66de89ed6bd480c2",
"index": 8210,
"step-1": "<mask token>\n",
"step-2": "class Solution:\n <mask token>\n",
"step-3": "class Solution:\n\n def minWindow(self, s: str, t: str) ->str:\n char_cnt = {}\n for character in t:\n if character not in char_cnt:\n char_cnt[character] = 1\n else:\n char_cnt[character] += 1\n dq = []\n min_substring = None\n for i in range(len(s)):\n if s[i] in t:\n char_cnt[s[i]] -= 1\n dq.append((i, s[i]))\n while len(dq) > 0 and char_cnt[dq[0][1]] < 0:\n char_cnt[dq[0][1]] += 1\n del dq[0]\n containAll = True\n for char in char_cnt:\n if char_cnt[char] > 0:\n containAll = False\n break\n if containAll:\n substring = s[dq[0][0]:dq[-1][0] + 1]\n if min_substring is None or len(substring) < len(\n min_substring):\n min_substring = substring\n return min_substring if min_substring else ''\n",
"step-4": "class Solution:\n def minWindow(self, s: str, t: str) -> str:\n char_cnt = {}\n for character in t:\n if character not in char_cnt:\n char_cnt[character] = 1\n else:\n char_cnt[character] += 1\n\n dq = [] # add index & character\n min_substring = None\n for i in range(len(s)):\n if s[i] in t:\n char_cnt[s[i]] -= 1\n dq.append((i, s[i]))\n while len(dq) > 0 and char_cnt[dq[0][1]] < 0:\n char_cnt[dq[0][1]] += 1\n del dq[0]\n containAll = True\n for char in char_cnt:\n if char_cnt[char] > 0:\n containAll = False\n break\n if containAll:\n substring = s[dq[0][0]:dq[-1][0]+1]\n if min_substring is None or len(substring) < len(min_substring):\n min_substring = substring\n return min_substring if min_substring else \"\"\n\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import os
import codecs
import json
#~ from lxml import etree
import lxml.html
target = "test/index.html"
url = "http://de.wikipedia.org/wiki/Liste_von_Bergen_in_der_Schweiz"
command = "wget %s -O %s" % (url, target)
#~ os.popen(command)
f = open(target)
html = lxml.html.fromstring(f.read())
f.close()
tables = html.xpath("//table")
table = tables[2]
rows = table.xpath("//tr/th")
#~ row = rows[2]
#~ ths = row.xpath("th")
#~ print len(rows)
for cell in rows[:8]:
text = cell.xpath("string()").replace("(km)","").replace("(m)","")
text = text.strip()
print text
#~ f = codecs.open("out.html","w", encoding="utf-8")
f_out = codecs.open("out.json","w", encoding="utf-8")
rows = table.xpath("//tr")
print len(rows)
#~ liste = {}
liste = []
for i, row in enumerate(rows):
cells = row.xpath("td")
if len(cells)==8:
#~ print cells[1].xpath("string()")
#~ cell = cells[1]
out = []
for cell in cells[1:3]:
links = cell.xpath("a")
if links:
out.append(links[0].xpath("string()"))
else:
out.append(cell.xpath("string()"))
#~ liste.update({"n%s"% (i):{"name":out[0], "hight":out[1]}})
liste.append({"name":out[0], "hight":out[1]})
#~ f.write('<li><a data-icon="info" data-rel="dialog" data-transition="pop" href="#no_info">%s (%s)</a></li>\n' % (out[0], out[1]))
#~ f.close()
f_out.write(json.dumps({"mountains" : {"Switzerland" : liste}}))
f_out.close()
#~ for table in tables:
#~ print len(table)
print lxml.html.tostring(table)[:100]
|
normal
|
{
"blob_id": "89499ea8dd02d5e1b2ff635ab5203a65ceee4276",
"index": 8536,
"step-1": "import os\nimport codecs\nimport json\n#~ from lxml import etree\nimport lxml.html\n\ntarget = \"test/index.html\"\nurl = \"http://de.wikipedia.org/wiki/Liste_von_Bergen_in_der_Schweiz\"\ncommand = \"wget %s -O %s\" % (url, target)\n#~ os.popen(command)\n\nf = open(target)\nhtml = lxml.html.fromstring(f.read())\nf.close()\n\ntables = html.xpath(\"//table\")\ntable = tables[2]\n\nrows = table.xpath(\"//tr/th\")\n#~ row = rows[2]\n#~ ths = row.xpath(\"th\")\n#~ print len(rows)\nfor cell in rows[:8]:\n text = cell.xpath(\"string()\").replace(\"(km)\",\"\").replace(\"(m)\",\"\")\n text = text.strip()\n print text\n\n#~ f = codecs.open(\"out.html\",\"w\", encoding=\"utf-8\")\nf_out = codecs.open(\"out.json\",\"w\", encoding=\"utf-8\")\n\nrows = table.xpath(\"//tr\")\nprint len(rows)\n#~ liste = {}\nliste = []\nfor i, row in enumerate(rows):\n cells = row.xpath(\"td\")\n if len(cells)==8:\n #~ print cells[1].xpath(\"string()\")\n #~ cell = cells[1]\n out = []\n for cell in cells[1:3]:\n links = cell.xpath(\"a\")\n if links:\n out.append(links[0].xpath(\"string()\"))\n else:\n out.append(cell.xpath(\"string()\"))\n\t\t#~ liste.update({\"n%s\"% (i):{\"name\":out[0], \"hight\":out[1]}})\n liste.append({\"name\":out[0], \"hight\":out[1]})\n \n \n #~ f.write('<li><a data-icon=\"info\" data-rel=\"dialog\" data-transition=\"pop\" href=\"#no_info\">%s (%s)</a></li>\\n' % (out[0], out[1]))\n\n#~ f.close()\n\nf_out.write(json.dumps({\"mountains\" : {\"Switzerland\" : liste}}))\nf_out.close()\n\n#~ for table in tables:\n #~ print len(table)\n\n\nprint lxml.html.tostring(table)[:100]\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def main():
print('Load a ply point cloud, print it, and render it')
pcd = read_point_cloud('11.ply')
""" read_point_cloud reads a point cloud from a file.
It tries to decode the file based on the extension name.
The supported extension names are: pcd, ply, xyz, xyzrgb, xyzn, pts."""
pcd = read_point_cloud('TestData/fragment.ply')
print(pcd)
tmp = np.asarray(pcd.points)
print(tmp[0:5, 0:3])
print('Downsample the point cloud with a voxel of 0.005')
downpcd = voxel_down_sample(pcd, voxel_size=0.05)
draw_geometries([downpcd])
estimate_normals(downpcd, search_param=KDTreeSearchParamHybrid(radius=
0.1, max_nn=30))
draw_geometries([downpcd])
print('Load a polygon volume and use it to crop the original point cloud')
vol = read_selection_polygon_volume('TestData/Crop/cropped.json')
chair = vol.crop_point_cloud(pcd)
print('')
print('Paint chair')
chair.paint_uniform_color([1, 0.706, 0])
print('')
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def main():
print('Load a ply point cloud, print it, and render it')
pcd = read_point_cloud('11.ply')
""" read_point_cloud reads a point cloud from a file.
It tries to decode the file based on the extension name.
The supported extension names are: pcd, ply, xyz, xyzrgb, xyzn, pts."""
pcd = read_point_cloud('TestData/fragment.ply')
print(pcd)
tmp = np.asarray(pcd.points)
print(tmp[0:5, 0:3])
print('Downsample the point cloud with a voxel of 0.005')
downpcd = voxel_down_sample(pcd, voxel_size=0.05)
draw_geometries([downpcd])
estimate_normals(downpcd, search_param=KDTreeSearchParamHybrid(radius=
0.1, max_nn=30))
draw_geometries([downpcd])
print('Load a polygon volume and use it to crop the original point cloud')
vol = read_selection_polygon_volume('TestData/Crop/cropped.json')
chair = vol.crop_point_cloud(pcd)
print('')
print('Paint chair')
chair.paint_uniform_color([1, 0.706, 0])
print('')
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import numpy as np
from open3d import *
def main():
print('Load a ply point cloud, print it, and render it')
pcd = read_point_cloud('11.ply')
""" read_point_cloud reads a point cloud from a file.
It tries to decode the file based on the extension name.
The supported extension names are: pcd, ply, xyz, xyzrgb, xyzn, pts."""
pcd = read_point_cloud('TestData/fragment.ply')
print(pcd)
tmp = np.asarray(pcd.points)
print(tmp[0:5, 0:3])
print('Downsample the point cloud with a voxel of 0.005')
downpcd = voxel_down_sample(pcd, voxel_size=0.05)
draw_geometries([downpcd])
estimate_normals(downpcd, search_param=KDTreeSearchParamHybrid(radius=
0.1, max_nn=30))
draw_geometries([downpcd])
print('Load a polygon volume and use it to crop the original point cloud')
vol = read_selection_polygon_volume('TestData/Crop/cropped.json')
chair = vol.crop_point_cloud(pcd)
print('')
print('Paint chair')
chair.paint_uniform_color([1, 0.706, 0])
print('')
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2018 Cesar Sinchiguano <cesarsinchiguano@hotmail.es>
#
# Distributed under terms of the BSD license.
"""
"""
import numpy as np
from open3d import *
def main():
print("Load a ply point cloud, print it, and render it")
pcd = read_point_cloud("11.ply")
''' read_point_cloud reads a point cloud from a file.
It tries to decode the file based on the extension name.
The supported extension names are: pcd, ply, xyz, xyzrgb, xyzn, pts.'''
pcd = read_point_cloud("TestData/fragment.ply")
print(pcd)
# print("Load a ply point cloud, print it, and render it")
# pcd = read_point_cloud("bun0.pcd")
# print(pcd)
tmp=np.asarray(pcd.points)
print(tmp[0:5,0:3])#rows and column
#draw_geometries([pcd])
print("Downsample the point cloud with a voxel of 0.005")
downpcd = voxel_down_sample(pcd, voxel_size = 0.05)
draw_geometries([downpcd])
# print("Recompute the normal of the downsampled point cloud")
estimate_normals(downpcd, search_param = KDTreeSearchParamHybrid(radius = 0.1, max_nn = 30))
draw_geometries([downpcd])
# print("Print a normal vector of the 0th point")
# print(downpcd.normals[0])
# print("Print the normal vectors of the first 10 points")
# print(np.asarray(downpcd.normals)[:10,:])
# print("")
print("Load a polygon volume and use it to crop the original point cloud")
vol = read_selection_polygon_volume("TestData/Crop/cropped.json")
chair = vol.crop_point_cloud(pcd)
#draw_geometries([chair])
print("")
print("Paint chair")
chair.paint_uniform_color([1, 0.706, 0])
#draw_geometries([chair])
print("")
if __name__ == "__main__":
main()
|
flexible
|
{
"blob_id": "30e8e269cf6500ab804566a85c9b96b3ef9bda36",
"index": 4143,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef main():\n print('Load a ply point cloud, print it, and render it')\n pcd = read_point_cloud('11.ply')\n \"\"\" read_point_cloud reads a point cloud from a file.\n It tries to decode the file based on the extension name.\n The supported extension names are: pcd, ply, xyz, xyzrgb, xyzn, pts.\"\"\"\n pcd = read_point_cloud('TestData/fragment.ply')\n print(pcd)\n tmp = np.asarray(pcd.points)\n print(tmp[0:5, 0:3])\n print('Downsample the point cloud with a voxel of 0.005')\n downpcd = voxel_down_sample(pcd, voxel_size=0.05)\n draw_geometries([downpcd])\n estimate_normals(downpcd, search_param=KDTreeSearchParamHybrid(radius=\n 0.1, max_nn=30))\n draw_geometries([downpcd])\n print('Load a polygon volume and use it to crop the original point cloud')\n vol = read_selection_polygon_volume('TestData/Crop/cropped.json')\n chair = vol.crop_point_cloud(pcd)\n print('')\n print('Paint chair')\n chair.paint_uniform_color([1, 0.706, 0])\n print('')\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef main():\n print('Load a ply point cloud, print it, and render it')\n pcd = read_point_cloud('11.ply')\n \"\"\" read_point_cloud reads a point cloud from a file.\n It tries to decode the file based on the extension name.\n The supported extension names are: pcd, ply, xyz, xyzrgb, xyzn, pts.\"\"\"\n pcd = read_point_cloud('TestData/fragment.ply')\n print(pcd)\n tmp = np.asarray(pcd.points)\n print(tmp[0:5, 0:3])\n print('Downsample the point cloud with a voxel of 0.005')\n downpcd = voxel_down_sample(pcd, voxel_size=0.05)\n draw_geometries([downpcd])\n estimate_normals(downpcd, search_param=KDTreeSearchParamHybrid(radius=\n 0.1, max_nn=30))\n draw_geometries([downpcd])\n print('Load a polygon volume and use it to crop the original point cloud')\n vol = read_selection_polygon_volume('TestData/Crop/cropped.json')\n chair = vol.crop_point_cloud(pcd)\n print('')\n print('Paint chair')\n chair.paint_uniform_color([1, 0.706, 0])\n print('')\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "<mask token>\nimport numpy as np\nfrom open3d import *\n\n\ndef main():\n print('Load a ply point cloud, print it, and render it')\n pcd = read_point_cloud('11.ply')\n \"\"\" read_point_cloud reads a point cloud from a file.\n It tries to decode the file based on the extension name.\n The supported extension names are: pcd, ply, xyz, xyzrgb, xyzn, pts.\"\"\"\n pcd = read_point_cloud('TestData/fragment.ply')\n print(pcd)\n tmp = np.asarray(pcd.points)\n print(tmp[0:5, 0:3])\n print('Downsample the point cloud with a voxel of 0.005')\n downpcd = voxel_down_sample(pcd, voxel_size=0.05)\n draw_geometries([downpcd])\n estimate_normals(downpcd, search_param=KDTreeSearchParamHybrid(radius=\n 0.1, max_nn=30))\n draw_geometries([downpcd])\n print('Load a polygon volume and use it to crop the original point cloud')\n vol = read_selection_polygon_volume('TestData/Crop/cropped.json')\n chair = vol.crop_point_cloud(pcd)\n print('')\n print('Paint chair')\n chair.paint_uniform_color([1, 0.706, 0])\n print('')\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "#! /usr/bin/env python\n# -*- coding: utf-8 -*-\n# vim:fenc=utf-8\n#\n# Copyright © 2018 Cesar Sinchiguano <cesarsinchiguano@hotmail.es>\n#\n# Distributed under terms of the BSD license.\n\n\"\"\"\n\n\"\"\"\nimport numpy as np\nfrom open3d import *\n\ndef main():\n print(\"Load a ply point cloud, print it, and render it\")\n pcd = read_point_cloud(\"11.ply\")\n ''' read_point_cloud reads a point cloud from a file.\n It tries to decode the file based on the extension name.\n The supported extension names are: pcd, ply, xyz, xyzrgb, xyzn, pts.'''\n pcd = read_point_cloud(\"TestData/fragment.ply\")\n\n print(pcd)\n\n # print(\"Load a ply point cloud, print it, and render it\")\n # pcd = read_point_cloud(\"bun0.pcd\")\n # print(pcd)\n tmp=np.asarray(pcd.points)\n print(tmp[0:5,0:3])#rows and column\n #draw_geometries([pcd])\n\n print(\"Downsample the point cloud with a voxel of 0.005\")\n downpcd = voxel_down_sample(pcd, voxel_size = 0.05)\n draw_geometries([downpcd])\n\n # print(\"Recompute the normal of the downsampled point cloud\")\n estimate_normals(downpcd, search_param = KDTreeSearchParamHybrid(radius = 0.1, max_nn = 30))\n draw_geometries([downpcd])\n\n # print(\"Print a normal vector of the 0th point\")\n # print(downpcd.normals[0])\n # print(\"Print the normal vectors of the first 10 points\")\n # print(np.asarray(downpcd.normals)[:10,:])\n # print(\"\")\n\n print(\"Load a polygon volume and use it to crop the original point cloud\")\n vol = read_selection_polygon_volume(\"TestData/Crop/cropped.json\")\n chair = vol.crop_point_cloud(pcd)\n #draw_geometries([chair])\n print(\"\")\n\n print(\"Paint chair\")\n chair.paint_uniform_color([1, 0.706, 0])\n #draw_geometries([chair])\n print(\"\")\n\nif __name__ == \"__main__\":\n main()\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import numpy as np
from cost_functions import trajectory_cost_fn
import time
class Controller():
def __init__(self):
pass
# Get the appropriate action(s) for this state(s)
def get_action(self, state):
pass
class RandomController(Controller):
def __init__(self, env):
""" YOUR CODE HERE """
pass
def get_action(self, state):
""" YOUR CODE HERE """
""" Your code should randomly sample an action uniformly from the action space """
pass
class MPCcontroller(Controller):
    """Controller built using the MPC method outlined in https://arxiv.org/abs/1708.02596."""

    def __init__(self, env, dyn_model, horizon=5, cost_fn=None, num_simulated_paths=10):
        """Store the environment, learned dynamics model and MPC hyperparameters."""
        self.env = env
        self.dyn_model = dyn_model
        self.horizon = horizon
        self.cost_fn = cost_fn
        self.num_simulated_paths = num_simulated_paths

    def get_action(self, state):
        # YOUR CODE HERE
        # Note: be careful to batch your simulations through the model for speed.
        pass
|
normal
|
{
"blob_id": "7112eb52aea9be6f8e682b4dacc6b615365c8cea",
"index": 7510,
"step-1": "<mask token>\n\n\nclass Controller:\n <mask token>\n <mask token>\n\n\nclass RandomController(Controller):\n\n def __init__(self, env):\n \"\"\" YOUR CODE HERE \"\"\"\n pass\n\n def get_action(self, state):\n \"\"\" YOUR CODE HERE \"\"\"\n \"\"\" Your code should randomly sample an action uniformly from the action space \"\"\"\n pass\n\n\nclass MPCcontroller(Controller):\n \"\"\" Controller built using the MPC method outlined in https://arxiv.org/abs/1708.02596 \"\"\"\n\n def __init__(self, env, dyn_model, horizon=5, cost_fn=None,\n num_simulated_paths=10):\n self.env = env\n self.dyn_model = dyn_model\n self.horizon = horizon\n self.cost_fn = cost_fn\n self.num_simulated_paths = num_simulated_paths\n\n def get_action(self, state):\n \"\"\" YOUR CODE HERE \"\"\"\n \"\"\" Note: be careful to batch your simulations through the model for speed \"\"\"\n",
"step-2": "<mask token>\n\n\nclass Controller:\n <mask token>\n\n def get_action(self, state):\n pass\n\n\nclass RandomController(Controller):\n\n def __init__(self, env):\n \"\"\" YOUR CODE HERE \"\"\"\n pass\n\n def get_action(self, state):\n \"\"\" YOUR CODE HERE \"\"\"\n \"\"\" Your code should randomly sample an action uniformly from the action space \"\"\"\n pass\n\n\nclass MPCcontroller(Controller):\n \"\"\" Controller built using the MPC method outlined in https://arxiv.org/abs/1708.02596 \"\"\"\n\n def __init__(self, env, dyn_model, horizon=5, cost_fn=None,\n num_simulated_paths=10):\n self.env = env\n self.dyn_model = dyn_model\n self.horizon = horizon\n self.cost_fn = cost_fn\n self.num_simulated_paths = num_simulated_paths\n\n def get_action(self, state):\n \"\"\" YOUR CODE HERE \"\"\"\n \"\"\" Note: be careful to batch your simulations through the model for speed \"\"\"\n",
"step-3": "<mask token>\n\n\nclass Controller:\n\n def __init__(self):\n pass\n\n def get_action(self, state):\n pass\n\n\nclass RandomController(Controller):\n\n def __init__(self, env):\n \"\"\" YOUR CODE HERE \"\"\"\n pass\n\n def get_action(self, state):\n \"\"\" YOUR CODE HERE \"\"\"\n \"\"\" Your code should randomly sample an action uniformly from the action space \"\"\"\n pass\n\n\nclass MPCcontroller(Controller):\n \"\"\" Controller built using the MPC method outlined in https://arxiv.org/abs/1708.02596 \"\"\"\n\n def __init__(self, env, dyn_model, horizon=5, cost_fn=None,\n num_simulated_paths=10):\n self.env = env\n self.dyn_model = dyn_model\n self.horizon = horizon\n self.cost_fn = cost_fn\n self.num_simulated_paths = num_simulated_paths\n\n def get_action(self, state):\n \"\"\" YOUR CODE HERE \"\"\"\n \"\"\" Note: be careful to batch your simulations through the model for speed \"\"\"\n",
"step-4": "import numpy as np\nfrom cost_functions import trajectory_cost_fn\nimport time\n\n\nclass Controller:\n\n def __init__(self):\n pass\n\n def get_action(self, state):\n pass\n\n\nclass RandomController(Controller):\n\n def __init__(self, env):\n \"\"\" YOUR CODE HERE \"\"\"\n pass\n\n def get_action(self, state):\n \"\"\" YOUR CODE HERE \"\"\"\n \"\"\" Your code should randomly sample an action uniformly from the action space \"\"\"\n pass\n\n\nclass MPCcontroller(Controller):\n \"\"\" Controller built using the MPC method outlined in https://arxiv.org/abs/1708.02596 \"\"\"\n\n def __init__(self, env, dyn_model, horizon=5, cost_fn=None,\n num_simulated_paths=10):\n self.env = env\n self.dyn_model = dyn_model\n self.horizon = horizon\n self.cost_fn = cost_fn\n self.num_simulated_paths = num_simulated_paths\n\n def get_action(self, state):\n \"\"\" YOUR CODE HERE \"\"\"\n \"\"\" Note: be careful to batch your simulations through the model for speed \"\"\"\n",
"step-5": "import numpy as np\r\nfrom cost_functions import trajectory_cost_fn\r\nimport time\r\n\r\nclass Controller():\r\n\tdef __init__(self):\r\n\t\tpass\r\n\r\n\t# Get the appropriate action(s) for this state(s)\r\n\tdef get_action(self, state):\r\n\t\tpass\r\n\r\n\r\nclass RandomController(Controller):\r\n\tdef __init__(self, env):\r\n\t\t\"\"\" YOUR CODE HERE \"\"\"\r\n\t\tpass\r\n\r\n\tdef get_action(self, state):\r\n\t\t\"\"\" YOUR CODE HERE \"\"\"\r\n\t\t\"\"\" Your code should randomly sample an action uniformly from the action space \"\"\"\r\n\t\tpass\r\n\r\n\r\nclass MPCcontroller(Controller):\r\n\t\"\"\" Controller built using the MPC method outlined in https://arxiv.org/abs/1708.02596 \"\"\"\r\n\tdef __init__(self, \r\n\t\t\t\t env, \r\n\t\t\t\t dyn_model, \r\n\t\t\t\t horizon=5, \r\n\t\t\t\t cost_fn=None, \r\n\t\t\t\t num_simulated_paths=10,\r\n\t\t\t\t ):\r\n\t\tself.env = env\r\n\t\tself.dyn_model = dyn_model\r\n\t\tself.horizon = horizon\r\n\t\tself.cost_fn = cost_fn\r\n\t\tself.num_simulated_paths = num_simulated_paths\r\n\r\n\tdef get_action(self, state):\r\n\t\t\"\"\" YOUR CODE HERE \"\"\"\r\n\t\t\"\"\" Note: be careful to batch your simulations through the model for speed \"\"\"\r\n\r\n",
"step-ids": [
8,
9,
10,
11,
12
]
}
|
[
8,
9,
10,
11,
12
] |
from .. import CURRENT_NAME
from ..cmd import call_cmd
from .config import Configurator
from .config import USER_INI
from icemac.install.addressbook._compat import Path
import argparse
import os
import pdb # noqa: T002
import sys
def update(stdin=None):
    """Update the current address book installation.

    Must be run from the directory containing the ``current`` symlink;
    exits with a non-zero status when the preconditions are not met.
    """
    current = Path.cwd() / CURRENT_NAME
    if not current.exists():
        print("ERROR: There is no symlink named {!r} in the current"
              " directory.".format(CURRENT_NAME))
        print("This script cannot be called here.")
        sys.exit(-1)

    if (current / 'buildout.cfg').exists():
        print("ERROR: '{}/buildout.cfg' already exists please (re-) move"
              " it.".format(CURRENT_NAME))
        sys.exit(-2)

    original_dir = os.getcwd()
    os.chdir(str(current))  # PY2: in PY3 `str` is no longer needed
    configurator = Configurator(
        current / USER_INI, install_new_version=False, stdin=stdin)
    try:
        configurator()
        call_cmd('running bin/buildout', '../bin/buildout')
        if configurator.restart_server == 'yes':
            call_cmd('Restarting instance', 'bin/svctl', 'restart', 'all')
    finally:
        # Always restore the caller's working directory, even on failure.
        os.chdir(str(original_dir))
    print('Done.')
def main(args=None):
    """Entry point for `bin/change-addressbook-config`."""
    parser = argparse.ArgumentParser(
        description='Update the current address book installation.')
    parser.add_argument('--debug', action='store_true',
                        help='Enter debugger on errors.')
    options = parser.parse_args(args)
    try:
        update()
    except Exception:
        # Without --debug, propagate the error; with it, drop into pdb.
        if not options.debug:
            raise
        pdb.post_mortem()
|
normal
|
{
"blob_id": "f5274f5d838d484ca0c1cc5a5192a2fd698cf827",
"index": 9432,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef update(stdin=None):\n \"\"\"Update the current address book installation.\"\"\"\n curr_path = Path.cwd() / CURRENT_NAME\n if not curr_path.exists():\n print('ERROR: There is no symlink named {!r} in the current directory.'\n .format(CURRENT_NAME))\n print('This script cannot be called here.')\n sys.exit(-1)\n if (curr_path / 'buildout.cfg').exists():\n print(\"ERROR: '{}/buildout.cfg' already exists please (re-) move it.\"\n .format(CURRENT_NAME))\n sys.exit(-2)\n cwd = os.getcwd()\n os.chdir(str(curr_path))\n configurator = Configurator(curr_path / USER_INI, install_new_version=\n False, stdin=stdin)\n try:\n configurator()\n call_cmd('running bin/buildout', '../bin/buildout')\n if configurator.restart_server == 'yes':\n call_cmd('Restarting instance', 'bin/svctl', 'restart', 'all')\n finally:\n os.chdir(str(cwd))\n print('Done.')\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef update(stdin=None):\n \"\"\"Update the current address book installation.\"\"\"\n curr_path = Path.cwd() / CURRENT_NAME\n if not curr_path.exists():\n print('ERROR: There is no symlink named {!r} in the current directory.'\n .format(CURRENT_NAME))\n print('This script cannot be called here.')\n sys.exit(-1)\n if (curr_path / 'buildout.cfg').exists():\n print(\"ERROR: '{}/buildout.cfg' already exists please (re-) move it.\"\n .format(CURRENT_NAME))\n sys.exit(-2)\n cwd = os.getcwd()\n os.chdir(str(curr_path))\n configurator = Configurator(curr_path / USER_INI, install_new_version=\n False, stdin=stdin)\n try:\n configurator()\n call_cmd('running bin/buildout', '../bin/buildout')\n if configurator.restart_server == 'yes':\n call_cmd('Restarting instance', 'bin/svctl', 'restart', 'all')\n finally:\n os.chdir(str(cwd))\n print('Done.')\n\n\ndef main(args=None):\n \"\"\"Entry point for `bin/change-addressbook-config`.\"\"\"\n parser = argparse.ArgumentParser(description=\n 'Update the current address book installation.')\n parser.add_argument('--debug', action='store_true', help=\n 'Enter debugger on errors.')\n args = parser.parse_args(args)\n try:\n update()\n except Exception:\n if args.debug:\n pdb.post_mortem()\n else:\n raise\n",
"step-4": "from .. import CURRENT_NAME\nfrom ..cmd import call_cmd\nfrom .config import Configurator\nfrom .config import USER_INI\nfrom icemac.install.addressbook._compat import Path\nimport argparse\nimport os\nimport pdb\nimport sys\n\n\ndef update(stdin=None):\n \"\"\"Update the current address book installation.\"\"\"\n curr_path = Path.cwd() / CURRENT_NAME\n if not curr_path.exists():\n print('ERROR: There is no symlink named {!r} in the current directory.'\n .format(CURRENT_NAME))\n print('This script cannot be called here.')\n sys.exit(-1)\n if (curr_path / 'buildout.cfg').exists():\n print(\"ERROR: '{}/buildout.cfg' already exists please (re-) move it.\"\n .format(CURRENT_NAME))\n sys.exit(-2)\n cwd = os.getcwd()\n os.chdir(str(curr_path))\n configurator = Configurator(curr_path / USER_INI, install_new_version=\n False, stdin=stdin)\n try:\n configurator()\n call_cmd('running bin/buildout', '../bin/buildout')\n if configurator.restart_server == 'yes':\n call_cmd('Restarting instance', 'bin/svctl', 'restart', 'all')\n finally:\n os.chdir(str(cwd))\n print('Done.')\n\n\ndef main(args=None):\n \"\"\"Entry point for `bin/change-addressbook-config`.\"\"\"\n parser = argparse.ArgumentParser(description=\n 'Update the current address book installation.')\n parser.add_argument('--debug', action='store_true', help=\n 'Enter debugger on errors.')\n args = parser.parse_args(args)\n try:\n update()\n except Exception:\n if args.debug:\n pdb.post_mortem()\n else:\n raise\n",
"step-5": "from .. import CURRENT_NAME\nfrom ..cmd import call_cmd\nfrom .config import Configurator\nfrom .config import USER_INI\nfrom icemac.install.addressbook._compat import Path\nimport argparse\nimport os\nimport pdb # noqa: T002\nimport sys\n\n\ndef update(stdin=None):\n \"\"\"Update the current address book installation.\"\"\"\n curr_path = Path.cwd() / CURRENT_NAME\n if not curr_path.exists():\n print(\"ERROR: There is no symlink named {!r} in the current\"\n \" directory.\".format(CURRENT_NAME))\n print(\"This script cannot be called here.\")\n sys.exit(-1)\n\n if (curr_path / 'buildout.cfg').exists():\n print(\"ERROR: '{}/buildout.cfg' already exists please (re-) move\"\n \" it.\".format(CURRENT_NAME))\n sys.exit(-2)\n\n cwd = os.getcwd()\n os.chdir(str(curr_path)) # PY2: in PY3 `str` is no longer needed\n configurator = Configurator(\n curr_path / USER_INI, install_new_version=False, stdin=stdin)\n try:\n configurator()\n call_cmd('running bin/buildout', '../bin/buildout')\n if configurator.restart_server == 'yes':\n call_cmd('Restarting instance', 'bin/svctl', 'restart', 'all')\n finally:\n os.chdir(str(cwd)) # PY2: in PY3 `str` is no longer needed\n print('Done.')\n\n\ndef main(args=None):\n \"\"\"Entry point for `bin/change-addressbook-config`.\"\"\"\n parser = argparse.ArgumentParser(\n description='Update the current address book installation.')\n parser.add_argument(\n '--debug', action=\"store_true\",\n help='Enter debugger on errors.')\n\n args = parser.parse_args(args)\n try:\n update()\n except Exception:\n if args.debug:\n pdb.post_mortem()\n else:\n raise\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import requests as r
from .security import Security, Securities
from .data import Data
url_base = 'https://www.alphavantage.co/query'
def _build_url(**kargs):
query = {
'function': 'TIME_SERIES_DAILY',
'symbol': 'SPY',
'outputsize': 'full',
'datatype': 'json',
'apikey': 'JPIO2GNGBMFRLGMN'
}
query.update(kargs)
query_str = '&'.join([f'{key}={val}' for key, val in query.items()])
return f'{url_base}?{query_str}'
def _request(**kargs):
    """Issue a GET request against the Alpha Vantage URL built from *kargs*."""
    return r.get(_build_url(**kargs))
def _get_symbol(symbol, **kargs):
    """Fetch daily CSV data for *symbol* and wrap it in a Security.

    The API returns rows newest-first; they are reversed here so the
    resulting data runs past -> present, with the header kept on top.
    """
    kargs['symbol'] = symbol
    kargs['datatype'] = 'csv'
    req = _request(**kargs)
    # splitlines() rather than split(): split() breaks on ANY whitespace,
    # so a CSV field containing a space would corrupt the rows.
    header, *rows = req.text.splitlines()
    csv_str = f'{header}\n' + '\n'.join(reversed(rows))
    data = Data.load_csv(csv_str)
    return Security(symbol, data)
def get(symbols, **kargs):
    """Download one or more symbols and collect them in a Securities container.

    A single symbol may be passed as a bare string instead of a list.
    """
    requested = symbols if isinstance(symbols, list) else [symbols]
    result = Securities()
    for sym in requested:
        kargs['symbol'] = sym
        result.add(id=sym, security=_get_symbol(**kargs))
    return result
|
normal
|
{
"blob_id": "e99d3ae82d8eea38d29d6c4f09fdb3858e36ca50",
"index": 6518,
"step-1": "<mask token>\n\n\ndef _build_url(**kargs):\n query = {'function': 'TIME_SERIES_DAILY', 'symbol': 'SPY', 'outputsize':\n 'full', 'datatype': 'json', 'apikey': 'JPIO2GNGBMFRLGMN'}\n query.update(kargs)\n query_str = '&'.join([f'{key}={val}' for key, val in query.items()])\n return f'{url_base}?{query_str}'\n\n\n<mask token>\n\n\ndef _get_symbol(symbol, **kargs):\n kargs['symbol'] = symbol\n kargs['datatype'] = 'csv'\n req = _request(**kargs)\n text = req.text\n header, *text = text.split()\n text = '\\n'.join([l for l in text[::-1]])\n csv_str = f'{header}\\n{text}'\n data = Data.load_csv(csv_str)\n return Security(symbol, data)\n\n\ndef get(symbols, **kargs):\n if not isinstance(symbols, list):\n symbols = [symbols]\n result = Securities()\n for symbol in symbols:\n kargs['symbol'] = symbol\n result.add(id=symbol, security=_get_symbol(**kargs))\n return result\n",
"step-2": "<mask token>\n\n\ndef _build_url(**kargs):\n query = {'function': 'TIME_SERIES_DAILY', 'symbol': 'SPY', 'outputsize':\n 'full', 'datatype': 'json', 'apikey': 'JPIO2GNGBMFRLGMN'}\n query.update(kargs)\n query_str = '&'.join([f'{key}={val}' for key, val in query.items()])\n return f'{url_base}?{query_str}'\n\n\ndef _request(**kargs):\n url = _build_url(**kargs)\n return r.get(url)\n\n\ndef _get_symbol(symbol, **kargs):\n kargs['symbol'] = symbol\n kargs['datatype'] = 'csv'\n req = _request(**kargs)\n text = req.text\n header, *text = text.split()\n text = '\\n'.join([l for l in text[::-1]])\n csv_str = f'{header}\\n{text}'\n data = Data.load_csv(csv_str)\n return Security(symbol, data)\n\n\ndef get(symbols, **kargs):\n if not isinstance(symbols, list):\n symbols = [symbols]\n result = Securities()\n for symbol in symbols:\n kargs['symbol'] = symbol\n result.add(id=symbol, security=_get_symbol(**kargs))\n return result\n",
"step-3": "<mask token>\nurl_base = 'https://www.alphavantage.co/query'\n\n\ndef _build_url(**kargs):\n query = {'function': 'TIME_SERIES_DAILY', 'symbol': 'SPY', 'outputsize':\n 'full', 'datatype': 'json', 'apikey': 'JPIO2GNGBMFRLGMN'}\n query.update(kargs)\n query_str = '&'.join([f'{key}={val}' for key, val in query.items()])\n return f'{url_base}?{query_str}'\n\n\ndef _request(**kargs):\n url = _build_url(**kargs)\n return r.get(url)\n\n\ndef _get_symbol(symbol, **kargs):\n kargs['symbol'] = symbol\n kargs['datatype'] = 'csv'\n req = _request(**kargs)\n text = req.text\n header, *text = text.split()\n text = '\\n'.join([l for l in text[::-1]])\n csv_str = f'{header}\\n{text}'\n data = Data.load_csv(csv_str)\n return Security(symbol, data)\n\n\ndef get(symbols, **kargs):\n if not isinstance(symbols, list):\n symbols = [symbols]\n result = Securities()\n for symbol in symbols:\n kargs['symbol'] = symbol\n result.add(id=symbol, security=_get_symbol(**kargs))\n return result\n",
"step-4": "import requests as r\nfrom .security import Security, Securities\nfrom .data import Data\nurl_base = 'https://www.alphavantage.co/query'\n\n\ndef _build_url(**kargs):\n query = {'function': 'TIME_SERIES_DAILY', 'symbol': 'SPY', 'outputsize':\n 'full', 'datatype': 'json', 'apikey': 'JPIO2GNGBMFRLGMN'}\n query.update(kargs)\n query_str = '&'.join([f'{key}={val}' for key, val in query.items()])\n return f'{url_base}?{query_str}'\n\n\ndef _request(**kargs):\n url = _build_url(**kargs)\n return r.get(url)\n\n\ndef _get_symbol(symbol, **kargs):\n kargs['symbol'] = symbol\n kargs['datatype'] = 'csv'\n req = _request(**kargs)\n text = req.text\n header, *text = text.split()\n text = '\\n'.join([l for l in text[::-1]])\n csv_str = f'{header}\\n{text}'\n data = Data.load_csv(csv_str)\n return Security(symbol, data)\n\n\ndef get(symbols, **kargs):\n if not isinstance(symbols, list):\n symbols = [symbols]\n result = Securities()\n for symbol in symbols:\n kargs['symbol'] = symbol\n result.add(id=symbol, security=_get_symbol(**kargs))\n return result\n",
"step-5": "import requests as r\n\nfrom .security import Security, Securities\nfrom .data import Data\n\n\nurl_base = 'https://www.alphavantage.co/query'\n\ndef _build_url(**kargs):\n\tquery = {\n\t'function': 'TIME_SERIES_DAILY',\n\t'symbol': 'SPY',\n\t'outputsize': 'full',\n\t'datatype': 'json',\n\t'apikey': 'JPIO2GNGBMFRLGMN'\n\t}\n\tquery.update(kargs)\n\t\n\tquery_str = '&'.join([f'{key}={val}' for key, val in query.items()])\n\treturn f'{url_base}?{query_str}'\n\t\ndef _request(**kargs):\n\turl = _build_url(**kargs)\n\treturn r.get(url)\n\ndef _get_symbol(symbol, **kargs):\n\tkargs['symbol'] = symbol\n\tkargs['datatype'] = 'csv'\n\treq = _request(**kargs)\n\t# Reverse dates to past to present\n\ttext = req.text\n\theader, *text = text.split()\n\ttext = '\\n'.join(\n\t\t[l for l in text[::-1]]\n\t)\n\tcsv_str = f'{header}\\n{text}'\n\n\tdata = Data.load_csv(csv_str)\n\treturn Security(symbol, data) \n\t\ndef get(symbols, **kargs):\n\tif not isinstance(symbols, list):\n\t\tsymbols = [symbols]\n\t\t\n\tresult = Securities()\n\tfor symbol in symbols:\n\t\tkargs['symbol'] = symbol\n\t\tresult.add(\n\t\t\tid=symbol,\n\t\t\tsecurity=_get_symbol(**kargs)\n\t\t)\n\treturn result\n\t\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
import urllib.request
import praw
from praw import reddit
from praw.models.listing.mixins import submission
def download_subreddit(sub):
    """Download the newest image post from subreddit *sub* to instagram/INSTAGRAM.jpg.

    Iterates the subreddit's newest submissions and stops after saving the
    first one whose URL looks like an image.
    """
    # SECURITY: credentials are hard-coded here; move them to environment
    # variables or a config file before sharing or committing this code.
    reddit = praw.Reddit(client_id='oFOYuOd31vUb4UstBWDhnQ',
        client_secret='0W_86zufGFCJlSE4lK3CwF_0UEQEQw',
        username='MarshallBranin',
        password='#Marshall2',
        user_agent='macos:com.example.text_app:v1.0.0 (by /u/MarshallBranin)')

    reddit.read_only=True

    # Iterate through the newest submissions.
    for submission in praw.reddit.Subreddit(reddit, display_name=f"{sub}").new(limit=None):
        # Get the link of the submission.
        url = str(submission.url)
        # endswith() accepts a tuple of suffixes -- replaces the chained `or` tests.
        if url.endswith(("jpg", "jpeg", "png")):
            # Retrieve the image and save it in the current folder.
            # NOTE(review): the file is always saved with a .jpg extension,
            # even for .png URLs -- confirm that is intended.
            urllib.request.urlretrieve(url, "instagram/INSTAGRAM.jpg")
            break
|
normal
|
{
"blob_id": "d19310a45a684a7bbb456555a954439df8ae92b6",
"index": 1392,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef download_subreddit(sub):\n reddit = praw.Reddit(client_id='oFOYuOd31vUb4UstBWDhnQ', client_secret=\n '0W_86zufGFCJlSE4lK3CwF_0UEQEQw', username='MarshallBranin',\n password='#Marshall2', user_agent=\n 'macos:com.example.text_app:v1.0.0 (by /u/MarshallBranin)')\n reddit.read_only = True\n for submission in praw.reddit.Subreddit(reddit, display_name=f'{sub}').new(\n limit=None):\n url = str(submission.url)\n if url.endswith('jpg') or url.endswith('jpeg') or url.endswith('png'):\n urllib.request.urlretrieve(url, 'instagram/INSTAGRAM.jpg')\n break\n",
"step-3": "import urllib.request\nimport praw\nfrom praw import reddit\nfrom praw.models.listing.mixins import submission\n\n\ndef download_subreddit(sub):\n reddit = praw.Reddit(client_id='oFOYuOd31vUb4UstBWDhnQ', client_secret=\n '0W_86zufGFCJlSE4lK3CwF_0UEQEQw', username='MarshallBranin',\n password='#Marshall2', user_agent=\n 'macos:com.example.text_app:v1.0.0 (by /u/MarshallBranin)')\n reddit.read_only = True\n for submission in praw.reddit.Subreddit(reddit, display_name=f'{sub}').new(\n limit=None):\n url = str(submission.url)\n if url.endswith('jpg') or url.endswith('jpeg') or url.endswith('png'):\n urllib.request.urlretrieve(url, 'instagram/INSTAGRAM.jpg')\n break\n",
"step-4": "import urllib.request\nimport praw\nfrom praw import reddit\nfrom praw.models.listing.mixins import submission\n\n\ndef download_subreddit(sub):\n reddit = praw.Reddit(client_id='oFOYuOd31vUb4UstBWDhnQ',\n client_secret='0W_86zufGFCJlSE4lK3CwF_0UEQEQw',\n username='MarshallBranin',\n password='#Marshall2',\n user_agent='macos:com.example.text_app:v1.0.0 (by /u/MarshallBranin)') \n \n reddit.read_only=True\n\n # Iterate through top submissions\n for submission in praw.reddit.Subreddit(reddit, display_name=f\"{sub}\").new(limit=None):\n\n # Get the link of the submission\n url = str(submission.url)\n\n # Check if the link is an image\n if url.endswith(\"jpg\") or url.endswith(\"jpeg\") or url.endswith(\"png\"):\n\n # Retrieve the image and save it in current folder\n urllib.request.urlretrieve(url, \"instagram/INSTAGRAM.jpg\")\n break\n\n\n\n\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
@receiver(post_save, sender=User)
def save_profile(sender, created, instance, **kwargs):
if created:
profile = Profile.objects.create(user=instance)
profile.save()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
User = get_user_model()
@receiver(post_save, sender=User)
def save_profile(sender, created, instance, **kwargs):
if created:
profile = Profile.objects.create(user=instance)
profile.save()
<|reserved_special_token_1|>
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.contrib.auth import get_user_model
from .models import Profile
User = get_user_model()
@receiver(post_save, sender=User)
def save_profile(sender, created, instance, **kwargs):
if created:
profile = Profile.objects.create(user=instance)
profile.save()
<|reserved_special_token_1|>
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.contrib.auth import get_user_model
from .models import Profile
User = get_user_model()
# NOTE(review): the original comment said this "wan't run on creating superuser",
# but Django's post_save signal also fires for users created via createsuperuser
# -- confirm the intended behavior.
@receiver(post_save, sender=User)
def save_profile(sender, created, instance, **kwargs):
    """Create a Profile for every newly created User (post_save signal handler)."""
    if created:
        # Profile.objects.create() already persists the row; the extra save()
        # presumably exists to trigger custom save logic -- TODO confirm it is needed.
        profile = Profile.objects.create(user=instance)
        profile.save()
|
flexible
|
{
"blob_id": "4f93af104130f5a7c853ee0e7976fd52847e588a",
"index": 4988,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\n@receiver(post_save, sender=User)\ndef save_profile(sender, created, instance, **kwargs):\n if created:\n profile = Profile.objects.create(user=instance)\n profile.save()\n",
"step-3": "<mask token>\nUser = get_user_model()\n\n\n@receiver(post_save, sender=User)\ndef save_profile(sender, created, instance, **kwargs):\n if created:\n profile = Profile.objects.create(user=instance)\n profile.save()\n",
"step-4": "from django.db.models.signals import post_save\nfrom django.dispatch import receiver\nfrom django.contrib.auth import get_user_model\nfrom .models import Profile\nUser = get_user_model()\n\n\n@receiver(post_save, sender=User)\ndef save_profile(sender, created, instance, **kwargs):\n if created:\n profile = Profile.objects.create(user=instance)\n profile.save()\n",
"step-5": "from django.db.models.signals import post_save\nfrom django.dispatch import receiver\nfrom django.contrib.auth import get_user_model\nfrom .models import Profile\n\nUser = get_user_model()\n\n# this wan't run on creating superuser\n@receiver(post_save, sender=User)\ndef save_profile(sender, created, instance, **kwargs):\n if created:\n profile = Profile.objects.create(user=instance)\n profile.save()\n ",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
class Solution(object):
    def isPalindrome(self, x):
        """Return True if the integer *x* reads the same forwards and backwards.

        :type x: int
        :rtype: bool
        """
        # Negative numbers are never palindromes ("-121" reversed is "121-").
        if x < 0:
            return False
        digits = []
        while x != 0:
            digits.append(x % 10)
            # Floor division: the original `x /= 10` is float division in
            # Python 3, which fills `digits` with floats and only stops after
            # hundreds of iterations when the value underflows to 0.0.
            x //= 10
        # x == 0 yields an empty digit list, which is trivially a palindrome.
        return digits == digits[::-1]
|
normal
|
{
"blob_id": "ef1b759872de6602646ce095823ff37f043ffd9d",
"index": 5148,
"step-1": "<mask token>\n",
"step-2": "class Solution(object):\n <mask token>\n",
"step-3": "class Solution(object):\n\n def isPalindrome(self, x):\n \"\"\"\n :type x: int\n :rtype: bool\n \"\"\"\n if x < 0:\n return False\n t = []\n while x != 0:\n t.append(x % 10)\n x /= 10\n i, j = 0, len(t) - 1\n while i < j:\n if t[i] != t[j]:\n return False\n i += 1\n j -= 1\n return True\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def plot(identifiers, chipIndexes, firstRunChipHistory,
recentRunChipHistory, specificRunChipHistory, groupedChipHistory,
mode_parameters=None):
if mode_parameters is None:
mode_parameters = {}
mode_parameters['colorsOverride'] = (plotDescription['plotDefaults'][
'colorMap'], 0.85, 0) if mode_parameters['colorsOverride'] == [
] else mode_parameters['colorsOverride']
mode_parameters['figureSizeOverride'] = plotDescription['plotDefaults'][
'figsize'] if mode_parameters['figureSizeOverride'
] is None else mode_parameters['figureSizeOverride']
return importedOutputCurvePlot(specificRunChipHistory, identifiers=
identifiers, mode_parameters=mode_parameters)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
plotDescription = {'name': 'Chip Output Curves', 'plotCategory': 'chip',
'priority': 40, 'dataFileDependencies': ['DrainSweep.json'],
'plotDefaults': {'figsize': (2, 2.5), 'colorMap': 'magma'}}
def plot(identifiers, chipIndexes, firstRunChipHistory,
recentRunChipHistory, specificRunChipHistory, groupedChipHistory,
mode_parameters=None):
if mode_parameters is None:
mode_parameters = {}
mode_parameters['colorsOverride'] = (plotDescription['plotDefaults'][
'colorMap'], 0.85, 0) if mode_parameters['colorsOverride'] == [
] else mode_parameters['colorsOverride']
mode_parameters['figureSizeOverride'] = plotDescription['plotDefaults'][
'figsize'] if mode_parameters['figureSizeOverride'
] is None else mode_parameters['figureSizeOverride']
return importedOutputCurvePlot(specificRunChipHistory, identifiers=
identifiers, mode_parameters=mode_parameters)
<|reserved_special_token_1|>
from utilities.MatplotlibUtility import *
from utilities.PlotDefinitions.DrainSweep.OutputCurve import plot as importedOutputCurvePlot
plotDescription = {'name': 'Chip Output Curves', 'plotCategory': 'chip',
'priority': 40, 'dataFileDependencies': ['DrainSweep.json'],
'plotDefaults': {'figsize': (2, 2.5), 'colorMap': 'magma'}}
def plot(identifiers, chipIndexes, firstRunChipHistory,
recentRunChipHistory, specificRunChipHistory, groupedChipHistory,
mode_parameters=None):
if mode_parameters is None:
mode_parameters = {}
mode_parameters['colorsOverride'] = (plotDescription['plotDefaults'][
'colorMap'], 0.85, 0) if mode_parameters['colorsOverride'] == [
] else mode_parameters['colorsOverride']
mode_parameters['figureSizeOverride'] = plotDescription['plotDefaults'][
'figsize'] if mode_parameters['figureSizeOverride'
] is None else mode_parameters['figureSizeOverride']
return importedOutputCurvePlot(specificRunChipHistory, identifiers=
identifiers, mode_parameters=mode_parameters)
<|reserved_special_token_1|>
from utilities.MatplotlibUtility import *
from utilities.PlotDefinitions.DrainSweep.OutputCurve import plot as importedOutputCurvePlot
# Metadata describing this plot definition (consumed by the plotting framework).
plotDescription = {
    'name': 'Chip Output Curves',
    'plotCategory': 'chip',
    'priority': 40,
    'dataFileDependencies': ['DrainSweep.json'],
    'plotDefaults': {
        'figsize': (2, 2.5),
        'colorMap': 'magma',
    },
}

def plot(identifiers, chipIndexes, firstRunChipHistory, recentRunChipHistory, specificRunChipHistory, groupedChipHistory, mode_parameters=None):
    """Render chip output curves by delegating to the DrainSweep OutputCurve plot.

    Fills in the default color map and figure size when the caller did not
    override them, then forwards to importedOutputCurvePlot. Note that
    mode_parameters is mutated in place.
    """
    if mode_parameters is None:
        mode_parameters = {}
    defaults = plotDescription['plotDefaults']
    # Use .get() with a fallback: the original indexed these keys directly,
    # which raised KeyError whenever mode_parameters was omitted (the
    # freshly created {} has no 'colorsOverride'/'figureSizeOverride').
    if mode_parameters.get('colorsOverride', []) == []:
        mode_parameters['colorsOverride'] = (defaults['colorMap'], 0.85, 0)
    if mode_parameters.get('figureSizeOverride') is None:
        mode_parameters['figureSizeOverride'] = defaults['figsize']
    return importedOutputCurvePlot(specificRunChipHistory, identifiers=identifiers, mode_parameters=mode_parameters)
|
flexible
|
{
"blob_id": "49ae9e90402d784fc3af3b47e96842fbfe842104",
"index": 9480,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef plot(identifiers, chipIndexes, firstRunChipHistory,\n recentRunChipHistory, specificRunChipHistory, groupedChipHistory,\n mode_parameters=None):\n if mode_parameters is None:\n mode_parameters = {}\n mode_parameters['colorsOverride'] = (plotDescription['plotDefaults'][\n 'colorMap'], 0.85, 0) if mode_parameters['colorsOverride'] == [\n ] else mode_parameters['colorsOverride']\n mode_parameters['figureSizeOverride'] = plotDescription['plotDefaults'][\n 'figsize'] if mode_parameters['figureSizeOverride'\n ] is None else mode_parameters['figureSizeOverride']\n return importedOutputCurvePlot(specificRunChipHistory, identifiers=\n identifiers, mode_parameters=mode_parameters)\n",
"step-3": "<mask token>\nplotDescription = {'name': 'Chip Output Curves', 'plotCategory': 'chip',\n 'priority': 40, 'dataFileDependencies': ['DrainSweep.json'],\n 'plotDefaults': {'figsize': (2, 2.5), 'colorMap': 'magma'}}\n\n\ndef plot(identifiers, chipIndexes, firstRunChipHistory,\n recentRunChipHistory, specificRunChipHistory, groupedChipHistory,\n mode_parameters=None):\n if mode_parameters is None:\n mode_parameters = {}\n mode_parameters['colorsOverride'] = (plotDescription['plotDefaults'][\n 'colorMap'], 0.85, 0) if mode_parameters['colorsOverride'] == [\n ] else mode_parameters['colorsOverride']\n mode_parameters['figureSizeOverride'] = plotDescription['plotDefaults'][\n 'figsize'] if mode_parameters['figureSizeOverride'\n ] is None else mode_parameters['figureSizeOverride']\n return importedOutputCurvePlot(specificRunChipHistory, identifiers=\n identifiers, mode_parameters=mode_parameters)\n",
"step-4": "from utilities.MatplotlibUtility import *\nfrom utilities.PlotDefinitions.DrainSweep.OutputCurve import plot as importedOutputCurvePlot\nplotDescription = {'name': 'Chip Output Curves', 'plotCategory': 'chip',\n 'priority': 40, 'dataFileDependencies': ['DrainSweep.json'],\n 'plotDefaults': {'figsize': (2, 2.5), 'colorMap': 'magma'}}\n\n\ndef plot(identifiers, chipIndexes, firstRunChipHistory,\n recentRunChipHistory, specificRunChipHistory, groupedChipHistory,\n mode_parameters=None):\n if mode_parameters is None:\n mode_parameters = {}\n mode_parameters['colorsOverride'] = (plotDescription['plotDefaults'][\n 'colorMap'], 0.85, 0) if mode_parameters['colorsOverride'] == [\n ] else mode_parameters['colorsOverride']\n mode_parameters['figureSizeOverride'] = plotDescription['plotDefaults'][\n 'figsize'] if mode_parameters['figureSizeOverride'\n ] is None else mode_parameters['figureSizeOverride']\n return importedOutputCurvePlot(specificRunChipHistory, identifiers=\n identifiers, mode_parameters=mode_parameters)\n",
"step-5": "from utilities.MatplotlibUtility import *\nfrom utilities.PlotDefinitions.DrainSweep.OutputCurve import plot as importedOutputCurvePlot\n\n\nplotDescription = {\n\t'name':'Chip Output Curves',\n\t'plotCategory': 'chip',\n\t'priority': 40,\n\t'dataFileDependencies': ['DrainSweep.json'],\n\t'plotDefaults': {\n\t\t'figsize':(2,2.5),\n\t\t'colorMap':'magma',\n\t},\n}\n\ndef plot(identifiers, chipIndexes, firstRunChipHistory, recentRunChipHistory, specificRunChipHistory, groupedChipHistory, mode_parameters=None):\n\tif(mode_parameters is None):\n\t\tmode_parameters = {}\n\t#mode_parameters['enableColorBar'] = False\n\tmode_parameters['colorsOverride'] = (plotDescription['plotDefaults']['colorMap'], 0.85, 0) if(mode_parameters['colorsOverride'] == []) else mode_parameters['colorsOverride']\n\tmode_parameters['figureSizeOverride'] = plotDescription['plotDefaults']['figsize'] \t\t if(mode_parameters['figureSizeOverride'] is None) else mode_parameters['figureSizeOverride']\n\t\n\treturn importedOutputCurvePlot(specificRunChipHistory, identifiers=identifiers, mode_parameters=mode_parameters)",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class CreateAlertForm(forms.ModelForm):
class Meta:
model = Alert
exclude = ['role', 'age_analysis', 'Date_Uploaded', 'alias_name',
'CAMT_Reveiewer', 'Date_Regularised', 'alert_message', 'Count2']
<|reserved_special_token_1|>
from django import forms
from acl.models import Alert
class CreateAlertForm(forms.ModelForm):
class Meta:
model = Alert
exclude = ['role', 'age_analysis', 'Date_Uploaded', 'alias_name',
'CAMT_Reveiewer', 'Date_Regularised', 'alert_message', 'Count2']
|
flexible
|
{
"blob_id": "bfcf6e241881c4f668f926e087ab0f7dcad61dee",
"index": 5260,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass CreateAlertForm(forms.ModelForm):\n\n\n class Meta:\n model = Alert\n exclude = ['role', 'age_analysis', 'Date_Uploaded', 'alias_name',\n 'CAMT_Reveiewer', 'Date_Regularised', 'alert_message', 'Count2']\n",
"step-3": "from django import forms\nfrom acl.models import Alert\n\n\nclass CreateAlertForm(forms.ModelForm):\n\n\n class Meta:\n model = Alert\n exclude = ['role', 'age_analysis', 'Date_Uploaded', 'alias_name',\n 'CAMT_Reveiewer', 'Date_Regularised', 'alert_message', 'Count2']\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
def get_data():
r = requests.get(url)
return '{ "data": ' + str(r.text) + '}'
def main():
args = ccloud_lib.parse_args()
config_file = args.config_file
topic = args.topic
conf = ccloud_lib.read_ccloud_config(config_file)
producer_conf = ccloud_lib.pop_schema_registry_params_from_config(conf)
producer = Producer(producer_conf)
ccloud_lib.create_topic(conf, topic)
print('hello world')
d = get_data()
djson = json.loads(d)
darray = djson['data']
for item in darray:
record_key = str(item['_id'])
record_value = json.dumps(item)
print(record_value)
producer.produce(topic, key=record_key, value=record_value,
on_delivery=acked)
producer.poll(0)
producer.flush()
print('{} messages were produced to topic {}!'.format(delivered_records,
topic))
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def acked(err, msg):
global delivered_records
"""Delivery report handler called on
successful or failed delivery of message
"""
if err is not None:
print('Failed to deliver message: {}'.format(err))
else:
delivered_records += 1
print('Produced record to topic {} partition [{}] @ offset {}'.
format(msg.topic(), msg.partition(), msg.offset()))
def get_data():
r = requests.get(url)
return '{ "data": ' + str(r.text) + '}'
def main():
args = ccloud_lib.parse_args()
config_file = args.config_file
topic = args.topic
conf = ccloud_lib.read_ccloud_config(config_file)
producer_conf = ccloud_lib.pop_schema_registry_params_from_config(conf)
producer = Producer(producer_conf)
ccloud_lib.create_topic(conf, topic)
print('hello world')
d = get_data()
djson = json.loads(d)
darray = djson['data']
for item in darray:
record_key = str(item['_id'])
record_value = json.dumps(item)
print(record_value)
producer.produce(topic, key=record_key, value=record_value,
on_delivery=acked)
producer.poll(0)
producer.flush()
print('{} messages were produced to topic {}!'.format(delivered_records,
topic))
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
delivered_records = 0
url = 'https://api.mockaroo.com/api/cbb61270?count=1000&key=5a40bdb0'
def acked(err, msg):
global delivered_records
"""Delivery report handler called on
successful or failed delivery of message
"""
if err is not None:
print('Failed to deliver message: {}'.format(err))
else:
delivered_records += 1
print('Produced record to topic {} partition [{}] @ offset {}'.
format(msg.topic(), msg.partition(), msg.offset()))
def get_data():
r = requests.get(url)
return '{ "data": ' + str(r.text) + '}'
def main():
args = ccloud_lib.parse_args()
config_file = args.config_file
topic = args.topic
conf = ccloud_lib.read_ccloud_config(config_file)
producer_conf = ccloud_lib.pop_schema_registry_params_from_config(conf)
producer = Producer(producer_conf)
ccloud_lib.create_topic(conf, topic)
print('hello world')
d = get_data()
djson = json.loads(d)
darray = djson['data']
for item in darray:
record_key = str(item['_id'])
record_value = json.dumps(item)
print(record_value)
producer.produce(topic, key=record_key, value=record_value,
on_delivery=acked)
producer.poll(0)
producer.flush()
print('{} messages were produced to topic {}!'.format(delivered_records,
topic))
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
import requests
from confluent_kafka import Producer, KafkaError
import json
import ccloud_lib
delivered_records = 0
url = 'https://api.mockaroo.com/api/cbb61270?count=1000&key=5a40bdb0'
def acked(err, msg):
global delivered_records
"""Delivery report handler called on
successful or failed delivery of message
"""
if err is not None:
print('Failed to deliver message: {}'.format(err))
else:
delivered_records += 1
print('Produced record to topic {} partition [{}] @ offset {}'.
format(msg.topic(), msg.partition(), msg.offset()))
def get_data():
r = requests.get(url)
return '{ "data": ' + str(r.text) + '}'
def main():
args = ccloud_lib.parse_args()
config_file = args.config_file
topic = args.topic
conf = ccloud_lib.read_ccloud_config(config_file)
producer_conf = ccloud_lib.pop_schema_registry_params_from_config(conf)
producer = Producer(producer_conf)
ccloud_lib.create_topic(conf, topic)
print('hello world')
d = get_data()
djson = json.loads(d)
darray = djson['data']
for item in darray:
record_key = str(item['_id'])
record_value = json.dumps(item)
print(record_value)
producer.produce(topic, key=record_key, value=record_value,
on_delivery=acked)
producer.poll(0)
producer.flush()
print('{} messages were produced to topic {}!'.format(delivered_records,
topic))
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
import requests
#!/usr/bin/env python
from confluent_kafka import Producer, KafkaError
import json
import ccloud_lib
delivered_records = 0
url = "https://api.mockaroo.com/api/cbb61270?count=1000&key=5a40bdb0"
# Optional per-message on_delivery handler (triggered by poll() or flush())
# when a message has been successfully delivered or
# permanently failed delivery (after retries).
def acked(err, msg):
global delivered_records
"""Delivery report handler called on
successful or failed delivery of message
"""
if err is not None:
print("Failed to deliver message: {}".format(err))
else:
delivered_records += 1
print("Produced record to topic {} partition [{}] @ offset {}"
.format(msg.topic(), msg.partition(), msg.offset()))
#get mockaroo data records
#make sure mockaroo schema is set to output array
def get_data():
r = requests.get(url)
return '{ "data": ' + str(r.text) + '}'
def main():
# Read arguments and configurations and initialize
args = ccloud_lib.parse_args()
config_file = args.config_file
topic = args.topic
conf = ccloud_lib.read_ccloud_config(config_file)
# Create Producer instance
producer_conf = ccloud_lib.pop_schema_registry_params_from_config(conf)
producer = Producer(producer_conf)
# Create topic if needed
ccloud_lib.create_topic(conf, topic)
print("hello world")
d = get_data()
djson = json.loads(d)
darray = djson['data']
for item in darray:
record_key = str(item['_id'])
record_value = json.dumps(item)
print(record_value)
producer.produce(topic, key=record_key, value=record_value, on_delivery=acked)
producer.poll(0)
producer.flush()
print("{} messages were produced to topic {}!".format(delivered_records, topic))
if __name__ == '__main__':
main()
# to run program
# python user_purchases_to_kafka.py -f ~/.confluent/python.config -t user_purchases
# python user_activity_to_kafka.py -f ~/.confluent/python.config -t user_activity
|
flexible
|
{
"blob_id": "b4f522398cd2658c2db926216e974781e10c44df",
"index": 7897,
"step-1": "<mask token>\n\n\ndef get_data():\n r = requests.get(url)\n return '{ \"data\": ' + str(r.text) + '}'\n\n\ndef main():\n args = ccloud_lib.parse_args()\n config_file = args.config_file\n topic = args.topic\n conf = ccloud_lib.read_ccloud_config(config_file)\n producer_conf = ccloud_lib.pop_schema_registry_params_from_config(conf)\n producer = Producer(producer_conf)\n ccloud_lib.create_topic(conf, topic)\n print('hello world')\n d = get_data()\n djson = json.loads(d)\n darray = djson['data']\n for item in darray:\n record_key = str(item['_id'])\n record_value = json.dumps(item)\n print(record_value)\n producer.produce(topic, key=record_key, value=record_value,\n on_delivery=acked)\n producer.poll(0)\n producer.flush()\n print('{} messages were produced to topic {}!'.format(delivered_records,\n topic))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef acked(err, msg):\n global delivered_records\n \"\"\"Delivery report handler called on\n successful or failed delivery of message\n \"\"\"\n if err is not None:\n print('Failed to deliver message: {}'.format(err))\n else:\n delivered_records += 1\n print('Produced record to topic {} partition [{}] @ offset {}'.\n format(msg.topic(), msg.partition(), msg.offset()))\n\n\ndef get_data():\n r = requests.get(url)\n return '{ \"data\": ' + str(r.text) + '}'\n\n\ndef main():\n args = ccloud_lib.parse_args()\n config_file = args.config_file\n topic = args.topic\n conf = ccloud_lib.read_ccloud_config(config_file)\n producer_conf = ccloud_lib.pop_schema_registry_params_from_config(conf)\n producer = Producer(producer_conf)\n ccloud_lib.create_topic(conf, topic)\n print('hello world')\n d = get_data()\n djson = json.loads(d)\n darray = djson['data']\n for item in darray:\n record_key = str(item['_id'])\n record_value = json.dumps(item)\n print(record_value)\n producer.produce(topic, key=record_key, value=record_value,\n on_delivery=acked)\n producer.poll(0)\n producer.flush()\n print('{} messages were produced to topic {}!'.format(delivered_records,\n topic))\n\n\nif __name__ == '__main__':\n main()\n",
"step-3": "<mask token>\ndelivered_records = 0\nurl = 'https://api.mockaroo.com/api/cbb61270?count=1000&key=5a40bdb0'\n\n\ndef acked(err, msg):\n global delivered_records\n \"\"\"Delivery report handler called on\n successful or failed delivery of message\n \"\"\"\n if err is not None:\n print('Failed to deliver message: {}'.format(err))\n else:\n delivered_records += 1\n print('Produced record to topic {} partition [{}] @ offset {}'.\n format(msg.topic(), msg.partition(), msg.offset()))\n\n\ndef get_data():\n r = requests.get(url)\n return '{ \"data\": ' + str(r.text) + '}'\n\n\ndef main():\n args = ccloud_lib.parse_args()\n config_file = args.config_file\n topic = args.topic\n conf = ccloud_lib.read_ccloud_config(config_file)\n producer_conf = ccloud_lib.pop_schema_registry_params_from_config(conf)\n producer = Producer(producer_conf)\n ccloud_lib.create_topic(conf, topic)\n print('hello world')\n d = get_data()\n djson = json.loads(d)\n darray = djson['data']\n for item in darray:\n record_key = str(item['_id'])\n record_value = json.dumps(item)\n print(record_value)\n producer.produce(topic, key=record_key, value=record_value,\n on_delivery=acked)\n producer.poll(0)\n producer.flush()\n print('{} messages were produced to topic {}!'.format(delivered_records,\n topic))\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "import requests\nfrom confluent_kafka import Producer, KafkaError\nimport json\nimport ccloud_lib\ndelivered_records = 0\nurl = 'https://api.mockaroo.com/api/cbb61270?count=1000&key=5a40bdb0'\n\n\ndef acked(err, msg):\n global delivered_records\n \"\"\"Delivery report handler called on\n successful or failed delivery of message\n \"\"\"\n if err is not None:\n print('Failed to deliver message: {}'.format(err))\n else:\n delivered_records += 1\n print('Produced record to topic {} partition [{}] @ offset {}'.\n format(msg.topic(), msg.partition(), msg.offset()))\n\n\ndef get_data():\n r = requests.get(url)\n return '{ \"data\": ' + str(r.text) + '}'\n\n\ndef main():\n args = ccloud_lib.parse_args()\n config_file = args.config_file\n topic = args.topic\n conf = ccloud_lib.read_ccloud_config(config_file)\n producer_conf = ccloud_lib.pop_schema_registry_params_from_config(conf)\n producer = Producer(producer_conf)\n ccloud_lib.create_topic(conf, topic)\n print('hello world')\n d = get_data()\n djson = json.loads(d)\n darray = djson['data']\n for item in darray:\n record_key = str(item['_id'])\n record_value = json.dumps(item)\n print(record_value)\n producer.produce(topic, key=record_key, value=record_value,\n on_delivery=acked)\n producer.poll(0)\n producer.flush()\n print('{} messages were produced to topic {}!'.format(delivered_records,\n topic))\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "import requests\n#!/usr/bin/env python\n\nfrom confluent_kafka import Producer, KafkaError\nimport json\nimport ccloud_lib\n\ndelivered_records = 0\nurl = \"https://api.mockaroo.com/api/cbb61270?count=1000&key=5a40bdb0\"\n\n\n # Optional per-message on_delivery handler (triggered by poll() or flush())\n # when a message has been successfully delivered or\n # permanently failed delivery (after retries).\ndef acked(err, msg):\n global delivered_records\n \"\"\"Delivery report handler called on\n successful or failed delivery of message\n \"\"\"\n if err is not None:\n print(\"Failed to deliver message: {}\".format(err))\n else:\n delivered_records += 1\n print(\"Produced record to topic {} partition [{}] @ offset {}\"\n .format(msg.topic(), msg.partition(), msg.offset()))\n\n#get mockaroo data records\n#make sure mockaroo schema is set to output array\ndef get_data():\n r = requests.get(url)\n return '{ \"data\": ' + str(r.text) + '}'\n\ndef main():\n # Read arguments and configurations and initialize\n args = ccloud_lib.parse_args()\n config_file = args.config_file\n topic = args.topic\n conf = ccloud_lib.read_ccloud_config(config_file)\n\n # Create Producer instance\n producer_conf = ccloud_lib.pop_schema_registry_params_from_config(conf)\n producer = Producer(producer_conf)\n\n # Create topic if needed\n ccloud_lib.create_topic(conf, topic)\n\n print(\"hello world\")\n d = get_data()\n djson = json.loads(d)\n darray = djson['data']\n\n for item in darray:\n record_key = str(item['_id'])\n record_value = json.dumps(item)\n print(record_value)\n producer.produce(topic, key=record_key, value=record_value, on_delivery=acked)\n producer.poll(0)\n\n producer.flush()\n\n print(\"{} messages were produced to topic {}!\".format(delivered_records, topic))\n\n\nif __name__ == '__main__':\n main()\n\n\n# to run program\n# python user_purchases_to_kafka.py -f ~/.confluent/python.config -t user_purchases\n# python user_activity_to_kafka.py -f 
~/.confluent/python.config -t user_activity\n",
"step-ids": [
2,
4,
5,
6,
7
]
}
|
[
2,
4,
5,
6,
7
] |
"""#########################################################################
Author: Yingru Liu
Institute: Stony Brook University
Descriptions: transer the numpy files of the midi songs into midi files.
(Cause the code privided by RNN-RBM tutorial to save midi
runs in python 2.7 but my code is in python 3.6)
----2017.12.29
#########################################################################"""
import numpy as np
from midi.utils import midiread, midiwrite
#
CGRNN_FOLDER = "Samples/CGRNN/"
SRNN_FOLDER = "Samples/SRNN/"
VRNN_FOLDER = "Samples/VRNN/"
ssRnnRbm_FOLDER = "Samples/ssRnnRbm/"
Ground_FOLDER = "Samples/"
for i in range(20):
print('The ' + str(i) + '-th graph.')
Ground_sample = np.load(Ground_FOLDER + 'Ground-True-' + str(i) + '.npy')
CGRNN_sample = np.load(CGRNN_FOLDER + 'CGRNN-' + str(i) + '.npy')
SRNN_sample = np.load(SRNN_FOLDER + 'SRNN-' + str(i) + '.npy')
VRNN_sample = np.load(VRNN_FOLDER + 'VRNN-' + str(i) + '.npy')
ssRnnRbm_sample = np.load(ssRnnRbm_FOLDER + 'ssRnnRbm-' + str(i) + '.npy')
midiwrite(Ground_FOLDER + 'Ground-True-' + str(i) + '.mid', Ground_sample, (1, 128), 0.25)
midiwrite(CGRNN_FOLDER + 'CGRNN-' + str(i) + '.mid', CGRNN_sample, (1, 128), 0.25)
midiwrite(SRNN_FOLDER + 'SRNN-' + str(i) + '.mid', SRNN_sample, (1, 128), 0.25)
midiwrite(VRNN_FOLDER + 'VRNN-' + str(i) + '.mid', VRNN_sample, (1, 128), 0.25)
midiwrite(ssRnnRbm_FOLDER + 'ssRnnRbm-' + str(i) + '.mid', ssRnnRbm_sample, (1, 128), 0.25)
pass
|
normal
|
{
"blob_id": "af152e0b739305866902ee141f94641b17ff03ea",
"index": 6496,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor i in range(20):\n print('The ' + str(i) + '-th graph.')\n Ground_sample = np.load(Ground_FOLDER + 'Ground-True-' + str(i) + '.npy')\n CGRNN_sample = np.load(CGRNN_FOLDER + 'CGRNN-' + str(i) + '.npy')\n SRNN_sample = np.load(SRNN_FOLDER + 'SRNN-' + str(i) + '.npy')\n VRNN_sample = np.load(VRNN_FOLDER + 'VRNN-' + str(i) + '.npy')\n ssRnnRbm_sample = np.load(ssRnnRbm_FOLDER + 'ssRnnRbm-' + str(i) + '.npy')\n midiwrite(Ground_FOLDER + 'Ground-True-' + str(i) + '.mid',\n Ground_sample, (1, 128), 0.25)\n midiwrite(CGRNN_FOLDER + 'CGRNN-' + str(i) + '.mid', CGRNN_sample, (1, \n 128), 0.25)\n midiwrite(SRNN_FOLDER + 'SRNN-' + str(i) + '.mid', SRNN_sample, (1, 128\n ), 0.25)\n midiwrite(VRNN_FOLDER + 'VRNN-' + str(i) + '.mid', VRNN_sample, (1, 128\n ), 0.25)\n midiwrite(ssRnnRbm_FOLDER + 'ssRnnRbm-' + str(i) + '.mid',\n ssRnnRbm_sample, (1, 128), 0.25)\n pass\n",
"step-3": "<mask token>\nCGRNN_FOLDER = 'Samples/CGRNN/'\nSRNN_FOLDER = 'Samples/SRNN/'\nVRNN_FOLDER = 'Samples/VRNN/'\nssRnnRbm_FOLDER = 'Samples/ssRnnRbm/'\nGround_FOLDER = 'Samples/'\nfor i in range(20):\n print('The ' + str(i) + '-th graph.')\n Ground_sample = np.load(Ground_FOLDER + 'Ground-True-' + str(i) + '.npy')\n CGRNN_sample = np.load(CGRNN_FOLDER + 'CGRNN-' + str(i) + '.npy')\n SRNN_sample = np.load(SRNN_FOLDER + 'SRNN-' + str(i) + '.npy')\n VRNN_sample = np.load(VRNN_FOLDER + 'VRNN-' + str(i) + '.npy')\n ssRnnRbm_sample = np.load(ssRnnRbm_FOLDER + 'ssRnnRbm-' + str(i) + '.npy')\n midiwrite(Ground_FOLDER + 'Ground-True-' + str(i) + '.mid',\n Ground_sample, (1, 128), 0.25)\n midiwrite(CGRNN_FOLDER + 'CGRNN-' + str(i) + '.mid', CGRNN_sample, (1, \n 128), 0.25)\n midiwrite(SRNN_FOLDER + 'SRNN-' + str(i) + '.mid', SRNN_sample, (1, 128\n ), 0.25)\n midiwrite(VRNN_FOLDER + 'VRNN-' + str(i) + '.mid', VRNN_sample, (1, 128\n ), 0.25)\n midiwrite(ssRnnRbm_FOLDER + 'ssRnnRbm-' + str(i) + '.mid',\n ssRnnRbm_sample, (1, 128), 0.25)\n pass\n",
"step-4": "<mask token>\nimport numpy as np\nfrom midi.utils import midiread, midiwrite\nCGRNN_FOLDER = 'Samples/CGRNN/'\nSRNN_FOLDER = 'Samples/SRNN/'\nVRNN_FOLDER = 'Samples/VRNN/'\nssRnnRbm_FOLDER = 'Samples/ssRnnRbm/'\nGround_FOLDER = 'Samples/'\nfor i in range(20):\n print('The ' + str(i) + '-th graph.')\n Ground_sample = np.load(Ground_FOLDER + 'Ground-True-' + str(i) + '.npy')\n CGRNN_sample = np.load(CGRNN_FOLDER + 'CGRNN-' + str(i) + '.npy')\n SRNN_sample = np.load(SRNN_FOLDER + 'SRNN-' + str(i) + '.npy')\n VRNN_sample = np.load(VRNN_FOLDER + 'VRNN-' + str(i) + '.npy')\n ssRnnRbm_sample = np.load(ssRnnRbm_FOLDER + 'ssRnnRbm-' + str(i) + '.npy')\n midiwrite(Ground_FOLDER + 'Ground-True-' + str(i) + '.mid',\n Ground_sample, (1, 128), 0.25)\n midiwrite(CGRNN_FOLDER + 'CGRNN-' + str(i) + '.mid', CGRNN_sample, (1, \n 128), 0.25)\n midiwrite(SRNN_FOLDER + 'SRNN-' + str(i) + '.mid', SRNN_sample, (1, 128\n ), 0.25)\n midiwrite(VRNN_FOLDER + 'VRNN-' + str(i) + '.mid', VRNN_sample, (1, 128\n ), 0.25)\n midiwrite(ssRnnRbm_FOLDER + 'ssRnnRbm-' + str(i) + '.mid',\n ssRnnRbm_sample, (1, 128), 0.25)\n pass\n",
"step-5": "\"\"\"#########################################################################\r\nAuthor: Yingru Liu\r\nInstitute: Stony Brook University\r\nDescriptions: transer the numpy files of the midi songs into midi files.\r\n (Cause the code privided by RNN-RBM tutorial to save midi\r\n runs in python 2.7 but my code is in python 3.6)\r\n ----2017.12.29\r\n#########################################################################\"\"\"\r\nimport numpy as np\r\nfrom midi.utils import midiread, midiwrite\r\n#\r\nCGRNN_FOLDER = \"Samples/CGRNN/\"\r\nSRNN_FOLDER = \"Samples/SRNN/\"\r\nVRNN_FOLDER = \"Samples/VRNN/\"\r\nssRnnRbm_FOLDER = \"Samples/ssRnnRbm/\"\r\nGround_FOLDER = \"Samples/\"\r\n\r\nfor i in range(20):\r\n print('The ' + str(i) + '-th graph.')\r\n Ground_sample = np.load(Ground_FOLDER + 'Ground-True-' + str(i) + '.npy')\r\n CGRNN_sample = np.load(CGRNN_FOLDER + 'CGRNN-' + str(i) + '.npy')\r\n SRNN_sample = np.load(SRNN_FOLDER + 'SRNN-' + str(i) + '.npy')\r\n VRNN_sample = np.load(VRNN_FOLDER + 'VRNN-' + str(i) + '.npy')\r\n ssRnnRbm_sample = np.load(ssRnnRbm_FOLDER + 'ssRnnRbm-' + str(i) + '.npy')\r\n midiwrite(Ground_FOLDER + 'Ground-True-' + str(i) + '.mid', Ground_sample, (1, 128), 0.25)\r\n midiwrite(CGRNN_FOLDER + 'CGRNN-' + str(i) + '.mid', CGRNN_sample, (1, 128), 0.25)\r\n midiwrite(SRNN_FOLDER + 'SRNN-' + str(i) + '.mid', SRNN_sample, (1, 128), 0.25)\r\n midiwrite(VRNN_FOLDER + 'VRNN-' + str(i) + '.mid', VRNN_sample, (1, 128), 0.25)\r\n midiwrite(ssRnnRbm_FOLDER + 'ssRnnRbm-' + str(i) + '.mid', ssRnnRbm_sample, (1, 128), 0.25)\r\n pass",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
os.chdir('/mnt/cwd')
<|reserved_special_token_0|>
with open('/mnt/scripts/outputs/instcat_list_subset' + str(subset_index) +
'.json', 'r') as f:
instcat_list_subset = json.load(f)
sys.path.append('/mnt/scripts')
<|reserved_special_token_0|>
ict.determine_instcat_work(instcat_list_subset,
'/mnt/scripts/outputs/worklist_subset' + str(subset_index) + '.json')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
os.chdir('/mnt/cwd')
subset_index = sys.argv[1]
with open('/mnt/scripts/outputs/instcat_list_subset' + str(subset_index) +
'.json', 'r') as f:
instcat_list_subset = json.load(f)
sys.path.append('/mnt/scripts')
<|reserved_special_token_0|>
ict.determine_instcat_work(instcat_list_subset,
'/mnt/scripts/outputs/worklist_subset' + str(subset_index) + '.json')
<|reserved_special_token_1|>
import json
import sys
import os
os.chdir('/mnt/cwd')
subset_index = sys.argv[1]
with open('/mnt/scripts/outputs/instcat_list_subset' + str(subset_index) +
'.json', 'r') as f:
instcat_list_subset = json.load(f)
sys.path.append('/mnt/scripts')
import instcat_trimmer as ict
ict.determine_instcat_work(instcat_list_subset,
'/mnt/scripts/outputs/worklist_subset' + str(subset_index) + '.json')
<|reserved_special_token_1|>
import json
import sys
import os
# Change to Singularity working directory.
os.chdir('/mnt/cwd')
# Take subset index as argument
subset_index = sys.argv[1]
# Open up subset matching this.
with open('/mnt/scripts/outputs/instcat_list_subset'+str(subset_index)+'.json', 'r') as f:
instcat_list_subset = json.load(f)
# Import instcat trimmer
sys.path.append('/mnt/scripts')
import instcat_trimmer as ict
ict.determine_instcat_work(instcat_list_subset, '/mnt/scripts/outputs/worklist_subset'+str(subset_index)+'.json')
|
flexible
|
{
"blob_id": "e2e5ca388d67f2a13eaef6067fc19e2dfe284a55",
"index": 4469,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nos.chdir('/mnt/cwd')\n<mask token>\nwith open('/mnt/scripts/outputs/instcat_list_subset' + str(subset_index) +\n '.json', 'r') as f:\n instcat_list_subset = json.load(f)\nsys.path.append('/mnt/scripts')\n<mask token>\nict.determine_instcat_work(instcat_list_subset, \n '/mnt/scripts/outputs/worklist_subset' + str(subset_index) + '.json')\n",
"step-3": "<mask token>\nos.chdir('/mnt/cwd')\nsubset_index = sys.argv[1]\nwith open('/mnt/scripts/outputs/instcat_list_subset' + str(subset_index) +\n '.json', 'r') as f:\n instcat_list_subset = json.load(f)\nsys.path.append('/mnt/scripts')\n<mask token>\nict.determine_instcat_work(instcat_list_subset, \n '/mnt/scripts/outputs/worklist_subset' + str(subset_index) + '.json')\n",
"step-4": "import json\nimport sys\nimport os\nos.chdir('/mnt/cwd')\nsubset_index = sys.argv[1]\nwith open('/mnt/scripts/outputs/instcat_list_subset' + str(subset_index) +\n '.json', 'r') as f:\n instcat_list_subset = json.load(f)\nsys.path.append('/mnt/scripts')\nimport instcat_trimmer as ict\nict.determine_instcat_work(instcat_list_subset, \n '/mnt/scripts/outputs/worklist_subset' + str(subset_index) + '.json')\n",
"step-5": "import json\nimport sys\nimport os\n\n# Change to Singularity working directory.\nos.chdir('/mnt/cwd')\n\n# Take subset index as argument\nsubset_index = sys.argv[1]\n\n# Open up subset matching this.\nwith open('/mnt/scripts/outputs/instcat_list_subset'+str(subset_index)+'.json', 'r') as f:\n instcat_list_subset = json.load(f)\n\n# Import instcat trimmer\nsys.path.append('/mnt/scripts')\nimport instcat_trimmer as ict\n\nict.determine_instcat_work(instcat_list_subset, '/mnt/scripts/outputs/worklist_subset'+str(subset_index)+'.json')\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
#coding: utf-8
import mmh3
from bitarray import bitarray
BIT_SIZE = 1 << 30
class BloomFilter:
    """Probabilistic set-membership filter backed by a fixed-size bit array.

    ``add`` records a value; ``is_contains`` may return false positives but
    never false negatives (a value that was added is always reported present).
    """

    def __init__(self):
        # Allocate the bit vector and clear every bit.
        bit_array = bitarray(BIT_SIZE)
        bit_array.setall(0)
        self.bit_array = bit_array

    def add(self, val):
        """Record *val* by setting each of its hash positions to 1."""
        for pos in self.get_postions(val):
            self.bit_array[pos] = 1

    def get_postions(self, val):
        """Return the six bit positions used for *val*.

        Different prime seeds turn one murmur hash into six independent
        hash functions.  (Method name kept as-is — misspelling included —
        for backward compatibility with existing callers.)
        """
        # Fix: the original also computed an unused seed-19 hash (point7)
        # that never made it into the returned list; that dead work is removed.
        seeds = (5, 7, 11, 13, 23, 31)
        return [mmh3.hash(val, seed) % BIT_SIZE for seed in seeds]

    def is_contains(self, val):
        """Return True if *val* may be in the set (False means definitely not)."""
        return all(self.bit_array[pos] for pos in self.get_postions(val))
if __name__ == '__main__':
    flt = BloomFilter()

    # On the first run this prints "not exists", because nothing has been
    # added to the filter yet.
    if not flt.is_contains('zqw'):
        print('not exists')
        flt.add('zqw')
    else:
        print('exists')

    if not flt.is_contains('shooter'):
        flt.add('shooter')
    else:
        print('exists')

    if not flt.is_contains('zqw'):
        flt.add('zqw')
    else:
        print('exists')
|
normal
|
{
"blob_id": "5a103a4f72b9cd3ea3911aeefeeb2194c8ad7df0",
"index": 589,
"step-1": "<mask token>\n\n\nclass BloomFilter:\n\n def __init__(self):\n bit_array = bitarray(BIT_SIZE)\n bit_array.setall(0)\n self.bit_array = bit_array\n\n def add(self, val):\n point_list = self.get_postions(val)\n for b in point_list:\n self.bit_array[b] = 1\n <mask token>\n\n def is_contains(self, val):\n point_list = self.get_postions(val)\n result = True\n for b in point_list:\n result = result and self.bit_array[b]\n return result\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass BloomFilter:\n\n def __init__(self):\n bit_array = bitarray(BIT_SIZE)\n bit_array.setall(0)\n self.bit_array = bit_array\n\n def add(self, val):\n point_list = self.get_postions(val)\n for b in point_list:\n self.bit_array[b] = 1\n\n def get_postions(self, val):\n point1 = mmh3.hash(val, 5) % BIT_SIZE\n point2 = mmh3.hash(val, 7) % BIT_SIZE\n point3 = mmh3.hash(val, 11) % BIT_SIZE\n point4 = mmh3.hash(val, 13) % BIT_SIZE\n point7 = mmh3.hash(val, 19) % BIT_SIZE\n point5 = mmh3.hash(val, 23) % BIT_SIZE\n point6 = mmh3.hash(val, 31) % BIT_SIZE\n return [point1, point2, point3, point4, point5, point6]\n\n def is_contains(self, val):\n point_list = self.get_postions(val)\n result = True\n for b in point_list:\n result = result and self.bit_array[b]\n return result\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass BloomFilter:\n\n def __init__(self):\n bit_array = bitarray(BIT_SIZE)\n bit_array.setall(0)\n self.bit_array = bit_array\n\n def add(self, val):\n point_list = self.get_postions(val)\n for b in point_list:\n self.bit_array[b] = 1\n\n def get_postions(self, val):\n point1 = mmh3.hash(val, 5) % BIT_SIZE\n point2 = mmh3.hash(val, 7) % BIT_SIZE\n point3 = mmh3.hash(val, 11) % BIT_SIZE\n point4 = mmh3.hash(val, 13) % BIT_SIZE\n point7 = mmh3.hash(val, 19) % BIT_SIZE\n point5 = mmh3.hash(val, 23) % BIT_SIZE\n point6 = mmh3.hash(val, 31) % BIT_SIZE\n return [point1, point2, point3, point4, point5, point6]\n\n def is_contains(self, val):\n point_list = self.get_postions(val)\n result = True\n for b in point_list:\n result = result and self.bit_array[b]\n return result\n\n\nif __name__ == '__main__':\n bf = BloomFilter()\n if bf.is_contains('zqw'):\n print('exists')\n else:\n print('not exists')\n bf.add('zqw')\n if bf.is_contains('shooter'):\n print('exists')\n else:\n bf.add('shooter')\n if bf.is_contains('zqw'):\n print('exists')\n else:\n bf.add('zqw')\n",
"step-4": "<mask token>\nBIT_SIZE = 1 << 30\n\n\nclass BloomFilter:\n\n def __init__(self):\n bit_array = bitarray(BIT_SIZE)\n bit_array.setall(0)\n self.bit_array = bit_array\n\n def add(self, val):\n point_list = self.get_postions(val)\n for b in point_list:\n self.bit_array[b] = 1\n\n def get_postions(self, val):\n point1 = mmh3.hash(val, 5) % BIT_SIZE\n point2 = mmh3.hash(val, 7) % BIT_SIZE\n point3 = mmh3.hash(val, 11) % BIT_SIZE\n point4 = mmh3.hash(val, 13) % BIT_SIZE\n point7 = mmh3.hash(val, 19) % BIT_SIZE\n point5 = mmh3.hash(val, 23) % BIT_SIZE\n point6 = mmh3.hash(val, 31) % BIT_SIZE\n return [point1, point2, point3, point4, point5, point6]\n\n def is_contains(self, val):\n point_list = self.get_postions(val)\n result = True\n for b in point_list:\n result = result and self.bit_array[b]\n return result\n\n\nif __name__ == '__main__':\n bf = BloomFilter()\n if bf.is_contains('zqw'):\n print('exists')\n else:\n print('not exists')\n bf.add('zqw')\n if bf.is_contains('shooter'):\n print('exists')\n else:\n bf.add('shooter')\n if bf.is_contains('zqw'):\n print('exists')\n else:\n bf.add('zqw')\n",
"step-5": "#coding: utf-8\nimport mmh3\nfrom bitarray import bitarray\n\nBIT_SIZE = 1 << 30\n\nclass BloomFilter:\n\n def __init__(self):\n # Initialize bloom filter, set size and all bits to 0\n bit_array = bitarray(BIT_SIZE)\n bit_array.setall(0)\n\n self.bit_array = bit_array\n\n def add(self, val):\n point_list = self.get_postions(val)\n\n for b in point_list:\n self.bit_array[b] = 1\n\n def get_postions(self, val):\n # Get points positions in bit vector.\n # 提供不同的hash种子得到多个hash函数, seed最好为质数\n\n point1 = mmh3.hash(val, 5) % BIT_SIZE\n point2 = mmh3.hash(val, 7) % BIT_SIZE\n point3 = mmh3.hash(val, 11) % BIT_SIZE\n point4 = mmh3.hash(val, 13) % BIT_SIZE\n point7 = mmh3.hash(val, 19) % BIT_SIZE\n point5 = mmh3.hash(val, 23) % BIT_SIZE\n point6 = mmh3.hash(val, 31) % BIT_SIZE\n\n return [point1, point2, point3, point4, point5, point6]\n\n def is_contains(self, val):\n point_list = self.get_postions(val)\n\n result = True\n for b in point_list:\n result = result and self.bit_array[b]\n\n return result\n\n\nif __name__ == '__main__':\n\n bf = BloomFilter()\n\n # 第一次运行时会显示 not exists\n\n if bf.is_contains('zqw'):\n print('exists')\n else:\n print('not exists')\n bf.add('zqw')\n\n if bf.is_contains('shooter'):\n print('exists')\n else:\n bf.add('shooter')\n\n if bf.is_contains('zqw'):\n print('exists')\n else:\n bf.add('zqw')",
"step-ids": [
4,
5,
6,
7,
9
]
}
|
[
4,
5,
6,
7,
9
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
for j in range(N):
for i in range(n):
r = random.uniform(0, 1)
if r < p:
x = 1
else:
x = 0
Y.append(x)
outcome = sum(Y)
b[outcome] = b[outcome] + 1
Y.clear()
for i in range(n + 1):
b[i] = b[i] / N
p = 0
<|reserved_special_token_0|>
for i in range(cv, 19):
p = p + b[i]
print('For a critical value of', cv,
'the probability of rejecting the old system in favor of a new system that is no better than is'
, p, '.')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
p = 0.5
n = 18
Y = []
b = [0] * (n + 1)
N = 100
for j in range(N):
for i in range(n):
r = random.uniform(0, 1)
if r < p:
x = 1
else:
x = 0
Y.append(x)
outcome = sum(Y)
b[outcome] = b[outcome] + 1
Y.clear()
for i in range(n + 1):
b[i] = b[i] / N
p = 0
cv = int(input('Enter a choice for the CV.'))
for i in range(cv, 19):
p = p + b[i]
print('For a critical value of', cv,
'the probability of rejecting the old system in favor of a new system that is no better than is'
, p, '.')
<|reserved_special_token_1|>
import matplotlib.pyplot as pmf
import random
p = 0.5
n = 18
Y = []
b = [0] * (n + 1)
N = 100
for j in range(N):
for i in range(n):
r = random.uniform(0, 1)
if r < p:
x = 1
else:
x = 0
Y.append(x)
outcome = sum(Y)
b[outcome] = b[outcome] + 1
Y.clear()
for i in range(n + 1):
b[i] = b[i] / N
p = 0
cv = int(input('Enter a choice for the CV.'))
for i in range(cv, 19):
p = p + b[i]
print('For a critical value of', cv,
'the probability of rejecting the old system in favor of a new system that is no better than is'
, p, '.')
<|reserved_special_token_1|>
# header
# Monte Carlo estimate of the binomial PMF for n Bernoulli(p) trials,
# then the tail probability P(successes >= cv) for a user-chosen critical value.
import matplotlib.pyplot as pmf  # NOTE(review): imported but never used below — confirm before removing
import random

p = 0.5            # probability of success for the original system
n = 18             # number of Bernoulli trials per experiment
Y = []             # Bernoulli outcomes of the current experiment
b = [0] * (n + 1)  # b[k] counts experiments with exactly k successes
N = 100            # number of experiments performed

for j in range(N):
    # Draw n Bernoulli(p) random variables.
    for i in range(n):
        r = random.uniform(0, 1)
        if r < p:
            x = 1
        else:
            x = 0
        Y.append(x)
    outcome = sum(Y)             # number of successes, 0..n
    b[outcome] = b[outcome] + 1  # tally for the empirical PMF
    Y.clear()

# Convert counts to empirical probabilities.
for i in range(n + 1):
    b[i] = b[i] / N

# Fix: the original reset p inside the loop above on every iteration;
# resetting it once here has the same effect and is clearer.
p = 0

cv = int(input('Enter a choice for the CV.'))

# Tail probability P(successes >= cv).  Fix: the loop bound was a
# hard-coded 19; n + 1 is the same value for n = 18 but tracks n.
for i in range(cv, n + 1):
    p = p + b[i]

print('For a critical value of', cv, 'the probability of rejecting the old system in favor of a new system that is no better than is', p, '.')
# cv = 13, 1/20 or the 5% rule
|
flexible
|
{
"blob_id": "9a1b268386b4652bf50af0365892ef7338329727",
"index": 9631,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor j in range(N):\n for i in range(n):\n r = random.uniform(0, 1)\n if r < p:\n x = 1\n else:\n x = 0\n Y.append(x)\n outcome = sum(Y)\n b[outcome] = b[outcome] + 1\n Y.clear()\nfor i in range(n + 1):\n b[i] = b[i] / N\n p = 0\n<mask token>\nfor i in range(cv, 19):\n p = p + b[i]\nprint('For a critical value of', cv,\n 'the probability of rejecting the old system in favor of a new system that is no better than is'\n , p, '.')\n",
"step-3": "<mask token>\np = 0.5\nn = 18\nY = []\nb = [0] * (n + 1)\nN = 100\nfor j in range(N):\n for i in range(n):\n r = random.uniform(0, 1)\n if r < p:\n x = 1\n else:\n x = 0\n Y.append(x)\n outcome = sum(Y)\n b[outcome] = b[outcome] + 1\n Y.clear()\nfor i in range(n + 1):\n b[i] = b[i] / N\n p = 0\ncv = int(input('Enter a choice for the CV.'))\nfor i in range(cv, 19):\n p = p + b[i]\nprint('For a critical value of', cv,\n 'the probability of rejecting the old system in favor of a new system that is no better than is'\n , p, '.')\n",
"step-4": "import matplotlib.pyplot as pmf\nimport random\np = 0.5\nn = 18\nY = []\nb = [0] * (n + 1)\nN = 100\nfor j in range(N):\n for i in range(n):\n r = random.uniform(0, 1)\n if r < p:\n x = 1\n else:\n x = 0\n Y.append(x)\n outcome = sum(Y)\n b[outcome] = b[outcome] + 1\n Y.clear()\nfor i in range(n + 1):\n b[i] = b[i] / N\n p = 0\ncv = int(input('Enter a choice for the CV.'))\nfor i in range(cv, 19):\n p = p + b[i]\nprint('For a critical value of', cv,\n 'the probability of rejecting the old system in favor of a new system that is no better than is'\n , p, '.')\n",
"step-5": "#header\n\nimport matplotlib.pyplot as pmf\nimport random\n\np = 0.5 # Probablility of success for original system\nn = 18 # Number of trials\nY = [] # Contains binomial RVs\nb = [0] * (n+1) # List of n + 1 zeroes\nN = 100 # Number of experiments performed\n\nfor j in range(N):\n \n # Bernoulli random variable\n for i in range(n):\n \n r = random.uniform(0,1)\n if r < p:\n x = 1\n else:\n x = 0\n Y.append(x)\n outcome = sum(Y) # Number of successes from 0 to n\n b[outcome] = b[outcome] + 1 # Record of successes for bar plot\n Y.clear()\n \n \nfor i in range(n+1):\n b[i] = b[i]/N # Probabilities\n p = 0\n\ncv = int(input('Enter a choice for the CV.'))\n\nfor i in range(cv, 19):\n p = p + b[i]\n \nprint('For a critical value of', cv, 'the probability of rejecting the old system in favor of a new system that is no better than is', p,'.')\n#cv = 13, 1/20 or the 5% rule",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import cv2
import numpy as np

LOWEST_MATCHES_NUMBER = 30

# SIFT keypoint detector/descriptor plus a brute-force matcher.
sift = cv2.xfeatures2d.SIFT_create()
bf = cv2.BFMatcher()

# Reference image we want to locate in the live camera feed.
train_img = cv2.imread('Photo/demo2.jpg', 0)
train_kp, train_desc = sift.detectAndCompute(train_img, None)

camera = cv2.VideoCapture(0)
while True:
    det, frame_with_color = camera.read()
    frame = cv2.cvtColor(frame_with_color, cv2.COLOR_BGR2GRAY)
    frame_kp, frame_desc = sift.detectAndCompute(frame, None)

    # Keep only matches that pass Lowe's ratio test.
    matches = bf.knnMatch(frame_desc, train_desc, k=2)
    good = [m for m, n in matches if m.distance < 0.75 * n.distance]

    if len(good) > LOWEST_MATCHES_NUMBER:
        train_points = np.float32([train_kp[m.trainIdx].pt for m in good])
        frame_points = np.float32([frame_kp[m.queryIdx].pt for m in good])
        # Homography mapping the reference image into the camera frame.
        H, status = cv2.findHomography(train_points, frame_points, cv2.RANSAC, 3.0)
        h, w = train_img.shape
        trainBorder = np.float32([[[0, 0], [0, h - 1], [w - 1, h - 1], [w - 1, 0]]])
        queryBorder = cv2.perspectiveTransform(trainBorder, H)
        # Outline the detected object in red.
        cv2.polylines(frame_with_color, [np.int32(queryBorder)], True, (0, 0, 255), 5)
    else:
        print('FOUND LOW MATCHES NUMBER {} / {}'.format(len(good), LOWEST_MATCHES_NUMBER))

    cv2.imshow('result', frame_with_color)
    if cv2.waitKey(5) == ord('q'):
        break

camera.release()
cv2.destroyAllWindows()
|
normal
|
{
"blob_id": "1a78d9e0807824263fd46547d5b75c61610456d4",
"index": 1912,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwhile True:\n det, frame_with_color = camera.read()\n frame = cv2.cvtColor(frame_with_color, cv2.COLOR_BGR2GRAY)\n frame_kp, frame_desc = sift.detectAndCompute(frame, None)\n matches = bf.knnMatch(frame_desc, train_desc, k=2)\n good = []\n for m, n in matches:\n if m.distance < 0.75 * n.distance:\n good.append(m)\n if len(good) > LOWEST_MATCHES_NUMBER:\n train_points = []\n frame_points = []\n for m in good:\n train_points.append(train_kp[m.trainIdx].pt)\n frame_points.append(frame_kp[m.queryIdx].pt)\n train_points, frame_points = np.float32((train_points, frame_points))\n H, status = cv2.findHomography(train_points, frame_points, cv2.\n RANSAC, 3.0)\n h, w = train_img.shape\n trainBorder = np.float32([[[0, 0], [0, h - 1], [w - 1, h - 1], [w -\n 1, 0]]])\n queryBorder = cv2.perspectiveTransform(trainBorder, H)\n cv2.polylines(frame_with_color, [np.int32(queryBorder)], True, (0, \n 0, 255), 5)\n else:\n print('FOUND LOW MATCHES NUMBER {} / {}'.format(len(good),\n LOWEST_MATCHES_NUMBER))\n cv2.imshow('result', frame_with_color)\n if cv2.waitKey(5) == ord('q'):\n break\ncamera.release()\ncv2.destroyAllWindows()\n",
"step-3": "<mask token>\nLOWEST_MATCHES_NUMBER = 30\nsift = cv2.xfeatures2d.SIFT_create()\nbf = cv2.BFMatcher()\ntrain_img = cv2.imread('Photo/demo2.jpg', 0)\ntrain_kp, train_desc = sift.detectAndCompute(train_img, None)\ncamera = cv2.VideoCapture(0)\nwhile True:\n det, frame_with_color = camera.read()\n frame = cv2.cvtColor(frame_with_color, cv2.COLOR_BGR2GRAY)\n frame_kp, frame_desc = sift.detectAndCompute(frame, None)\n matches = bf.knnMatch(frame_desc, train_desc, k=2)\n good = []\n for m, n in matches:\n if m.distance < 0.75 * n.distance:\n good.append(m)\n if len(good) > LOWEST_MATCHES_NUMBER:\n train_points = []\n frame_points = []\n for m in good:\n train_points.append(train_kp[m.trainIdx].pt)\n frame_points.append(frame_kp[m.queryIdx].pt)\n train_points, frame_points = np.float32((train_points, frame_points))\n H, status = cv2.findHomography(train_points, frame_points, cv2.\n RANSAC, 3.0)\n h, w = train_img.shape\n trainBorder = np.float32([[[0, 0], [0, h - 1], [w - 1, h - 1], [w -\n 1, 0]]])\n queryBorder = cv2.perspectiveTransform(trainBorder, H)\n cv2.polylines(frame_with_color, [np.int32(queryBorder)], True, (0, \n 0, 255), 5)\n else:\n print('FOUND LOW MATCHES NUMBER {} / {}'.format(len(good),\n LOWEST_MATCHES_NUMBER))\n cv2.imshow('result', frame_with_color)\n if cv2.waitKey(5) == ord('q'):\n break\ncamera.release()\ncv2.destroyAllWindows()\n",
"step-4": "import cv2\nimport numpy as np\nLOWEST_MATCHES_NUMBER = 30\nsift = cv2.xfeatures2d.SIFT_create()\nbf = cv2.BFMatcher()\ntrain_img = cv2.imread('Photo/demo2.jpg', 0)\ntrain_kp, train_desc = sift.detectAndCompute(train_img, None)\ncamera = cv2.VideoCapture(0)\nwhile True:\n det, frame_with_color = camera.read()\n frame = cv2.cvtColor(frame_with_color, cv2.COLOR_BGR2GRAY)\n frame_kp, frame_desc = sift.detectAndCompute(frame, None)\n matches = bf.knnMatch(frame_desc, train_desc, k=2)\n good = []\n for m, n in matches:\n if m.distance < 0.75 * n.distance:\n good.append(m)\n if len(good) > LOWEST_MATCHES_NUMBER:\n train_points = []\n frame_points = []\n for m in good:\n train_points.append(train_kp[m.trainIdx].pt)\n frame_points.append(frame_kp[m.queryIdx].pt)\n train_points, frame_points = np.float32((train_points, frame_points))\n H, status = cv2.findHomography(train_points, frame_points, cv2.\n RANSAC, 3.0)\n h, w = train_img.shape\n trainBorder = np.float32([[[0, 0], [0, h - 1], [w - 1, h - 1], [w -\n 1, 0]]])\n queryBorder = cv2.perspectiveTransform(trainBorder, H)\n cv2.polylines(frame_with_color, [np.int32(queryBorder)], True, (0, \n 0, 255), 5)\n else:\n print('FOUND LOW MATCHES NUMBER {} / {}'.format(len(good),\n LOWEST_MATCHES_NUMBER))\n cv2.imshow('result', frame_with_color)\n if cv2.waitKey(5) == ord('q'):\n break\ncamera.release()\ncv2.destroyAllWindows()\n",
"step-5": "import cv2\nimport numpy as np\n\nLOWEST_MATCHES_NUMBER = 30\n\nsift = cv2.xfeatures2d.SIFT_create()\nbf = cv2.BFMatcher();\n\ntrain_img = cv2.imread('Photo/demo2.jpg', 0)\ntrain_kp, train_desc = sift.detectAndCompute(train_img, None);\n\ncamera = cv2.VideoCapture(0);\nwhile (True):\n det, frame_with_color = camera.read();\n frame = cv2.cvtColor(frame_with_color,cv2.COLOR_BGR2GRAY)\n frame_kp, frame_desc = sift.detectAndCompute(frame,None)\n matches=bf.knnMatch(frame_desc,train_desc,k=2)\n good = []\n for m,n in matches:\n if(m.distance < 0.75*n.distance):\n good.append(m)\n if(len(good)> LOWEST_MATCHES_NUMBER):\n train_points = []\n frame_points = []\n for m in good:\n train_points.append(train_kp[m.trainIdx].pt)\n frame_points.append(frame_kp[m.queryIdx].pt)\n train_points, frame_points=np.float32((train_points,frame_points))\n H,status=cv2.findHomography(train_points,frame_points,cv2.RANSAC,3.0)\n h,w=train_img.shape\n trainBorder=np.float32([[[0,0],[0,h-1],[w-1,h-1],[w-1,0]]])\n queryBorder=cv2.perspectiveTransform(trainBorder,H)\n cv2.polylines(frame_with_color,[np.int32(queryBorder)],True,(0,0,255),5)\n else:\n print('FOUND LOW MATCHES NUMBER {} / {}'.format(len(good), LOWEST_MATCHES_NUMBER))\n cv2.imshow('result',frame_with_color)\n if cv2.waitKey(5)==ord('q'):\n break\ncamera.release()\ncv2.destroyAllWindows()\n\n\n\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# -*- coding: utf-8 -*-
"""
Automatically create and parse commands
based on a YAML configuration file.
NOTE: we can't have a logger here,
before knowing the level of debug.
"""
import os
import sys
import argparse
from controller import __version__, PROJECTRC, PROJECTRC_ALTERNATIVE
from controller.conf_utilities import load_yaml_file
from controller import log
class ArgParser:
    """Build and run the rapydo command-line parser from a YAML description.

    Reads ``argparser.yaml`` (options, commands, sub-options), overlays
    per-project defaults from ``.projectrc`` / its alternative file, then
    partially parses ``args``.  After construction:

    - ``self.current_args``: dict of parsed arguments
    - ``self.remaining_args``: arguments left over by partial parsing
    - ``self.host_configuration``: ``project_configuration`` section of .projectrc
    - ``self.extra_parser`` / ``self.extra_command_parser``: parser for
      user-defined custom commands, filled in later by the caller
    """

    def __init__(self, args=None):
        # Default to the process argv; args[0] is used as the program name.
        if args is None:
            args = sys.argv
        self.current_args = {}
        self.host_configuration = {}
        # This method can raise ValueErrors (rejects '--opt_with_underscore')
        self.check_args(args)
        # This method saves configuration objects in self
        # (self.parse_conf and self.host_configuration)
        self.read_configuration()
        # Arguments definition
        parser = argparse.ArgumentParser(
            prog=args[0], description=self.parse_conf.get('description')
        )
        # PARAMETERS: first-level options, sorted for stable --help output
        sorted_options = sorted(self.parse_conf.get('options', {}).items())
        for option_name, options in sorted_options:
            self.add_parser_argument(parser, option_name, options)
        version_string = 'rapydo version {}'.format(__version__)
        parser.add_argument('--version', action='version', version=version_string)
        # Sub-parser of commands [check, init, etc]
        main_command = self.parse_conf.get('action')
        subparsers = parser.add_subparsers(
            title='Available commands',
            dest=main_command.get('name'),
            help=main_command.get('help'),
        )
        subparsers.required = True
        # ##########################
        # COMMANDS
        # BASE normal commands
        mycommands = self.parse_conf.get('subcommands', {})
        for command_name, options in sorted(mycommands.items()):
            # Creating a parser for each sub-command [check, init, etc]
            subparse = subparsers.add_parser(
                command_name, help=options.get('description')
            )
            # controlcommands = options.get('controlcommands', {})
            # # Some subcommands can have further subcommands
            # [control start, stop, etc]
            # if len(controlcommands) > 0:
            #     innerparser = subparse.add_subparsers(
            #         dest='controlcommand'
            #     )
            #     innerparser.required = options.get('controlrequired', False)
            #     for subcommand, suboptions in controlcommands.items():
            #         subcommand_help = suboptions.pop(0)
            #         # Creating a parser for each sub-sub-command
            #         # [control start/stop]
            #         innerparser.add_parser(subcommand, help=subcommand_help)
            # Per-command sub-options (note: the loop variable intentionally
            # reuses the name 'suboptions'; the iterator is already created).
            suboptions = options.get('suboptions', {}).items()
            for option_name, suboptions in suboptions:
                self.add_parser_argument(subparse, option_name, suboptions)
        # ##########################
        # Print usage if no arguments provided
        if len(args) == 1:
            parser.print_help()
            sys.exit(1)
        # ##########################
        # Reading input parameters
        # Partial parsing: unknown arguments are kept in self.remaining_args
        # instead of raising, so custom commands can consume them later.
        # https://docs.python.org/3.4/library/argparse.html#partial-parsing
        # Example
        # https://gist.github.com/von/949337/
        # self.current_args = parser.parse_args()
        current_args_namespace, self.remaining_args = parser.parse_known_args(args[1:])
        self.current_args = vars(current_args_namespace)
        # custom commands as a separate parser; its subcommands are expected
        # to be registered by the caller before it is used
        self.extra_parser = argparse.ArgumentParser(
            description='Custom rapydo commands from your own configuration',
            add_help=False,
            usage='\n$ rapydo custom CUSTOM_COMMAND',
        )
        self.extra_command_parser = self.extra_parser.add_subparsers(
            title='Available custom commands',
            dest='custom',
            help='list of custom commands',
        )
        self.extra_command_parser.required = True
        # ##########################
        if self.current_args.get("log_level", "DEPRECATED") != "DEPRECATED":
            # Deprecated since version 0.7.0
            log.warning(
                "--log-level parameter is deprecated, set env variable LOGURU_LEVEL")
        log.verbose("Parsed arguments: {}", self.current_args)

    def add_parser_argument(self, parser, option_name, options):
        """Register one option on *parser* from its YAML spec.

        Positional specs are added under their bare name; optional specs
        become ``--option-name``, with ``-<alias>`` as a short form when
        an alias is configured.
        """
        params = self.prepare_params(options)
        alias = params.pop('alias', None)
        positional = params.pop('positional', False)
        param_name = '--{}'.format(option_name)
        if positional:
            parser.add_argument(option_name, **params)
        elif alias is None:
            parser.add_argument(param_name, **params)
        else:
            parser.add_argument(param_name, '-{}'.format(alias), **params)

    @staticmethod
    def check_args(args):
        """Raise ValueError for any ``--long_option`` containing underscores."""
        # Check on format
        for element in args:
            if element.startswith('--') and '_' in element:
                raise ValueError(
                    "Wrong \"{}\" option provided.\n".format(element)
                    + "Arguments containing '_' are not allowed.\n"
                    + "Use '-' instead\n"
                )
        # NOTE: the standard is to use only '-' separators for arguments
        # beware: argparse converts them into '_' when you want to retrieve

    def read_configuration(self):
        """Load the YAML command description and overlay .projectrc defaults.

        Sets ``self.parse_conf`` (commands/options tree) and
        ``self.host_configuration``; .projectrc values override the
        ``default`` of matching options, with a warning printed for
        unknown keys.
        """
        # READ MAIN FILE WITH COMMANDS AND OPTIONS (next to this module)
        self.parse_conf = load_yaml_file(
            'argparser.yaml', path=os.path.dirname(os.path.realpath(__file__))
        )
        try:
            # READ PROJECT INIT FILE: .projectrc
            pinit_conf = load_yaml_file(
                PROJECTRC, path=os.curdir, is_optional=True)
            # Allow alternative for PROJECT INIT FILE: .project.yml
            if len(pinit_conf) < 1:
                pinit_conf = load_yaml_file(
                    PROJECTRC_ALTERNATIVE, path=os.curdir, is_optional=True)
        except AttributeError as e:
            log.exit(e)
        self.host_configuration = pinit_conf.pop('project_configuration', {})
        # Mix with parse_conf: override option defaults with project values
        for key, value in pinit_conf.items():
            # value = pinit_conf.get(key, None)
            if value is None:
                continue
            if not isinstance(value, dict):
                # This is a first level option
                if key in self.parse_conf['options']:
                    self.parse_conf['options'][key]['default'] = value
                else:
                    print("\nUnknown parameter {} found in {}\n".format(key, PROJECTRC))
            else:
                # This is a second level parameter (command -> suboption)
                if key not in self.parse_conf['subcommands']:
                    print("\nUnknown command {} found in {}\n".format(key, PROJECTRC))
                else:
                    conf = self.parse_conf['subcommands'][key]['suboptions']
                    for subkey, subvalue in value.items():
                        if subkey in conf:
                            conf[subkey]['default'] = subvalue
                        else:
                            print("Unknown parameter {}/{} found in {}\n".format(
                                key, subkey, PROJECTRC))

    @staticmethod
    def prepare_params(options):
        """Translate a YAML option spec into argparse ``add_argument`` kwargs.

        Booleans become store_true/store_false flags (the action inverts
        the configured default); everything else is read as a string with
        the configured metavar.  'alias' and 'positional' are passed
        through for add_parser_argument to pop off.
        """
        pconf = {}
        default = options.get('default')
        pconf['default'] = default
        myhelp = "{} [default: {}]".format(options.get('help'), default)
        pconf['help'] = myhelp
        if options.get('type') == 'bool':
            if default:
                pconf['action'] = 'store_false'
            else:
                pconf['action'] = 'store_true'
        else:
            # non-bool options: 'type'/'metavar' apply only here
            # (argparse rejects them alongside store_true/store_false)
            pconf['type'] = str
            pconf['metavar'] = options.get('metavalue')
        if 'alias' in options:
            pconf['alias'] = options['alias']
        if 'positional' in options:
            pconf['positional'] = options['positional']
        return pconf
|
normal
|
{
"blob_id": "94559d9fd296acd468c33d6b0541b974575b8852",
"index": 4119,
"step-1": "<mask token>\n\n\nclass ArgParser:\n <mask token>\n\n def add_parser_argument(self, parser, option_name, options):\n params = self.prepare_params(options)\n alias = params.pop('alias', None)\n positional = params.pop('positional', False)\n param_name = '--{}'.format(option_name)\n if positional:\n parser.add_argument(option_name, **params)\n elif alias is None:\n parser.add_argument(param_name, **params)\n else:\n parser.add_argument(param_name, '-{}'.format(alias), **params)\n\n @staticmethod\n def check_args(args):\n for element in args:\n if element.startswith('--') and '_' in element:\n raise ValueError('Wrong \"{}\" option provided.\\n'.format(\n element) +\n \"\"\"Arguments containing '_' are not allowed.\n\"\"\" +\n \"Use '-' instead\\n\")\n\n def read_configuration(self):\n self.parse_conf = load_yaml_file('argparser.yaml', path=os.path.\n dirname(os.path.realpath(__file__)))\n try:\n pinit_conf = load_yaml_file(PROJECTRC, path=os.curdir,\n is_optional=True)\n if len(pinit_conf) < 1:\n pinit_conf = load_yaml_file(PROJECTRC_ALTERNATIVE, path=os.\n curdir, is_optional=True)\n except AttributeError as e:\n log.exit(e)\n self.host_configuration = pinit_conf.pop('project_configuration', {})\n for key, value in pinit_conf.items():\n if value is None:\n continue\n if not isinstance(value, dict):\n if key in self.parse_conf['options']:\n self.parse_conf['options'][key]['default'] = value\n else:\n print('\\nUnknown parameter {} found in {}\\n'.format(key,\n PROJECTRC))\n elif key not in self.parse_conf['subcommands']:\n print('\\nUnknown command {} found in {}\\n'.format(key,\n PROJECTRC))\n else:\n conf = self.parse_conf['subcommands'][key]['suboptions']\n for subkey, subvalue in value.items():\n if subkey in conf:\n conf[subkey]['default'] = subvalue\n else:\n print('Unknown parameter {}/{} found in {}\\n'.\n format(key, subkey, PROJECTRC))\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass ArgParser:\n <mask token>\n\n def add_parser_argument(self, parser, option_name, options):\n params = self.prepare_params(options)\n alias = params.pop('alias', None)\n positional = params.pop('positional', False)\n param_name = '--{}'.format(option_name)\n if positional:\n parser.add_argument(option_name, **params)\n elif alias is None:\n parser.add_argument(param_name, **params)\n else:\n parser.add_argument(param_name, '-{}'.format(alias), **params)\n\n @staticmethod\n def check_args(args):\n for element in args:\n if element.startswith('--') and '_' in element:\n raise ValueError('Wrong \"{}\" option provided.\\n'.format(\n element) +\n \"\"\"Arguments containing '_' are not allowed.\n\"\"\" +\n \"Use '-' instead\\n\")\n\n def read_configuration(self):\n self.parse_conf = load_yaml_file('argparser.yaml', path=os.path.\n dirname(os.path.realpath(__file__)))\n try:\n pinit_conf = load_yaml_file(PROJECTRC, path=os.curdir,\n is_optional=True)\n if len(pinit_conf) < 1:\n pinit_conf = load_yaml_file(PROJECTRC_ALTERNATIVE, path=os.\n curdir, is_optional=True)\n except AttributeError as e:\n log.exit(e)\n self.host_configuration = pinit_conf.pop('project_configuration', {})\n for key, value in pinit_conf.items():\n if value is None:\n continue\n if not isinstance(value, dict):\n if key in self.parse_conf['options']:\n self.parse_conf['options'][key]['default'] = value\n else:\n print('\\nUnknown parameter {} found in {}\\n'.format(key,\n PROJECTRC))\n elif key not in self.parse_conf['subcommands']:\n print('\\nUnknown command {} found in {}\\n'.format(key,\n PROJECTRC))\n else:\n conf = self.parse_conf['subcommands'][key]['suboptions']\n for subkey, subvalue in value.items():\n if subkey in conf:\n conf[subkey]['default'] = subvalue\n else:\n print('Unknown parameter {}/{} found in {}\\n'.\n format(key, subkey, PROJECTRC))\n\n @staticmethod\n def prepare_params(options):\n pconf = {}\n default = options.get('default')\n 
pconf['default'] = default\n myhelp = '{} [default: {}]'.format(options.get('help'), default)\n pconf['help'] = myhelp\n if options.get('type') == 'bool':\n if default:\n pconf['action'] = 'store_false'\n else:\n pconf['action'] = 'store_true'\n else:\n pconf['type'] = str\n pconf['metavar'] = options.get('metavalue')\n if 'alias' in options:\n pconf['alias'] = options['alias']\n if 'positional' in options:\n pconf['positional'] = options['positional']\n return pconf\n",
"step-3": "<mask token>\n\n\nclass ArgParser:\n\n def __init__(self, args=None):\n if args is None:\n args = sys.argv\n self.current_args = {}\n self.host_configuration = {}\n self.check_args(args)\n self.read_configuration()\n parser = argparse.ArgumentParser(prog=args[0], description=self.\n parse_conf.get('description'))\n sorted_options = sorted(self.parse_conf.get('options', {}).items())\n for option_name, options in sorted_options:\n self.add_parser_argument(parser, option_name, options)\n version_string = 'rapydo version {}'.format(__version__)\n parser.add_argument('--version', action='version', version=\n version_string)\n main_command = self.parse_conf.get('action')\n subparsers = parser.add_subparsers(title='Available commands', dest\n =main_command.get('name'), help=main_command.get('help'))\n subparsers.required = True\n mycommands = self.parse_conf.get('subcommands', {})\n for command_name, options in sorted(mycommands.items()):\n subparse = subparsers.add_parser(command_name, help=options.get\n ('description'))\n suboptions = options.get('suboptions', {}).items()\n for option_name, suboptions in suboptions:\n self.add_parser_argument(subparse, option_name, suboptions)\n if len(args) == 1:\n parser.print_help()\n sys.exit(1)\n current_args_namespace, self.remaining_args = parser.parse_known_args(\n args[1:])\n self.current_args = vars(current_args_namespace)\n self.extra_parser = argparse.ArgumentParser(description=\n 'Custom rapydo commands from your own configuration', add_help=\n False, usage=\"\"\"\n$ rapydo custom CUSTOM_COMMAND\"\"\")\n self.extra_command_parser = self.extra_parser.add_subparsers(title=\n 'Available custom commands', dest='custom', help=\n 'list of custom commands')\n self.extra_command_parser.required = True\n if self.current_args.get('log_level', 'DEPRECATED') != 'DEPRECATED':\n log.warning(\n '--log-level parameter is deprecated, set env variable LOGURU_LEVEL'\n )\n log.verbose('Parsed arguments: {}', self.current_args)\n\n 
def add_parser_argument(self, parser, option_name, options):\n params = self.prepare_params(options)\n alias = params.pop('alias', None)\n positional = params.pop('positional', False)\n param_name = '--{}'.format(option_name)\n if positional:\n parser.add_argument(option_name, **params)\n elif alias is None:\n parser.add_argument(param_name, **params)\n else:\n parser.add_argument(param_name, '-{}'.format(alias), **params)\n\n @staticmethod\n def check_args(args):\n for element in args:\n if element.startswith('--') and '_' in element:\n raise ValueError('Wrong \"{}\" option provided.\\n'.format(\n element) +\n \"\"\"Arguments containing '_' are not allowed.\n\"\"\" +\n \"Use '-' instead\\n\")\n\n def read_configuration(self):\n self.parse_conf = load_yaml_file('argparser.yaml', path=os.path.\n dirname(os.path.realpath(__file__)))\n try:\n pinit_conf = load_yaml_file(PROJECTRC, path=os.curdir,\n is_optional=True)\n if len(pinit_conf) < 1:\n pinit_conf = load_yaml_file(PROJECTRC_ALTERNATIVE, path=os.\n curdir, is_optional=True)\n except AttributeError as e:\n log.exit(e)\n self.host_configuration = pinit_conf.pop('project_configuration', {})\n for key, value in pinit_conf.items():\n if value is None:\n continue\n if not isinstance(value, dict):\n if key in self.parse_conf['options']:\n self.parse_conf['options'][key]['default'] = value\n else:\n print('\\nUnknown parameter {} found in {}\\n'.format(key,\n PROJECTRC))\n elif key not in self.parse_conf['subcommands']:\n print('\\nUnknown command {} found in {}\\n'.format(key,\n PROJECTRC))\n else:\n conf = self.parse_conf['subcommands'][key]['suboptions']\n for subkey, subvalue in value.items():\n if subkey in conf:\n conf[subkey]['default'] = subvalue\n else:\n print('Unknown parameter {}/{} found in {}\\n'.\n format(key, subkey, PROJECTRC))\n\n @staticmethod\n def prepare_params(options):\n pconf = {}\n default = options.get('default')\n pconf['default'] = default\n myhelp = '{} [default: 
{}]'.format(options.get('help'), default)\n pconf['help'] = myhelp\n if options.get('type') == 'bool':\n if default:\n pconf['action'] = 'store_false'\n else:\n pconf['action'] = 'store_true'\n else:\n pconf['type'] = str\n pconf['metavar'] = options.get('metavalue')\n if 'alias' in options:\n pconf['alias'] = options['alias']\n if 'positional' in options:\n pconf['positional'] = options['positional']\n return pconf\n",
"step-4": "<mask token>\nimport os\nimport sys\nimport argparse\nfrom controller import __version__, PROJECTRC, PROJECTRC_ALTERNATIVE\nfrom controller.conf_utilities import load_yaml_file\nfrom controller import log\n\n\nclass ArgParser:\n\n def __init__(self, args=None):\n if args is None:\n args = sys.argv\n self.current_args = {}\n self.host_configuration = {}\n self.check_args(args)\n self.read_configuration()\n parser = argparse.ArgumentParser(prog=args[0], description=self.\n parse_conf.get('description'))\n sorted_options = sorted(self.parse_conf.get('options', {}).items())\n for option_name, options in sorted_options:\n self.add_parser_argument(parser, option_name, options)\n version_string = 'rapydo version {}'.format(__version__)\n parser.add_argument('--version', action='version', version=\n version_string)\n main_command = self.parse_conf.get('action')\n subparsers = parser.add_subparsers(title='Available commands', dest\n =main_command.get('name'), help=main_command.get('help'))\n subparsers.required = True\n mycommands = self.parse_conf.get('subcommands', {})\n for command_name, options in sorted(mycommands.items()):\n subparse = subparsers.add_parser(command_name, help=options.get\n ('description'))\n suboptions = options.get('suboptions', {}).items()\n for option_name, suboptions in suboptions:\n self.add_parser_argument(subparse, option_name, suboptions)\n if len(args) == 1:\n parser.print_help()\n sys.exit(1)\n current_args_namespace, self.remaining_args = parser.parse_known_args(\n args[1:])\n self.current_args = vars(current_args_namespace)\n self.extra_parser = argparse.ArgumentParser(description=\n 'Custom rapydo commands from your own configuration', add_help=\n False, usage=\"\"\"\n$ rapydo custom CUSTOM_COMMAND\"\"\")\n self.extra_command_parser = self.extra_parser.add_subparsers(title=\n 'Available custom commands', dest='custom', help=\n 'list of custom commands')\n self.extra_command_parser.required = True\n if 
self.current_args.get('log_level', 'DEPRECATED') != 'DEPRECATED':\n log.warning(\n '--log-level parameter is deprecated, set env variable LOGURU_LEVEL'\n )\n log.verbose('Parsed arguments: {}', self.current_args)\n\n def add_parser_argument(self, parser, option_name, options):\n params = self.prepare_params(options)\n alias = params.pop('alias', None)\n positional = params.pop('positional', False)\n param_name = '--{}'.format(option_name)\n if positional:\n parser.add_argument(option_name, **params)\n elif alias is None:\n parser.add_argument(param_name, **params)\n else:\n parser.add_argument(param_name, '-{}'.format(alias), **params)\n\n @staticmethod\n def check_args(args):\n for element in args:\n if element.startswith('--') and '_' in element:\n raise ValueError('Wrong \"{}\" option provided.\\n'.format(\n element) +\n \"\"\"Arguments containing '_' are not allowed.\n\"\"\" +\n \"Use '-' instead\\n\")\n\n def read_configuration(self):\n self.parse_conf = load_yaml_file('argparser.yaml', path=os.path.\n dirname(os.path.realpath(__file__)))\n try:\n pinit_conf = load_yaml_file(PROJECTRC, path=os.curdir,\n is_optional=True)\n if len(pinit_conf) < 1:\n pinit_conf = load_yaml_file(PROJECTRC_ALTERNATIVE, path=os.\n curdir, is_optional=True)\n except AttributeError as e:\n log.exit(e)\n self.host_configuration = pinit_conf.pop('project_configuration', {})\n for key, value in pinit_conf.items():\n if value is None:\n continue\n if not isinstance(value, dict):\n if key in self.parse_conf['options']:\n self.parse_conf['options'][key]['default'] = value\n else:\n print('\\nUnknown parameter {} found in {}\\n'.format(key,\n PROJECTRC))\n elif key not in self.parse_conf['subcommands']:\n print('\\nUnknown command {} found in {}\\n'.format(key,\n PROJECTRC))\n else:\n conf = self.parse_conf['subcommands'][key]['suboptions']\n for subkey, subvalue in value.items():\n if subkey in conf:\n conf[subkey]['default'] = subvalue\n else:\n print('Unknown parameter {}/{} found in 
{}\\n'.\n format(key, subkey, PROJECTRC))\n\n @staticmethod\n def prepare_params(options):\n pconf = {}\n default = options.get('default')\n pconf['default'] = default\n myhelp = '{} [default: {}]'.format(options.get('help'), default)\n pconf['help'] = myhelp\n if options.get('type') == 'bool':\n if default:\n pconf['action'] = 'store_false'\n else:\n pconf['action'] = 'store_true'\n else:\n pconf['type'] = str\n pconf['metavar'] = options.get('metavalue')\n if 'alias' in options:\n pconf['alias'] = options['alias']\n if 'positional' in options:\n pconf['positional'] = options['positional']\n return pconf\n",
"step-5": "# -*- coding: utf-8 -*-\n\n\"\"\"\nAutomatically create and parse commands\nbased on a YAML configuration file.\n\nNOTE: we can't have a logger here,\nbefore knowing the level of debug.\n\"\"\"\n\nimport os\nimport sys\nimport argparse\nfrom controller import __version__, PROJECTRC, PROJECTRC_ALTERNATIVE\nfrom controller.conf_utilities import load_yaml_file\nfrom controller import log\n\n\nclass ArgParser:\n def __init__(self, args=None):\n if args is None:\n args = sys.argv\n\n self.current_args = {}\n self.host_configuration = {}\n # This method can raise ValueErrors\n self.check_args(args)\n\n # This method saves configuration objects in self\n self.read_configuration()\n\n # Arguments definition\n parser = argparse.ArgumentParser(\n prog=args[0], description=self.parse_conf.get('description')\n )\n\n # PARAMETERS\n sorted_options = sorted(self.parse_conf.get('options', {}).items())\n for option_name, options in sorted_options:\n self.add_parser_argument(parser, option_name, options)\n\n version_string = 'rapydo version {}'.format(__version__)\n parser.add_argument('--version', action='version', version=version_string)\n # Sub-parser of commands [check, init, etc]\n main_command = self.parse_conf.get('action')\n\n subparsers = parser.add_subparsers(\n title='Available commands',\n dest=main_command.get('name'),\n help=main_command.get('help'),\n )\n\n subparsers.required = True\n\n # ##########################\n # COMMANDS\n\n # BASE normal commands\n mycommands = self.parse_conf.get('subcommands', {})\n\n for command_name, options in sorted(mycommands.items()):\n\n # Creating a parser for each sub-command [check, init, etc]\n subparse = subparsers.add_parser(\n command_name, help=options.get('description')\n )\n\n # controlcommands = options.get('controlcommands', {})\n # # Some subcommands can have further subcommands\n # [control start, stop, etc]\n # if len(controlcommands) > 0:\n # innerparser = subparse.add_subparsers(\n # 
dest='controlcommand'\n # )\n # innerparser.required = options.get('controlrequired', False)\n # for subcommand, suboptions in controlcommands.items():\n # subcommand_help = suboptions.pop(0)\n # # Creating a parser for each sub-sub-command\n # # [control start/stop]\n # innerparser.add_parser(subcommand, help=subcommand_help)\n\n suboptions = options.get('suboptions', {}).items()\n for option_name, suboptions in suboptions:\n self.add_parser_argument(subparse, option_name, suboptions)\n\n # ##########################\n # Print usage if no arguments provided\n if len(args) == 1:\n parser.print_help()\n sys.exit(1)\n\n # ##########################\n # Reading input parameters\n\n # Partial parsing\n # https://docs.python.org/3.4/library/argparse.html#partial-parsing\n # Example\n # https://gist.github.com/von/949337/\n\n # self.current_args = parser.parse_args()\n current_args_namespace, self.remaining_args = parser.parse_known_args(args[1:])\n self.current_args = vars(current_args_namespace)\n\n # custom commands as a separate parser\n self.extra_parser = argparse.ArgumentParser(\n description='Custom rapydo commands from your own configuration',\n add_help=False,\n usage='\\n$ rapydo custom CUSTOM_COMMAND',\n )\n self.extra_command_parser = self.extra_parser.add_subparsers(\n title='Available custom commands',\n dest='custom',\n help='list of custom commands',\n )\n self.extra_command_parser.required = True\n\n # ##########################\n if self.current_args.get(\"log_level\", \"DEPRECATED\") != \"DEPRECATED\":\n # Deprecated since version 0.7.0\n log.warning(\n \"--log-level parameter is deprecated, set env variable LOGURU_LEVEL\")\n\n log.verbose(\"Parsed arguments: {}\", self.current_args)\n\n def add_parser_argument(self, parser, option_name, options):\n params = self.prepare_params(options)\n alias = params.pop('alias', None)\n positional = params.pop('positional', False)\n param_name = '--{}'.format(option_name)\n if positional:\n 
parser.add_argument(option_name, **params)\n elif alias is None:\n parser.add_argument(param_name, **params)\n else:\n parser.add_argument(param_name, '-{}'.format(alias), **params)\n\n @staticmethod\n def check_args(args):\n # Check on format\n for element in args:\n if element.startswith('--') and '_' in element:\n raise ValueError(\n \"Wrong \\\"{}\\\" option provided.\\n\".format(element)\n + \"Arguments containing '_' are not allowed.\\n\"\n + \"Use '-' instead\\n\"\n )\n # NOTE: the standard is to use only '-' separators for arguments\n # beware: argparse converts them into '_' when you want to retrieve\n\n def read_configuration(self):\n # READ MAIN FILE WITH COMMANDS AND OPTIONS\n\n self.parse_conf = load_yaml_file(\n 'argparser.yaml', path=os.path.dirname(os.path.realpath(__file__))\n )\n\n try:\n # READ PROJECT INIT FILE: .projectrc\n pinit_conf = load_yaml_file(\n PROJECTRC, path=os.curdir, is_optional=True)\n # Allow alternative for PROJECT INIT FILE: .project.yml\n if len(pinit_conf) < 1:\n pinit_conf = load_yaml_file(\n PROJECTRC_ALTERNATIVE, path=os.curdir, is_optional=True)\n except AttributeError as e:\n log.exit(e)\n\n self.host_configuration = pinit_conf.pop('project_configuration', {})\n\n # Mix with parse_conf\n for key, value in pinit_conf.items():\n # value = pinit_conf.get(key, None)\n\n if value is None:\n continue\n\n if not isinstance(value, dict):\n # This is a first level option\n if key in self.parse_conf['options']:\n self.parse_conf['options'][key]['default'] = value\n else:\n print(\"\\nUnknown parameter {} found in {}\\n\".format(key, PROJECTRC))\n else:\n # This is a second level parameter\n if key not in self.parse_conf['subcommands']:\n print(\"\\nUnknown command {} found in {}\\n\".format(key, PROJECTRC))\n else:\n conf = self.parse_conf['subcommands'][key]['suboptions']\n for subkey, subvalue in value.items():\n if subkey in conf:\n conf[subkey]['default'] = subvalue\n else:\n print(\"Unknown parameter {}/{} found in 
{}\\n\".format(\n key, subkey, PROJECTRC))\n\n @staticmethod\n def prepare_params(options):\n\n pconf = {}\n default = options.get('default')\n pconf['default'] = default\n\n myhelp = \"{} [default: {}]\".format(options.get('help'), default)\n pconf['help'] = myhelp\n\n if options.get('type') == 'bool':\n\n if default:\n pconf['action'] = 'store_false'\n else:\n pconf['action'] = 'store_true'\n\n else:\n # type and metavar are allowed for bool\n pconf['type'] = str\n pconf['metavar'] = options.get('metavalue')\n\n if 'alias' in options:\n pconf['alias'] = options['alias']\n\n if 'positional' in options:\n pconf['positional'] = options['positional']\n\n return pconf\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def test_import_error():
from cnct import ClientError as MovedClientError
assert MovedClientError == ClientError
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def test_import_error():
from cnct import ClientError as MovedClientError
assert MovedClientError == ClientError
def test_import_r():
from cnct import R as MovedR
assert MovedR == R
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def test_import_client():
from cnct import ConnectClient as MovedConnectClient
assert MovedConnectClient == ConnectClient
def test_import_error():
from cnct import ClientError as MovedClientError
assert MovedClientError == ClientError
def test_import_r():
from cnct import R as MovedR
assert MovedR == R
<|reserved_special_token_1|>
from connect.client import ClientError, ConnectClient, R
def test_import_client():
from cnct import ConnectClient as MovedConnectClient
assert MovedConnectClient == ConnectClient
def test_import_error():
from cnct import ClientError as MovedClientError
assert MovedClientError == ClientError
def test_import_r():
from cnct import R as MovedR
assert MovedR == R
|
flexible
|
{
"blob_id": "e5a71250ca9f17798011d8fbfaee6a3d55446598",
"index": 6145,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef test_import_error():\n from cnct import ClientError as MovedClientError\n assert MovedClientError == ClientError\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef test_import_error():\n from cnct import ClientError as MovedClientError\n assert MovedClientError == ClientError\n\n\ndef test_import_r():\n from cnct import R as MovedR\n assert MovedR == R\n",
"step-4": "<mask token>\n\n\ndef test_import_client():\n from cnct import ConnectClient as MovedConnectClient\n assert MovedConnectClient == ConnectClient\n\n\ndef test_import_error():\n from cnct import ClientError as MovedClientError\n assert MovedClientError == ClientError\n\n\ndef test_import_r():\n from cnct import R as MovedR\n assert MovedR == R\n",
"step-5": "from connect.client import ClientError, ConnectClient, R\n\n\ndef test_import_client():\n from cnct import ConnectClient as MovedConnectClient\n assert MovedConnectClient == ConnectClient\n\n\ndef test_import_error():\n from cnct import ClientError as MovedClientError\n assert MovedClientError == ClientError\n\n\ndef test_import_r():\n from cnct import R as MovedR\n assert MovedR == R\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class RegisterUserSerializer(serializers.ModelSerializer):
class Meta:
model = User
fields = '__all__'
def validate_password(self, password):
user = _build_initial_user(self.initial_data)
validate_password(password, user=user)
return password
def get_fields(self):
fields = super().get_fields()
fields['password_confirm'] = serializers.CharField(write_only=True)
return fields
def validate(self, attrs):
if attrs['password'] != attrs['password_confirm']:
raise ValidationError(_("Passwords don't match"))
return attrs
<|reserved_special_token_0|>
class ChangePasswordSerializer(serializers.Serializer):
old_password = serializers.CharField()
password = serializers.CharField()
def validate_old_password(self, old_password):
user = self.context['request'].user
if not user.check_password(old_password):
raise serializers.ValidationError(_('Old password is not correct'))
return old_password
def validate_password(self, password):
user = self.context['request'].user
validate_password(password, user=user)
return password
def get_fields(self):
fields = super().get_fields()
fields['password_confirm'] = serializers.CharField()
return fields
def validate(self, attrs):
if attrs['password'] != attrs['password_confirm']:
raise serializers.ValidationError(_("Passwords don't match"))
return attrs
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class UserGetProfileSerializer(serializers.ModelSerializer):
class Meta:
model = User
exclude = 'password',
class UserProfileSerializer(serializers.ModelSerializer):
class Meta:
model = User
exclude = 'password',
class RegisterUserSerializer(serializers.ModelSerializer):
class Meta:
model = User
fields = '__all__'
def validate_password(self, password):
user = _build_initial_user(self.initial_data)
validate_password(password, user=user)
return password
def get_fields(self):
fields = super().get_fields()
fields['password_confirm'] = serializers.CharField(write_only=True)
return fields
def validate(self, attrs):
if attrs['password'] != attrs['password_confirm']:
raise ValidationError(_("Passwords don't match"))
return attrs
def create(self, validated_data):
data = validated_data.copy()
del data['password_confirm']
return self.Meta.model.objects.create_user(**data)
class ChangePasswordSerializer(serializers.Serializer):
old_password = serializers.CharField()
password = serializers.CharField()
def validate_old_password(self, old_password):
user = self.context['request'].user
if not user.check_password(old_password):
raise serializers.ValidationError(_('Old password is not correct'))
return old_password
def validate_password(self, password):
user = self.context['request'].user
validate_password(password, user=user)
return password
def get_fields(self):
fields = super().get_fields()
fields['password_confirm'] = serializers.CharField()
return fields
def validate(self, attrs):
if attrs['password'] != attrs['password_confirm']:
raise serializers.ValidationError(_("Passwords don't match"))
return attrs
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class LogoutSerializer(serializers.Serializer):
<|reserved_special_token_0|>
class UserGetProfileSerializer(serializers.ModelSerializer):
class Meta:
model = User
exclude = 'password',
class UserProfileSerializer(serializers.ModelSerializer):
class Meta:
model = User
exclude = 'password',
class RegisterUserSerializer(serializers.ModelSerializer):
class Meta:
model = User
fields = '__all__'
def validate_password(self, password):
user = _build_initial_user(self.initial_data)
validate_password(password, user=user)
return password
def get_fields(self):
fields = super().get_fields()
fields['password_confirm'] = serializers.CharField(write_only=True)
return fields
def validate(self, attrs):
if attrs['password'] != attrs['password_confirm']:
raise ValidationError(_("Passwords don't match"))
return attrs
def create(self, validated_data):
data = validated_data.copy()
del data['password_confirm']
return self.Meta.model.objects.create_user(**data)
class ChangePasswordSerializer(serializers.Serializer):
old_password = serializers.CharField()
password = serializers.CharField()
def validate_old_password(self, old_password):
user = self.context['request'].user
if not user.check_password(old_password):
raise serializers.ValidationError(_('Old password is not correct'))
return old_password
def validate_password(self, password):
user = self.context['request'].user
validate_password(password, user=user)
return password
def get_fields(self):
fields = super().get_fields()
fields['password_confirm'] = serializers.CharField()
return fields
def validate(self, attrs):
if attrs['password'] != attrs['password_confirm']:
raise serializers.ValidationError(_("Passwords don't match"))
return attrs
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class LogoutSerializer(serializers.Serializer):
revoke_token = serializers.BooleanField(default=False)
class UserGetProfileSerializer(serializers.ModelSerializer):
class Meta:
model = User
exclude = 'password',
class UserProfileSerializer(serializers.ModelSerializer):
class Meta:
model = User
exclude = 'password',
class RegisterUserSerializer(serializers.ModelSerializer):
class Meta:
model = User
fields = '__all__'
def validate_password(self, password):
user = _build_initial_user(self.initial_data)
validate_password(password, user=user)
return password
def get_fields(self):
fields = super().get_fields()
fields['password_confirm'] = serializers.CharField(write_only=True)
return fields
def validate(self, attrs):
if attrs['password'] != attrs['password_confirm']:
raise ValidationError(_("Passwords don't match"))
return attrs
def create(self, validated_data):
data = validated_data.copy()
del data['password_confirm']
return self.Meta.model.objects.create_user(**data)
class ChangePasswordSerializer(serializers.Serializer):
old_password = serializers.CharField()
password = serializers.CharField()
def validate_old_password(self, old_password):
user = self.context['request'].user
if not user.check_password(old_password):
raise serializers.ValidationError(_('Old password is not correct'))
return old_password
def validate_password(self, password):
user = self.context['request'].user
validate_password(password, user=user)
return password
def get_fields(self):
fields = super().get_fields()
fields['password_confirm'] = serializers.CharField()
return fields
def validate(self, attrs):
if attrs['password'] != attrs['password_confirm']:
raise serializers.ValidationError(_("Passwords don't match"))
return attrs
<|reserved_special_token_1|>
from rest_framework import serializers
from django.contrib import auth
from rest_framework.exceptions import ValidationError
from django.contrib.auth.password_validation import validate_password
from django.utils.translation import gettext as _
from rest_users.utils.api import _build_initial_user
User = auth.get_user_model()
class LoginUserSerializer(serializers.Serializer):
login = serializers.CharField()
password = serializers.CharField()
def get_authenticated_user(self):
login, password = self.validated_data['login'], self.validated_data['password']
user = None
login_field_names = [User.USERNAME_FIELD, User.EMAIL_FIELD]
for field_name in login_field_names:
kwargs = {
field_name: login,
'password': password,
}
user = auth.authenticate(**kwargs)
if user:
break
return user
class LogoutSerializer(serializers.Serializer):
revoke_token = serializers.BooleanField(default=False)
class UserGetProfileSerializer(serializers.ModelSerializer):
class Meta:
model = User
exclude = ('password',)
class UserProfileSerializer(serializers.ModelSerializer):
class Meta:
model = User
exclude = ('password',)
class RegisterUserSerializer(serializers.ModelSerializer):
class Meta:
model = User
fields = '__all__'
def validate_password(self, password):
user = _build_initial_user(self.initial_data)
validate_password(password, user=user)
return password
def get_fields(self):
fields = super().get_fields()
fields['password_confirm'] = serializers.CharField(write_only=True)
return fields
def validate(self, attrs):
if attrs['password'] != attrs['password_confirm']:
raise ValidationError(_("Passwords don't match"))
return attrs
def create(self, validated_data):
data = validated_data.copy()
del data['password_confirm']
return self.Meta.model.objects.create_user(**data)
class ChangePasswordSerializer(serializers.Serializer):
    """Validate a password change: current password, new password, confirmation."""

    old_password = serializers.CharField()
    password = serializers.CharField()

    def get_fields(self):
        """Extend the declared fields with a confirmation field."""
        declared = super().get_fields()
        declared['password_confirm'] = serializers.CharField()
        return declared

    def validate_old_password(self, old_password):
        """Reject the change when the caller's current password does not match."""
        if not self.context['request'].user.check_password(old_password):
            raise serializers.ValidationError(_("Old password is not correct"))
        return old_password

    def validate_password(self, password):
        """Run Django's password validators against the requesting user."""
        validate_password(password, user=self.context['request'].user)
        return password

    def validate(self, attrs):
        """Ensure the new password and its confirmation agree."""
        if attrs['password'] != attrs['password_confirm']:
            raise serializers.ValidationError(_("Passwords don't match"))
        return attrs
|
flexible
|
{
"blob_id": "88e34878cdad908ed4ac30da82355aaa46ed719b",
"index": 5429,
"step-1": "<mask token>\n\n\nclass RegisterUserSerializer(serializers.ModelSerializer):\n\n\n class Meta:\n model = User\n fields = '__all__'\n\n def validate_password(self, password):\n user = _build_initial_user(self.initial_data)\n validate_password(password, user=user)\n return password\n\n def get_fields(self):\n fields = super().get_fields()\n fields['password_confirm'] = serializers.CharField(write_only=True)\n return fields\n\n def validate(self, attrs):\n if attrs['password'] != attrs['password_confirm']:\n raise ValidationError(_(\"Passwords don't match\"))\n return attrs\n <mask token>\n\n\nclass ChangePasswordSerializer(serializers.Serializer):\n old_password = serializers.CharField()\n password = serializers.CharField()\n\n def validate_old_password(self, old_password):\n user = self.context['request'].user\n if not user.check_password(old_password):\n raise serializers.ValidationError(_('Old password is not correct'))\n return old_password\n\n def validate_password(self, password):\n user = self.context['request'].user\n validate_password(password, user=user)\n return password\n\n def get_fields(self):\n fields = super().get_fields()\n fields['password_confirm'] = serializers.CharField()\n return fields\n\n def validate(self, attrs):\n if attrs['password'] != attrs['password_confirm']:\n raise serializers.ValidationError(_(\"Passwords don't match\"))\n return attrs\n",
"step-2": "<mask token>\n\n\nclass UserGetProfileSerializer(serializers.ModelSerializer):\n\n\n class Meta:\n model = User\n exclude = 'password',\n\n\nclass UserProfileSerializer(serializers.ModelSerializer):\n\n\n class Meta:\n model = User\n exclude = 'password',\n\n\nclass RegisterUserSerializer(serializers.ModelSerializer):\n\n\n class Meta:\n model = User\n fields = '__all__'\n\n def validate_password(self, password):\n user = _build_initial_user(self.initial_data)\n validate_password(password, user=user)\n return password\n\n def get_fields(self):\n fields = super().get_fields()\n fields['password_confirm'] = serializers.CharField(write_only=True)\n return fields\n\n def validate(self, attrs):\n if attrs['password'] != attrs['password_confirm']:\n raise ValidationError(_(\"Passwords don't match\"))\n return attrs\n\n def create(self, validated_data):\n data = validated_data.copy()\n del data['password_confirm']\n return self.Meta.model.objects.create_user(**data)\n\n\nclass ChangePasswordSerializer(serializers.Serializer):\n old_password = serializers.CharField()\n password = serializers.CharField()\n\n def validate_old_password(self, old_password):\n user = self.context['request'].user\n if not user.check_password(old_password):\n raise serializers.ValidationError(_('Old password is not correct'))\n return old_password\n\n def validate_password(self, password):\n user = self.context['request'].user\n validate_password(password, user=user)\n return password\n\n def get_fields(self):\n fields = super().get_fields()\n fields['password_confirm'] = serializers.CharField()\n return fields\n\n def validate(self, attrs):\n if attrs['password'] != attrs['password_confirm']:\n raise serializers.ValidationError(_(\"Passwords don't match\"))\n return attrs\n",
"step-3": "<mask token>\n\n\nclass LogoutSerializer(serializers.Serializer):\n <mask token>\n\n\nclass UserGetProfileSerializer(serializers.ModelSerializer):\n\n\n class Meta:\n model = User\n exclude = 'password',\n\n\nclass UserProfileSerializer(serializers.ModelSerializer):\n\n\n class Meta:\n model = User\n exclude = 'password',\n\n\nclass RegisterUserSerializer(serializers.ModelSerializer):\n\n\n class Meta:\n model = User\n fields = '__all__'\n\n def validate_password(self, password):\n user = _build_initial_user(self.initial_data)\n validate_password(password, user=user)\n return password\n\n def get_fields(self):\n fields = super().get_fields()\n fields['password_confirm'] = serializers.CharField(write_only=True)\n return fields\n\n def validate(self, attrs):\n if attrs['password'] != attrs['password_confirm']:\n raise ValidationError(_(\"Passwords don't match\"))\n return attrs\n\n def create(self, validated_data):\n data = validated_data.copy()\n del data['password_confirm']\n return self.Meta.model.objects.create_user(**data)\n\n\nclass ChangePasswordSerializer(serializers.Serializer):\n old_password = serializers.CharField()\n password = serializers.CharField()\n\n def validate_old_password(self, old_password):\n user = self.context['request'].user\n if not user.check_password(old_password):\n raise serializers.ValidationError(_('Old password is not correct'))\n return old_password\n\n def validate_password(self, password):\n user = self.context['request'].user\n validate_password(password, user=user)\n return password\n\n def get_fields(self):\n fields = super().get_fields()\n fields['password_confirm'] = serializers.CharField()\n return fields\n\n def validate(self, attrs):\n if attrs['password'] != attrs['password_confirm']:\n raise serializers.ValidationError(_(\"Passwords don't match\"))\n return attrs\n",
"step-4": "<mask token>\n\n\nclass LogoutSerializer(serializers.Serializer):\n revoke_token = serializers.BooleanField(default=False)\n\n\nclass UserGetProfileSerializer(serializers.ModelSerializer):\n\n\n class Meta:\n model = User\n exclude = 'password',\n\n\nclass UserProfileSerializer(serializers.ModelSerializer):\n\n\n class Meta:\n model = User\n exclude = 'password',\n\n\nclass RegisterUserSerializer(serializers.ModelSerializer):\n\n\n class Meta:\n model = User\n fields = '__all__'\n\n def validate_password(self, password):\n user = _build_initial_user(self.initial_data)\n validate_password(password, user=user)\n return password\n\n def get_fields(self):\n fields = super().get_fields()\n fields['password_confirm'] = serializers.CharField(write_only=True)\n return fields\n\n def validate(self, attrs):\n if attrs['password'] != attrs['password_confirm']:\n raise ValidationError(_(\"Passwords don't match\"))\n return attrs\n\n def create(self, validated_data):\n data = validated_data.copy()\n del data['password_confirm']\n return self.Meta.model.objects.create_user(**data)\n\n\nclass ChangePasswordSerializer(serializers.Serializer):\n old_password = serializers.CharField()\n password = serializers.CharField()\n\n def validate_old_password(self, old_password):\n user = self.context['request'].user\n if not user.check_password(old_password):\n raise serializers.ValidationError(_('Old password is not correct'))\n return old_password\n\n def validate_password(self, password):\n user = self.context['request'].user\n validate_password(password, user=user)\n return password\n\n def get_fields(self):\n fields = super().get_fields()\n fields['password_confirm'] = serializers.CharField()\n return fields\n\n def validate(self, attrs):\n if attrs['password'] != attrs['password_confirm']:\n raise serializers.ValidationError(_(\"Passwords don't match\"))\n return attrs\n",
"step-5": "from rest_framework import serializers\nfrom django.contrib import auth\nfrom rest_framework.exceptions import ValidationError\nfrom django.contrib.auth.password_validation import validate_password\nfrom django.utils.translation import gettext as _\nfrom rest_users.utils.api import _build_initial_user\n\nUser = auth.get_user_model()\n\n\nclass LoginUserSerializer(serializers.Serializer):\n login = serializers.CharField()\n password = serializers.CharField()\n\n def get_authenticated_user(self):\n login, password = self.validated_data['login'], self.validated_data['password']\n user = None\n login_field_names = [User.USERNAME_FIELD, User.EMAIL_FIELD]\n\n for field_name in login_field_names:\n kwargs = {\n field_name: login,\n 'password': password,\n }\n user = auth.authenticate(**kwargs)\n if user:\n break\n\n return user\n\n\nclass LogoutSerializer(serializers.Serializer):\n revoke_token = serializers.BooleanField(default=False)\n\n\nclass UserGetProfileSerializer(serializers.ModelSerializer):\n\n class Meta:\n model = User\n exclude = ('password',)\n\n\nclass UserProfileSerializer(serializers.ModelSerializer):\n class Meta:\n model = User\n exclude = ('password',)\n\n\nclass RegisterUserSerializer(serializers.ModelSerializer):\n class Meta:\n model = User\n fields = '__all__'\n\n def validate_password(self, password):\n user = _build_initial_user(self.initial_data)\n validate_password(password, user=user)\n return password\n\n def get_fields(self):\n fields = super().get_fields()\n fields['password_confirm'] = serializers.CharField(write_only=True)\n return fields\n\n def validate(self, attrs):\n if attrs['password'] != attrs['password_confirm']:\n raise ValidationError(_(\"Passwords don't match\"))\n return attrs\n\n def create(self, validated_data):\n data = validated_data.copy()\n del data['password_confirm']\n return self.Meta.model.objects.create_user(**data)\n\n\nclass ChangePasswordSerializer(serializers.Serializer):\n old_password = 
serializers.CharField()\n password = serializers.CharField()\n\n def validate_old_password(self, old_password):\n user = self.context['request'].user\n if not user.check_password(old_password):\n raise serializers.ValidationError(_(\"Old password is not correct\"))\n return old_password\n\n def validate_password(self, password):\n user = self.context['request'].user\n validate_password(password, user=user)\n return password\n\n def get_fields(self):\n fields = super().get_fields()\n fields['password_confirm'] = serializers.CharField()\n return fields\n\n def validate(self, attrs):\n if attrs['password'] != attrs['password_confirm']:\n raise serializers.ValidationError(_(\"Passwords don't match\"))\n return attrs\n\n",
"step-ids": [
10,
13,
14,
15,
21
]
}
|
[
10,
13,
14,
15,
21
] |
"""
Make html galleries from media directories. Organize by dates, by subdirs or by
the content of a diary file. The diary file is a markdown file organized by
dates, each day described by a text and some medias (photos and movies).
The diary file can be exported to:
* an html file with the text and subset of medias associated with each day,
* the previous html file extended with all medias in the media directory,
* an html file ready to import into Blogger.
"""
import sys
import os
import argparse
import glob
import shutil
import re
import io
import bisect
import locale
import textwrap
import base64
import datetime
import urllib
from configparser import ConfigParser
from collections import defaultdict
from subprocess import check_output, CalledProcessError, STDOUT
from urllib.request import urlopen
import colorama
import clipboard
import PIL
from PIL import Image, ImageChops
from lxml import objectify
import markdown
# Command-line usage text shown to the user in the help output.
USAGE = """
galerie --gallery <root-dir> [--sourcedir <media-dir>]
[--bydir true|false*]
[--bydate true|false*]
[--diary true|false*]
[--recursive true|false*]
[--dates source*|diary|<yyyymmdd-yyyymmdd>]
[--github_pages true|false]
[--dest <directory>]
[--forcethumb]
galerie --update <root-dir>
galerie --create <root-dir> --sourcedir <media-dir>
[--recursive true|false*]
[--dates source*|<yyyymmdd-yyyymmdd>]
galerie --blogger <root-dir> --url <url>
[--check]
[--full]
[--dest <filename>]
Notes:
- * gives default
- all options can be abbreviated if there is no conflict with other options (--gallery --> --gal)
"""
# -- Post objects -------------------------------------------------------------
# CSS letting captioned media (<span> wrappers) lay out as inline tables.
CAPTION_IMAGE_STYLE = '''\
<style type="text/css">
span { display:inline-table; }
</style>\
'''
# Base CSS shared by all generated pages (compact paragraphs and headings).
# '%%' survives the later 'START % title' formatting as a literal '%'.
STYLE = '''\
<style type="text/css">
p { margin-top:0px; margin-bottom:0px; }
h3 { font-size: 100%%; font-weight: bold; margin-top:0px; margin-bottom:0px; }
</style>
'''
# Page header template; '%s' receives the page title via 'START % title'.
START = f'''\
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
<title>%s</title>
<link rel="icon" href="favicon.ico" />
<meta name="viewport" content="width=device-width">
<link rel="stylesheet" href="photobox/photobox.css">
<script src="photobox/jquery.min.js"></script>
<script src="photobox/jquery.photobox.js"></script>
{CAPTION_IMAGE_STYLE}
{STYLE}
</head>
<body>\
'''
# Fixed-position buttons toggling between full, diary-only and text-only views.
BUTTONS = '''\
<button id="btn_full" type="button" style="position: fixed; width: 50px; top: 20px; right: 20px; background-color:white">Full</button>
<button id="btn_blog" type="button" style="position: fixed; width: 50px; top: 40px; right: 20px; background-color:white">Diary</button>
<button id="btn_text" type="button" style="position: fixed; width: 50px; top: 60px; right: 20px; background-color:white">Text</button>
<script>
$('#btn_full').click(function() {
$("[id^=gallery-blog]").show();
$("[id^=gallery-dcim]").show();
$("div.extra").show();
});
$('#btn_text').click(function() {
$("[id^=gallery-blog]").hide();
$("[id^=gallery-dcim]").hide();
$("div.extra").hide();
});
$('#btn_blog').click(function() {
$("[id^=gallery-blog]").show();
$("[id^=gallery-dcim]").hide();
$("div.extra").hide();
});
</script>
'''
# Background color of sub-directory mosaic thumbnails.
SUBDIR_BACKCOL = '#eee'
END = '</body>\n</html>'
SEP = '<hr color="#C0C0C0" size="1" />'
# Diary media templates: (uri, thumb, width, height, title[, caption]).
IMGPOST = '<a href="%s"><img src="%s" width="%d" height="%d" title="%s"/></a>'
VIDPOST = '<a href="%s" rel="video"><img src="%s" width="%d" height="%d" title="%s"/></a>'
IMGPOSTCAPTION = '''\
<span>
<a href="%s"><img src=%s width="%d" height="%d" title="%s"/></a>
<p>%s</p>
</span>
'''
VIDPOSTCAPTION = '''\
<span>
<a href="%s" rel="video"><img src=%s width="%d" height="%d" title="%s"/></a>
<p>%s</p>
</span>
'''
# Gallery (dcim) media templates, same placeholder order as above.
IMGDCIM = '<a href="%s"><img src="%s" width="%d" height="%d" title="%s"/></a>'
VIDDCIM = '<a href="%s" rel="video"><img src="%s" width="%d" height="%d" title="%s"/></a>'
# To reduce the spacing between images we use:
# "display: block;", "margin-bottom: 0em;" and "font-size: 0;"
# "display: block;" on img: correct spacing on desktop but not centered on phone
# "display: block;" on a: ok
# Sub-directory templates: link to the sub-page, mosaic thumbnail, optional caption.
DIRPOST = '<a href="%s"><img src="%s" width="%d" height="%d" style="border: 1px solid #C0C0C0;" /></a>'
DIRPOSTCAPTION = f'''
<span style="background-color:{SUBDIR_BACKCOL}; margin-bottom: 8px; border: 1px solid #C0C0C0;">
<a href="%s"><img src="%s" width="%d" height="%d" style="border: 1px solid #C0C0C0;" /></a>
<p style="margin-left:2px;">%s</p>
</span>
'''
# Blogger export templates: centered image (original uri, resized url) and caption.
BIMGPAT = '''\
<div class="separator" style="clear: both; text-align: center;">
<a href="%s" style="clear: left; margin-bottom: 0em; margin-right: 1em; font-size: 0; display: block;">
<img border="0" src="%s" width="640" />
</a></div>
'''
CAPTION_PAT = '''\
<div class="separator" style="clear: both; text-align: center;">
%s
</div>
'''
class Post:
    """A diary entry: one date with a text, explicit medias and directory medias."""

    def __init__(self, date, text, medias):
        # date: yyyymmdd
        self.date = date
        self.text = text
        # Medias explicitly listed in the diary file for this post.
        self.medias = medias
        # Medias found in the media directory for this date (filled in later).
        self.dcim = []
        # 1-based rank of this post among posts sharing the same date.
        self.daterank = 0
        # Marks posts wrapped in a <div class="extra"> (hidden by the Diary button).
        self.extra = False

    def __lt__(self, other):
        # Posts order chronologically by their yyyymmdd date string.
        return self.date < other.date

    @classmethod
    def from_markdown(cls, post):
        """Build a Post from the lines of one markdown record, consuming *post*."""
        # First line must carry the date as [yyyy/mm/dd].
        m = re.match(r'\[(\d\d\d\d/\d\d/\d\d)\]\n*', post[0])
        if m:
            date = m.group(1).replace('/', '')
            if not validate_date(date):
                error('Incorrect date value:', date)
            del post[0]
        else:
            error('No date in post', ' '.join(post))
        # Skip blank lines after the date.
        while post and not post[0].strip():
            del post[0]
        # The text is everything up to the first media line ('![](...)' or '[](...)').
        text = ''
        while post and not re.match(r'!?\[\]', post[0]):
            text += post[0]
            del post[0]
        # remove empty lines at end
        text = re.sub(r'\n\n$', '\n', text)
        # Each media line may be followed by a single caption line.
        medias = list()
        while post and (match := re.match(r'!?\[\]\((.*)\)', post[0])):
            media = match.group(1)
            caption = None
            del post[0]
            if post and not re.match(r'!?\[\]', post[0]):
                caption = post[0].strip()
                del post[0]
            if match.group(0)[0] == '!':
                # A leading '!' is the markdown image marker; plain []( ) is a video link.
                medias.append(PostImage(caption, media))
            else:
                medias.append(PostVideo(caption, media))
        return cls(date, text, medias)

    @classmethod
    def from_date(cls, date):
        """Build an empty post for *date* whose text is the locale-formatted date."""
        dt = datetime.datetime.strptime(date, '%Y%m%d')
        datetext = dt.strftime("%A %d %B %Y").capitalize()
        post = cls(date, text=datetext, medias=[])
        post.daterank = 1
        return post

    def to_html(self, args, target='regular'):
        """Render the post as a list of html fragments for the given target."""
        if target == 'regular':
            if args.diary:
                return self.to_html_diary(args)
            else:
                return self.to_html_regular(args)
        if target == 'blogger':
            return self.to_html_blogger()

    def to_html_regular(self, args):
        """Html for a plain gallery page: optional text, then directory medias."""
        html = list()
        if self.text:
            # possible with --bydate
            html.append(markdown.markdown(self.text))
        subdirs, dcim = dispatch_post_items(self.dcim)
        if self.dcim:
            html.append(SEP)
        for media in subdirs:
            html.append(media.to_html_dcim(args))
        if dcim:
            # The div id is the hook for the photobox gallery activation script.
            html.append(f'<div id="gallery-dcim-{self.date}-{self.daterank}">')
            for media in dcim:
                html.append(media.to_html_dcim(args))
            html.append('</div>')
        html.append(SEP)
        return html

    def to_html_diary(self, args):
        """Html for a diary page: text and diary medias, then directory medias."""
        html = list()
        if self.extra:
            html.append('<div class="extra">')
        if self.text:
            html.append(markdown.markdown(self.text))
        if self.medias:
            html.append(f'<div id="gallery-blog-{self.date}-{self.daterank}">')
            for media in self.medias:
                html.append(media.to_html_post(args))
            html.append('</div>')
        _, dcim = dispatch_post_items(self.dcim)
        if dcim:
            html.append(f'<div id="gallery-dcim-{self.date}-{self.daterank}">')
            html.append(SEP)
            for media in dcim:
                html.append(media.to_html_dcim(args))
            html.append('</div>')
        html.append(SEP)
        if self.extra:
            html.append('</div>')
        return html

    def to_html_blogger(self):
        """Html for the Blogger export: text, medias, then a separator."""
        html = list()
        html.append(markdown.markdown(self.text))
        for image in self.medias:
            html.append(image.to_html_blogger())
        html.append(SEP)
        return html
class PostItem:
    """Base class for anything that can appear in a post (image, video, subdir)."""
    def __init__(self, caption, uri, thumb=None, thumbsize=None, descr=''):
        self.caption = caption
        self.uri = uri
        self.basename = os.path.basename(uri)
        # Thumbnail path and (width, height); filled in during thumbnail creation.
        self.thumb = thumb
        self.thumbsize = thumbsize
        # Human-readable media description shown as the html title attribute.
        self.descr = descr
        # URL of the resized copy; set externally, used by the Blogger export.
        self.resized_url = None
class PostImage(PostItem):
    """An image attached to a post, renderable as markdown, gallery html or Blogger html."""

    def to_markdown(self):
        """Return the markdown image syntax, with the caption on the next line when present."""
        # Fix: the format strings were missing the '![](%s)' image syntax, so
        # '%' formatting raised TypeError ('not all arguments converted').
        # This mirrors PostVideo.to_markdown ('[](uri)') and matches the
        # parser in Post.from_markdown (r'!?\[\]\((.*)\)').
        if not self.caption:
            return '![](%s)' % (self.uri,)
        else:
            return '![](%s)\n%s' % (self.uri, self.caption)

    def to_html_post(self, args):
        """Html for the diary part of the page (thumbnail linking to the full image)."""
        descr = self.descr if args.thumbnails.media_description else ''
        if not self.caption:
            return IMGPOST % (self.uri, self.thumb, *self.thumbsize, descr)
        else:
            return IMGPOSTCAPTION % (self.uri, self.thumb, *self.thumbsize, descr, self.caption)

    def to_html_dcim(self, args):
        """Html for the gallery part of the page; the link is made relative to the root."""
        descr = self.descr if args.thumbnails.media_description else ''
        return IMGDCIM % (relative_url(self.uri, args.root), self.thumb, *self.thumbsize, descr)

    def to_html_blogger(self):
        """Html for the Blogger export, using the externally set resized_url."""
        if not self.caption:
            return BIMGPAT % (self.uri, self.resized_url)
        else:
            return f'{BIMGPAT}\n{CAPTION_PAT}' % (self.uri, self.resized_url, self.caption)
class PostVideo(PostItem):
    """A video attached to a post, renderable as markdown, gallery html or Blogger html."""
    def to_markdown(self):
        """Return the markdown link form '[](uri)', with the caption on the next line when present."""
        if not self.caption:
            return '[](%s)' % (self.uri,)
        else:
            return '[](%s)\n%s' % (self.uri, self.caption)
    def to_html_post(self, args):
        """Html for the diary part of the page (thumbnail opening the video in photobox)."""
        descr = self.descr if args.thumbnails.media_description else ''
        if not self.caption:
            return VIDPOST % (self.uri, self.thumb, *self.thumbsize, descr)
        else:
            return VIDPOSTCAPTION % (self.uri, self.thumb, *self.thumbsize, descr, self.caption)
    def to_html_dcim(self, args):
        """Html for the gallery part of the page; the link is made relative to the root."""
        descr = self.descr if args.thumbnails.media_description else ''
        return VIDDCIM % (relative_url(self.uri, args.root), self.thumb, *self.thumbsize, descr)
    def to_html_blogger(self):
        """Html for the Blogger export: the embed markup centered, plus the caption.

        NOTE(review): self.iframe is not set in this class or in PostItem --
        presumably assigned by the Blogger export code; confirm against callers.
        """
        x = f'<p style="text-align: center;">{self.iframe}</p>'
        if not self.caption:
            return x
        else:
            return f'%s\n{CAPTION_PAT}' % (x, self.caption)
class PostSubdir(PostItem):
    """A gallery sub-directory, rendered as a mosaic thumbnail linking to its own page."""
    def to_html_dcim(self, args):
        # NOTE(review): self.htmname and self.posts are set externally after
        # construction (not by PostItem.__init__) -- confirm against callers.
        basename = os.path.basename(self.htmname)
        posts = self.posts
        title = self.caption
        # Generate (or refresh) the html page of the sub-directory itself.
        print_html(args, posts, title, self.htmname)
        if not self.caption:
            return DIRPOST % (basename, self.thumb, *self.thumbsize)
        else:
            return DIRPOSTCAPTION % (basename, self.thumb, *self.thumbsize, self.caption)
def relative_url(path, root):
    """
    Return a percent-encoded URL for *path* relative to *root*.

    Backslashes are normalized to forward slashes on Windows so the result
    is a valid URL path. Calls error() when no relative path exists
    (e.g. different drives on Windows).
    """
    try:
        url = os.path.relpath(path, root)
    except ValueError:
        # Fix: the original bare 'except' reported the still-unbound local
        # 'url' (raising NameError and masking the real failure); report the
        # offending input 'path' instead, and only catch relpath's ValueError.
        error('Unable to make a relative url:', path, root)
    url = url.replace('\\', '/') if os.sep == '\\' else url
    return urllib.parse.quote(url)
# -- Markdown parser ----------------------------------------------------------
def parse_markdown(filename):
    """
    Generate Post objects from markdown. Date must be present in each post and
    posts must be ordered by date.
    """
    if not os.path.exists(filename):
        error('File not found', filename)
    posts = list()
    with open(filename, encoding='utf-8') as f:
        # Optional '# title' header on the first line, then a line to skip.
        # NOTE(review): next(f) raises StopIteration on an empty or one-line
        # file -- confirm callers guarantee a well-formed diary file.
        line = next(f)
        if line.startswith('# '):
            title = line[2:].strip()
            record = []
            next(f)
        else:
            title = None
            record = [line]
        # Records are separated by underscore lines ('___...').
        # NOTE(review): a trailing record without a terminating underscore
        # line is silently dropped -- print_markdown always writes one.
        for line in f:
            if not line.startswith('___'):
                record.append(line)
            else:
                posts.append(Post.from_markdown(record))
                record = []
    # set rank of posts in date
    daterank = defaultdict(int)
    for post in posts:
        daterank[post.date] += 1
        post.daterank = daterank[post.date]
    # check post order
    for post1, post2 in zip(posts[:-1], posts[1:]):
        if post1.date > post2.date:
            error('Posts are not ordered', f'{post1.date} > {post2.date}')
    return title, posts
# -- Markdown printer ---------------------------------------------------------
def print_markdown(posts, title, fullname):
    """Write *posts* back to the diary markdown file *fullname*.

    Layout: a '# title' header, then one record per post ('[yyyy/mm/dd]',
    optional text wrapped at 78 columns, optional media lines), each record
    terminated by an underscore line.
    """
    with open(fullname, 'wt', encoding='utf-8') as fdst:
        def emit(*values):
            print(*values, file=fdst)
        emit(f'# {title}\n')
        for post in posts:
            d = post.date
            emit(f'[{d[0:4]}/{d[4:6]}/{d[6:8]}]')
            if post.text:
                emit()
                for line in post.text.splitlines():
                    if line:
                        for chunk in textwrap.wrap(line, width=78):
                            emit(chunk)
                    else:
                        emit()
            if post.medias:
                emit()
                for media in post.medias:
                    emit(media.to_markdown())
            emit('______')
# -- html printer -------------------------------------------------------------
def compose_html_reduced(args, posts, title, target):
    """Build the page as a list of html lines, without the photobox scripts."""
    lines = [START % title]
    for post in posts:
        lines.extend(line.strip() for line in post.to_html(args, target))
        lines.append('')
    lines.append(END)
    return lines


def compose_html_full(args, posts, title, target):
    """Build the full page as a list of html lines, including the photobox
    activation script (and the view filter buttons in diary mode)."""
    lines = [START % title]
    if args.diary:
        lines.append(BUTTONS)
    for post in posts:
        lines.extend(line.strip() for line in post.to_html(args, target))
        lines.append('')
    lines.append('<script>')
    for post in posts:
        if post.medias:
            lines.append(gallery_call(args, f'gallery-blog-{post.date}-{post.daterank}'))
        if post.dcim:
            lines.append(gallery_call(args, f'gallery-dcim-{post.date}-{post.daterank}'))
    lines.append('</script>')
    lines.append(END)
    return lines


def print_html_to_stream(args, posts, title, stream, target):
    """Write the composed page to *stream*: 'regular' gets the full page,
    anything else (blogger) the reduced one."""
    compose = compose_html_full if target == 'regular' else compose_html_reduced
    for line in compose(args, posts, title, target):
        print(line, file=stream)


def print_html(args, posts, title, html_name, target='regular'):
    """Render the page; write it to *html_name* (leaving an identical file
    untouched) or return it as a string when *html_name* is empty."""
    assert target in ('regular', 'blogger')
    with io.StringIO() as buffer:
        print_html_to_stream(args, posts, title, buffer, target)
        html = buffer.getvalue()
    if not html_name:
        return html
    if os.path.exists(html_name):
        # Skip the write (and the mtime change) when nothing changed.
        with open(html_name, 'rt', encoding='utf-8') as f:
            if html == f.read():
                return None
    with open(html_name, 'wt', encoding='utf-8') as f:
        f.write(html)
    return None
# Template of the jQuery photobox activation call; the newlines are stripped
# before formatting, producing a single line of javascript.
GALLERYCALL = """
$('#%s').photobox('a', {
loop:%s,
thumbs:%s,
autoplay:%s,
time:%d,
zoomable:%s ,
rotatable:%s,
wheelNextPrev:%s
});
"""


def gallery_call(args, gallery_id):
    """Return the one-line javascript call activating photobox on *gallery_id*,
    with options taken from the photobox configuration."""
    pb = args.photobox

    def js_bool(value):
        # Render Python booleans with their javascript spelling.
        return str(value).lower()

    params = (
        gallery_id,
        js_bool(pb.loop),
        js_bool(pb.thumbs),
        js_bool(pb.autoplay),
        pb.time,
        js_bool(pb.zoomable),
        js_bool(pb.rotatable),
        js_bool(pb.wheelNextPrev),
    )
    return GALLERYCALL.replace('\n', '') % params
# -- Media description --------------------------------------------------------
# Recognized media file extensions (lowercase, with leading dot).
_IMAGE_EXTENSIONS = frozenset({'.jpg', '.jpeg', '.png', '.gif', '.bmp', '.webp', '.tif'})
_VIDEO_EXTENSIONS = frozenset({'.mp4', '.webm', '.mkv', '.flv', '.m4v', '.avi', '.wmv', '.mts', '.vob', '.divx'})


def is_image_file(name):
    """True when *name* has a recognized image extension (case-insensitive)."""
    return os.path.splitext(name)[1].lower() in _IMAGE_EXTENSIONS


def is_video_file(name):
    """True when *name* has a recognized video extension (case-insensitive)."""
    return os.path.splitext(name)[1].lower() in _VIDEO_EXTENSIONS


def is_media(name):
    """True when *name* is either an image or a video file."""
    return is_image_file(name) or is_video_file(name)
def validate_date(datestr):
    """True when *datestr* is a valid calendar date in yyyymmdd form."""
    # datestr = yyyymmdd
    try:
        datetime.datetime.strptime(datestr, '%Y%m%d')
    except ValueError:
        return False
    return True


def date_from_name(name):
    """Extract a yyyymmdd date from a file name; None when absent or invalid."""
    # Heuristic: the first standalone run of exactly eight digits.
    match = re.search(r'(?:\D|^)(\d{8})(?:\D|$)', name, re.ASCII)
    if match and validate_date(match.group(1)):
        return match.group(1)
    return None


def date_from_item(filename):
    """Date of a media item: from its name when possible, else from its mtime."""
    named = date_from_name(filename)
    if named:
        return named
    mtime = os.path.getmtime(filename)
    return datetime.datetime.fromtimestamp(mtime).strftime('%Y%m%d')


def time_from_name(name):
    """Extract an hhmmss time following a yyyymmdd run in a file name; None when absent or invalid."""
    match = re.search(r'(?:\D|^)(\d{8})\D(\d{6})(?:\D|$)', name, re.ASCII)
    if match:
        digits = match.group(2)
        hour, minute, second = int(digits[0:2]), int(digits[2:4]), int(digits[4:6])
        if hour < 24 and minute < 60 and second < 60:
            return digits
    return None


def time_from_item(filename):
    """Time of day of a media item: from its name when possible, else from its mtime."""
    named = time_from_name(filename)
    if named:
        return named
    mtime = os.path.getmtime(filename)
    return datetime.datetime.fromtimestamp(mtime).strftime('%H%M%S')
# ffprobe invocation (split on whitespace before use) printing, as bare CSV:
# width, height, average and raw frame rate of the first video stream, then
# the container duration in seconds (parsed by parse_ffprobe_output).
FFPROBE_CMD = '''\
ffprobe -v error
-select_streams v:0
-show_entries stream=width,height,avg_frame_rate,r_frame_rate:format=duration
-of csv=p=0
'''
def get_image_info(filename):
    """Return ((date, time, width, height, size), formatted_text) for an image file."""
    date = date_from_item(filename)
    time = time_from_item(filename)
    img = Image.open(filename)
    width, height = img.size
    # File size in megabytes, rounded to one decimal.
    size = round(os.path.getsize(filename) / 1e6, 1)
    return (date, time, width, height, size), f'{date} {time}, dim={width}x{height}, {size} MB'
def get_video_info(filename, info_fullname):
    """Return ((date, time, width, height, size, duration, fps), formatted_text)
    for a video file.

    Probing requires running ffprobe, so the result is cached in the sidecar
    file *info_fullname*: read it when present, otherwise probe and write it.
    """
    if os.path.exists(info_fullname):
        # Cached: one line of space-separated values, same order as returned.
        with open(info_fullname) as f:
            info = f.readline().split()
        date, time, width, height, size, duration, fps = info[0], info[1], int(info[2]), int(info[3]), float(info[4]), int(info[5]), float(info[6])
        formatted_info = format_video_info(date, time, width, height, size, duration, fps)
        return (date, time, width, height, size, duration, fps), formatted_info
    else:
        info, formatted_info = make_video_info(filename, info_fullname)
        with open(info_fullname, 'wt') as f:
            print(' '.join([str(_) for _ in info]), file=f)
        return info, formatted_info
def make_video_info(filename, info_fullname):
    """Probe *filename* with ffprobe and return
    ((date, time, width, height, size, duration, fps), formatted_text).

    NOTE(review): info_fullname is unused here; the caller (get_video_info)
    writes the cache file itself -- confirm the parameter is intentional.
    """
    # ffmpeg must be in path
    date = date_from_item(filename)
    time = time_from_item(filename)
    command = [*FFPROBE_CMD.split(), filename]
    try:
        output = check_output(command, stderr=STDOUT).decode()
        width, height, fps, duration = parse_ffprobe_output(output)
        size = round(os.path.getsize(filename) / 1e6, 1)
        output = format_video_info(date, time, width, height, size, duration, fps)
    except CalledProcessError as e:
        # Surface ffprobe's own error text before propagating the failure.
        output = e.output.decode()
        warning(output)
        raise
    return (date, time, width, height, size, duration, fps), output
def parse_ffprobe_output(ffprobe_output):
    """Parse the CSV output of FFPROBE_CMD into (width, height, fps, duration).

    Only the first stream line and the trailing duration are read; fps is
    rounded to one decimal and duration to whole seconds.
    """
    pattern = r'(\d+),(\d+),(\d+)/(\d+),(\d+/\d+).*\s(\d+\.\d+)'
    match = re.match(pattern, ffprobe_output, re.DOTALL)
    width, height = int(match.group(1)), int(match.group(2))
    fps = round(int(match.group(3)) / int(match.group(4)), 1)
    duration = round(float(match.group(6)))
    return width, height, fps, duration
def format_video_info(date, time, width, height, size, duration, fps):
    """One-line human-readable description of a video's properties."""
    return f'{date} {time}, dim={width}x{height}, {format_duration(duration)}, fps={fps}, {size} MB'


def format_duration(duration):
    """Format a duration in seconds as 'm:s=MM:SS', or 'h:m:s=HH:MM:SS' from one hour up."""
    minutes, seconds = divmod(duration, 60)
    if minutes <= 59:
        return f'm:s={minutes:02}:{seconds:02}'
    hours, minutes = divmod(minutes, 60)
    return f'h:m:s={hours:02}:{minutes:02}:{seconds:02}'
# -- Thumbnails (image and video) ---------------------------------------------
def thumbname(name, key):
    """Thumbnail file name for media *name*, prefixed with its kind *key*."""
    return f'{key}-{name}.jpg'


def size_thumbnail(width, height, maxdim):
    """Thumbnail size preserving aspect ratio, with the longest side equal to *maxdim*."""
    if width >= height:
        return maxdim, int(round(maxdim * height / width))
    return int(round(maxdim * width / height)), maxdim
def make_thumbnail_image(args, image_name, thumb_name, size):
    """Create the image thumbnail unless it is already on disk (or forcethumb is set)."""
    if os.path.exists(thumb_name) and args.forcethumb is False:
        return
    print('Making thumbnail:', thumb_name)
    create_thumbnail_image(image_name, thumb_name, size)
def create_thumbnail_image(image_name, thumb_name, size):
    """Write a jpeg thumbnail of *image_name* at *thumb_name*, at most *size* pixels."""
    imgobj = Image.open(image_name)
    # NOTE(review): the third clause is dead code -- a name cannot end with
    # both '.jpg' and '.gif', so the 'not (... .gif ...)' test is always True
    # whenever the '.jpg' test holds.  Confirm the intended condition.
    if (imgobj.mode != 'RGBA'
            and image_name.endswith('.jpg')
            and not (image_name.endswith('.gif') and imgobj.info.get('transparency'))
            ):
        imgobj = imgobj.convert('RGBA')
    imgobj.thumbnail(size, Image.LANCZOS)
    # Flatten back to RGB so the result can be saved as jpeg.
    imgobj = imgobj.convert('RGB')
    imgobj.save(thumb_name)
def make_thumbnail_video(args, video_name, thumb_name, size, duration):
    """Create the video thumbnail unless it is already on disk (or forcethumb is set)."""
    if os.path.exists(thumb_name) and args.forcethumb is False:
        return
    print('Making thumbnail:', thumb_name)
    create_thumbnail_video(args, video_name, thumb_name, size, duration)
# base64 video.png
# Small icon (png, base64-encoded) pasted onto the bottom-left corner of video
# thumbnails by create_thumbnail_video, so videos can be told apart from images.
VIDEO_ICON = '''\
iVBORw0KGgoAAAANSUhEUgAAABgAAAAUCAAAAACy3qJfAAAA4UlEQVR4
2m1QoRbCMAy88SaK69xscfuEWiS4SZBIcCCRfAL8An8AcnJzTOJSWdxwzJXSPUoHRPQlueYuucigxm
9kDGaMf8AjopGcYn8LmmyLoihBWBiThb+5MTuUsc3aL56upneZ9sByAIg8Z8BEn96EeZ65iU7DvmbP
PxqDcH6p1swXBC4l6yZskACkTN1WrQr2SlIFhTtgqeZa+zsOogLXegvEocZ5c/W5BcoVNNCg3hSudV
/hEh4ofw6cEb00Km8i0dpRDUXfKiaQOEAdrUDo4dFp9C33jjaRac9/gDF/AlplVYtfWGCjAAAAAElF
TkSuQmCC'''
def create_thumbnail_video(args, filename, thumbname, size, duration):
    """Write a jpeg thumbnail for a video by grabbing a frame with ffmpeg,
    then stamp a small icon on it so videos stand out from images.

    The frame is taken thumbdelay seconds in (clamped to the video length).
    Failures are tolerated: a warning is printed and no thumbnail is written.
    """
    # ffmpeg must be in path
    delay = min(duration - 1, args.thumbnails.thumbdelay)
    sizearg = '%dx%d' % size
    # Fix: build the command as an argument list instead of interpolating the
    # file names into a shell string for os.system (quotes or shell
    # metacharacters in names broke the command and allowed shell injection).
    command = ['ffmpeg', '-y', '-v', 'error', '-itsoffset', str(-delay),
               '-i', filename, '-vcodec', 'mjpeg', '-vframes', '1',
               '-an', '-f', 'rawvideo', '-s', sizearg, thumbname]
    try:
        check_output(command, stderr=STDOUT)
    except (CalledProcessError, OSError):
        # Keep the original best-effort behavior: a failed run is detected
        # below when the thumbnail cannot be opened.
        pass
    # add a movie icon to the thumbnail to identify videos
    try:
        img1 = Image.open(thumbname)
    except Exception:
        # ffmpeg was unable to save thumbnail
        warning('Unable to save thumbnail for', filename)
        return
    img2 = Image.open(io.BytesIO(base64.b64decode(VIDEO_ICON)))
    width, height = img1.size
    img1.paste(img2, (6, height - 20 - 6), None)
    img1.save(thumbname)
def make_thumbnail_subdir(args, subdir_name, thumb_name, size, items, thumbdir):
    """Always (re)build the mosaic thumbnail of a sub-directory.

    Unlike image/video thumbnails there is no freshness check here: the
    mosaic depends on the directory content, so it is recomputed every time
    (create_thumbnail_subdir avoids rewriting an identical file).
    """
    # subdir thumbnails are always created as they depend on the content of the
    # directory
    print('Making thumbnail:', thumb_name)
    create_thumbnail_subdir(subdir_name, thumb_name, size, items, thumbdir)
def create_thumbnail_subdir(subdir_name, thumb_name, size, items, thumbdir):
    """Compose the items' thumbnails into a single mosaic jpeg *thumb_name*."""
    def size_thumbnail(width, height, xmax, ymax):
        # Scale so the image covers the whole cell; overflow is cropped below.
        # NOTE: intentionally shadows the module-level size_thumbnail, which
        # fits inside a square instead of covering a cell.
        width2 = xmax
        height2 = int(round(xmax * height / width))
        if height2 < ymax:
            width2 = int(round(ymax * width / height))
            height2 = ymax
        return width2, height2
    thumblist = [os.path.basename(item.thumb) for item in items]
    widthnum, heightnum, width, height, offsetx, offsety = mosaic_geometry(size, thumblist)
    thumbnum = widthnum * heightnum
    img = Image.new('RGB', size, SUBDIR_BACKCOL)
    for ind, thumb in enumerate(thumblist[:min(thumbnum, len(thumblist))]):
        # Cell position in the mosaic grid (row-major order).
        row = ind // widthnum
        col = ind % widthnum
        img2 = Image.open(os.path.join(thumbdir, thumb))
        # Resize to cover the cell, then center-crop to the exact cell size.
        w, h = size_thumbnail(*img2.size, width[col], height[row])
        cropdim = ((w - width[col]) // 2, (h - height[row]) // 2,
                   (w - width[col]) // 2 + width[col], (h - height[row]) // 2 + height[row])
        img2 = img2.resize((w, h), Image.LANCZOS)
        img2 = img2.crop(cropdim)
        img.paste(img2, (offsetx[col], offsety[row]))
    if os.path.exists(thumb_name):
        # test if the generated thumbnail is identical to the one already on disk
        imgref = Image.open(thumb_name)
        # must save and reload before comparing
        byteio = io.BytesIO()
        img.save(byteio, "JPEG")
        byteio.seek(0)
        imgnew = Image.open(byteio)
        diff = ImageChops.difference(imgnew, imgref)
        if diff.getbbox() is None:
            return
    img.save(thumb_name)
def mosaic_geometry(size, thumblist):
    """Compute the grid used to compose a sub-directory mosaic thumbnail.

    Returns (widthnum, heightnum, width, height, offsetx, offsety) where
    width/height give per-column/per-row cell sizes in pixels and
    offsetx/offsety the top-left corner of each column/row, with a one pixel
    outer margin and two pixels between cells.
    """
    count = len(thumblist)
    if count == 1:
        widthnum, heightnum = 1, 1
    elif count <= 3:
        widthnum, heightnum = 1, 2
    elif count <= 8:
        widthnum, heightnum = 2, 2
    else:
        widthnum, heightnum = 3, 3

    def celldims(total, cells):
        # All cells but the last share the same size; the last absorbs the
        # rounding remainder so the grid exactly fills the mosaic.
        if cells == 1:
            return [total - 2]
        dims = [total // cells - 2] * (cells - 1)
        dims.append(total - (1 + sum(dims) + 2 * len(dims) + 1))
        return dims

    def offsets(dims):
        result = [1]
        for d in dims[:-1]:
            result.append(result[-1] + d + 2)
        return result

    width = celldims(size[0], widthnum)
    height = celldims(size[1], heightnum)
    return widthnum, heightnum, width, height, offsets(width), offsets(height)
def list_of_htmlfiles(args, posts):
    """Return the full paths of every html page generated for the gallery:
    the root page plus one page per subdirectory item."""
    pages = [os.path.join(args.dest, args.rootname)]
    for post in posts:
        pages += list_of_htmlfiles_in_items(post.dcim)
    return pages
def list_of_htmlfiles_in_items(itemlist):
    """Recursively collect the html page names attached to PostSubdir items."""
    pages = []
    for entry in itemlist:
        if type(entry) == PostSubdir:
            pages.append(entry.htmname)
            pages += list_of_htmlfiles_in_items(entry.sublist)
    return pages
def list_of_thumbnails(posts, diary=False):
    """Collect the thumbnail basenames referenced by `posts`: diary medias,
    plus dcim medias unless `diary` is True."""
    names = []
    for post in posts:
        names += list_of_thumbnails_in_items(post.medias)
        if diary is False:
            names += list_of_thumbnails_in_items(post.dcim)
    return names
def list_of_thumbnails_in_items(itemlist):
    """Recursively collect thumbnail basenames, descending into subdir items."""
    names = []
    for entry in itemlist:
        names.append(os.path.basename(entry.thumb))
        if type(entry) == PostSubdir:
            names += list_of_thumbnails_in_items(entry.sublist)
    return names
def purge_htmlfiles(args, posts):
    """
    Purge root dir from irrelevant html files
    """
    wanted = list_of_htmlfiles(args, posts)
    stale = [fullname for fullname in glob.glob(os.path.join(args.root, '*.htm*'))
             if fullname not in wanted]
    if len(stale) > args.thumbnails.threshold_htmlfiles:
        # above the configured threshold, ask the user before deleting
        answer = 'x'
        while answer not in 'yn':
            answer = input(f'{len(stale)} html files to remove. Continue [y|n]? ').lower()
        if answer == 'n':
            return
    for fullname in stale:
        print('Removing html files', fullname)
        os.remove(fullname)
def purge_thumbnails(args, thumbdir, posts, diary=False):
    """
    Purge thumbnail dir from irrelevant thumbnails
    """
    wanted = list_of_thumbnails(posts, diary)
    stale = [fullname for fullname in glob.glob(os.path.join(thumbdir, '*.jpg'))
             if os.path.basename(fullname) not in wanted]
    if len(stale) > args.thumbnails.threshold_thumbs:
        # above the configured threshold, ask the user before deleting
        answer = 'x'
        while answer not in 'yn':
            answer = input(f'{len(stale)} thumbnails to remove. Continue [y|n]? ').lower()
        if answer == 'n':
            return
    for fullname in stale:
        print('Removing thumbnail', fullname)
        os.remove(fullname)
        # remove the companion .info file (video metadata cache) if any
        info_fullname = os.path.splitext(fullname)[0] + '.info'
        if os.path.exists(info_fullname):
            os.remove(info_fullname)
# -- List of medias helpers ---------------------------------------------------
def is_media_within_dates(fullname, dates):
    """True if `fullname` is a media file and, when `dates` is an interval
    tuple (date1, date2), its date falls inside that interval."""
    if not is_media(fullname):
        return False
    if type(dates) == tuple:
        return dates[0] <= date_from_item(fullname) <= dates[1]
    return True
def sorted_listdir(filelist):
    """Sort file names case-insensitively, mimicking Windows Explorer:
    stems are compared first (space-padded to equal width), extensions after."""
    if not filelist:
        return filelist
    like_windows_explorer = True
    if not like_windows_explorer:
        return sorted(filelist, key=str.lower)
    stem_width = max(len(os.path.splitext(name)[0]) for name in filelist)
    def explorer_key(name):
        stem, ext = os.path.splitext(name.lower())
        return stem.ljust(stem_width) + ext
    return sorted(filelist, key=explorer_key)
def list_of_files(sourcedir, recursive):
    """
    Return the list of full paths for files in source directory
    (directories containing a '.nomedia' marker file are skipped)
    """
    result = []
    if recursive is False:
        names = sorted_listdir(os.listdir(sourcedir))
        if '.nomedia' not in names:
            result = [os.path.join(sourcedir, name) for name in names]
    else:
        for dirpath, _, filenames in os.walk(sourcedir):
            if '.nomedia' not in filenames:
                result.extend(os.path.join(dirpath, name)
                              for name in sorted_listdir(filenames))
    return result
def list_of_medias(args, sourcedir, recursive):
    """
    Return the list of full paths for pictures and movies in source directory
    """
    return [fullname for fullname in list_of_files(sourcedir, recursive)
            if is_media_within_dates(fullname, args.dates)]
def list_of_medias_ext(args, sourcedir):
    """
    Return the list of full paths for pictures and movies in source directory
    plus subdirectories containing media
    """
    names = sorted_listdir(os.listdir(sourcedir))
    if '.nomedia' in names:
        return []
    result = []
    for name in names:
        fullname = os.path.join(sourcedir, name)
        # keep subdirectories only when they (recursively) contain media;
        # the recycle bin is always ignored
        if os.path.isdir(fullname) and name != '$RECYCLE.BIN' and contains_media(args, fullname):
            result.append(fullname)
        elif is_media_within_dates(fullname, args.dates):
            result.append(fullname)
    return result
def contains_media(args, dirname):
    """Return True when `dirname` (recursively) contains at least one media
    file within args.dates; subtrees marked with a '.nomedia' file are
    excluded from the search."""
    for root, dirs, files in os.walk(dirname):
        if '.nomedia' in files:
            # excluded directory: prune the walk below it and keep scanning
            # the rest of the tree. The previous `else: return False` aborted
            # the whole walk here, wrongly reporting "no media" even when
            # later sibling directories contained media.
            dirs[:] = []
            continue
        for basename in files:
            if is_media_within_dates(os.path.join(root, basename), args.dates):
                return True
    # was an implicit None before: make the negative result explicit
    return False
def dispatch_post_items(list_of_post_items):
    """Split a mixed item list into (subdirectory items, media items)."""
    subdirs = []
    medias = []
    for item in list_of_post_items:
        (subdirs if type(item) is PostSubdir else medias).append(item)
    return subdirs, medias
# -- Creation of gallery element ----------------------------------------------
def create_item(args, media_fullname, sourcedir, thumbdir, key, thumbmax):
    """Build the gallery item matching `media_fullname`: an image, a video,
    or a subdirectory mosaic when the path is a directory."""
    if not os.path.isfile(media_fullname):
        return create_item_subdir(args, media_fullname, sourcedir, thumbdir, key, thumbmax)
    if is_image_file(media_fullname):
        return create_item_image(args, media_fullname, sourcedir, thumbdir, key, thumbmax)
    return create_item_video(args, media_fullname, sourcedir, thumbdir, key, thumbmax)
def create_item_image(args, media_fullname, sourcedir, thumbdir, key, thumbmax):
    """Create the thumbnail for an image file and return its PostImage item,
    or None when the image cannot be decoded."""
    basename = os.path.basename(media_fullname)
    relname = relative_name(media_fullname, sourcedir)
    thumb_basename = thumbname(relname, key)
    thumb_fullname = os.path.join(thumbdir, thumb_basename)
    try:
        info, infofmt = get_image_info(media_fullname)
        descr = basename + ': ' + infofmt
        thumbsize = size_thumbnail(info[2], info[3], thumbmax)
        make_thumbnail_image(args, media_fullname, thumb_fullname, thumbsize)
        return PostImage(None, media_fullname, '/'.join((args.thumbrep, thumb_basename)),
                         thumbsize, descr)
    except PIL.UnidentifiedImageError:
        # corrupted image
        warning('Unable to read image', media_fullname)
        return None
def create_item_video(args, media_fullname, sourcedir, thumbdir, key, thumbmax):
    """Create the thumbnail for a video file and return its PostVideo item,
    or None when ffmpeg/ffprobe cannot read the file."""
    basename = os.path.basename(media_fullname)
    relname = relative_name(media_fullname, sourcedir)
    thumb_basename = thumbname(relname, key)
    thumb_fullname = os.path.join(thumbdir, thumb_basename)
    # companion file caching the probed video information
    info_fullname = os.path.splitext(thumb_fullname)[0] + '.info'
    try:
        info, infofmt = get_video_info(media_fullname, info_fullname)
        descr = basename + ': ' + infofmt
        thumbsize = size_thumbnail(info[2], info[3], thumbmax)
        make_thumbnail_video(args, media_fullname, thumb_fullname, thumbsize, duration=info[5])
        return PostVideo(None, media_fullname, '/'.join((args.thumbrep, thumb_basename)),
                         thumbsize, descr)
    except CalledProcessError:
        # corrupted video
        warning('Unable to read video', media_fullname)
        return None
def create_item_subdir(args, media_fullname, sourcedir, thumbdir, key, thumbmax):
    """Build a PostSubdir item for a directory: its own gallery page plus a
    mosaic thumbnail; return None when the directory holds no media."""
    basename = os.path.basename(media_fullname)
    relname = relative_name(media_fullname, sourcedir)
    thumb_basename = thumbname(relname, key)
    thumb_fullname = os.path.join(thumbdir, thumb_basename)
    # subdir thumbnails keep a fixed 640x480 aspect ratio
    thumbsize = (thumbmax, int(round(thumbmax / 640 * 480)))
    if not list_of_medias_ext(args, media_fullname):
        return None
    item = PostSubdir(None, media_fullname, '/'.join((args.thumbrep, thumb_basename)),
                      thumbsize, None)
    item.htmname = os.path.join(os.path.dirname(thumbdir), relname + args.html_suffix)
    item.caption = basename if args.thumbnails.subdir_caption else ''
    _, posts = make_posts(args, media_fullname)
    item.posts = posts
    item.sublist = [entry for post in posts for entry in post.dcim]
    make_thumbnail_subdir(args, media_fullname, thumb_fullname, thumbsize, item.sublist, thumbdir)
    return item
def relative_name(media_fullname, sourcedir):
    """
    Path of `media_fullname` relative to `sourcedir`, flattened into a single
    file name: path separators and '#' become underscores, e.g.
    /top/subdir/deeper2/deepest/OCT.jpg -> deeper2_deepest_OCT.jpg
    """
    relpath = os.path.relpath(media_fullname, sourcedir)
    for char in ('\\', '/', '#'):
        relpath = relpath.replace(char, '_')
    return relpath
# -- Creation of posts --------------------------------------------------------
def make_posts(args, dirname):
    """Dispatch to the post-building strategy selected by the args flags."""
    if args.diary is not True:
        if args.bydate is False:
            return make_posts_from_subdir(args, dirname)
        return make_posts_from_subdir_and_date(args, dirname)
    if not args.sourcedir:
        return make_posts_from_diary(args)
    return make_posts_from_diary_and_dir(args)
def make_posts_from_diary(args):
    """Parse <root>/index.md and attach a thumbnail item to each post media."""
    md_filename = os.path.join(args.root, 'index.md')
    if not os.path.exists(md_filename):
        error('File not found', md_filename)
    title, posts = parse_markdown(md_filename)
    for post in posts:
        for media in post.medias:
            fullname = os.path.join(args.root, media.uri)
            item = create_item(args, fullname, args.root, args.thumbdir, 'post', 400)
            media.thumb = item.thumb
            media.thumbsize = item.thumbsize
            media.descr = item.descr
    return title, posts
def create_items_by_date(args, medias, posts):
    """Group gallery items by date, keeping only the dates required by
    args.dates ('diary' = dates present in posts, tuple = interval)."""
    if args.dates == 'diary':
        required = {post.date for post in posts}
    else:
        required = {date_from_item(media) for media in medias}
        if type(args.dates) == tuple:
            first, last = args.dates
            required = {date for date in required if first <= date <= last}
    bydate = defaultdict(list)
    for fullname in medias:
        date = date_from_item(fullname)
        if date in required:
            item = create_item(args, fullname, args.sourcedir, args.thumbdir, 'dcim', 300)
            if item:
                bydate[date].append(item)
    # within a date, items are ordered by time of day
    for itemlist in bydate.values():
        itemlist.sort(key=lambda entry: time_from_item(entry.uri))
    return bydate
def make_posts_from_diary_and_dir(args):
    """Posts from the markdown diary, completed with the medias found on
    disk; dates present on disk but absent from the diary get a synthetic
    extra post."""
    title, posts = make_posts_from_diary(args)
    medias = list_of_medias(args, args.sourcedir, args.recursive)
    bydate = create_items_by_date(args, medias, posts)
    # insert an extra post for each date found on disk but not in the diary
    for date in set(bydate) - {post.date for post in posts}:
        extra_post = Post.from_date(date)
        extra_post.extra = True
        bisect.insort(posts, extra_post)
    # several posts can share a date: only the first (daterank 1) gets dcim medias
    for post in posts:
        if post.daterank == 1 and post.date in bydate:
            post.dcim = bydate[post.date]
    return title, posts
def make_posts_from_subdir(args, dirname):
    """Build a single undated post holding every media found in `dirname`
    (plus subdirectory items when --bydir is set).

    Returns (title, posts) where posts is a one-element list.
    """
    # list of pictures and movies plus subdirectories
    if args.bydir is False:
        medias_ext = list_of_medias(args, dirname, args.recursive)
    else:
        medias_ext = list_of_medias_ext(args, dirname)
    # complete posts (items that fail to load yield None and are dropped)
    postmedias = list()
    for item in medias_ext:
        postmedia = create_item(args, item, args.sourcedir, args.thumbdir, 'dcim', 300)
        if postmedia is not None:
            postmedias.append(postmedia)
    post = Post(date='00000000', text='', medias=[])
    post.dcim = postmedias
    posts = [post]
    # fall back to the drive name when sourcedir is a drive root (basename '')
    title = os.path.basename(args.sourcedir) or os.path.splitdrive(args.sourcedir)[0]
    return title, posts
def make_posts_from_subdir_and_date(args, dirname):
    """Build posts grouped by media date; with --bydir an initial undated
    post gathers the subdirectory items."""
    if args.bydir is False:
        medias = list_of_medias(args, dirname, args.recursive)
        subdirs = []
    else:
        medias_ext = list_of_medias_ext(args, dirname)
        medias = [fullname for fullname in medias_ext if is_media(fullname)]
        subdirs = [fullname for fullname in medias_ext if not is_media(fullname)]
    # a single leading post holds all subdirectory items, if any
    posts = []
    subdir_items = []
    for fullname in subdirs:
        entry = create_item(args, fullname, args.sourcedir, args.thumbdir, 'dcim', 300)
        if entry:
            subdir_items.append(entry)
    if subdir_items:
        header = Post(date='00000000', text='', medias=[])
        header.dcim = subdir_items
        posts.append(header)
    # then one post per date
    bydate = create_items_by_date(args, medias, posts)
    for date in sorted(bydate):
        datepost = Post.from_date(date)
        datepost.dcim = bydate[datepost.date]
        posts.append(datepost)
    # fall back to the drive name when sourcedir is a drive root
    title = os.path.basename(args.sourcedir) or os.path.splitdrive(args.sourcedir)[0]
    return title, posts
# -- Creation of html page from directory tree --------------------------------
def create_gallery(args):
    """Generate the html gallery, then remove obsolete pages and thumbnails."""
    title, posts = make_posts(args, args.sourcedir)
    print_html(args, posts, title, os.path.join(args.dest, args.rootname), 'regular')
    purge_htmlfiles(args, posts)
    # pure diary mode (no sourcedir): dcim thumbnails are not relevant
    diary_only = bool(args.diary and not args.sourcedir)
    purge_thumbnails(args, args.thumbdir, posts, diary=diary_only)
# -- Creation of diary from medias --------------------------------------------
def create_diary(args):
    """Create <root>/index.md with one empty post per date having medias."""
    medias = list_of_medias(args, args.sourcedir, args.recursive)
    # --dates diary is rejected earlier for the --create command
    assert args.dates != 'diary'
    required_dates = {date_from_item(media) for media in medias}
    if type(args.dates) == tuple:
        first, last = args.dates
        required_dates = {date for date in required_dates if first <= date <= last}
    posts = [Post.from_date(date) for date in sorted(required_dates)]
    os.makedirs(args.root, exist_ok=True)
    print_markdown(posts, args.sourcedir, os.path.join(args.root, 'index.md'))
# -- Export to blogger---------------------------------------------------------
def online_images_url(args):
    """Scrape the blogger page (url or local file given by args.urlblogger)
    and return (online_images, online_videos):
    - online_images: dict basename -> (full size url, thumbnail url)
    - online_videos: list of iframe snippets, in page order
    """
    try:
        if args.urlblogger.startswith(('http:', 'https:')):
            with urlopen(args.urlblogger) as u:
                buffer = u.read()
        else:
            with open(args.urlblogger, 'rb') as f:
                buffer = f.read()
    except Exception:
        # was a bare `except:`: narrowed so SystemExit/KeyboardInterrupt
        # are no longer swallowed (error() exits the program)
        error('Unable to read url', args.urlblogger)
    buffer = buffer.decode('utf-8')
    online_images = dict()
    for match in re.finditer('<div class="separator"((?!<div).)*?</div>', buffer, flags=re.DOTALL):
        div_separator = match.group(0)
        # NOTE(review): strips a space-like character before parsing; looks
        # like it targets non-breaking spaces in blogger markup — verify
        div_separator = div_separator.replace(' ', '')
        elem_div = objectify.fromstring(div_separator)
        for elem_a in elem_div.iterchildren(tag='a'):
            href = elem_a.get("href")
            thumb = elem_a.img.get("src")
            online_images[os.path.basename(href)] = (href, thumb)
    # video insertion relies only on video order
    online_videos = list()
    for match in re.finditer('<iframe allowfullscreen="allowfullscreen".*?</iframe>', buffer, flags=re.DOTALL):
        online_videos.append(match.group(0))
    return online_images, online_videos
def compare_image_buffers(imgbuf1, imgbuf2):
    """
    return True if images read on file are identical, False otherwise
    """
    with io.BytesIO(imgbuf1) as stream1, io.BytesIO(imgbuf2) as stream2:
        difference = ImageChops.difference(Image.open(stream1), Image.open(stream2))
        # getbbox() is None when the difference image is entirely black
        return difference.getbbox() is None
def check_images(args, posts, online_images):
    """Check that every image referenced by the posts is present online and
    identical to the local file. Return False when at least one image is
    absent online (differences and unreadable urls are only reported)."""
    result = True
    for post in posts:
        for media in post.medias:
            if type(media) is PostImage:
                if media.basename in online_images:
                    with open(os.path.join(args.root, media.uri), 'rb') as f:
                        imgbuf1 = f.read()
                    try:
                        with urlopen(online_images[media.basename][0]) as u:
                            imgbuf2 = u.read()
                    except FileNotFoundError:
                        print('File not found', online_images[media.basename][0])
                        # was the no-op expression `next`: without `continue`
                        # the comparison below ran with imgbuf2 unbound or
                        # left over from a previous iteration
                        continue
                    if compare_image_buffers(imgbuf1, imgbuf2) is False:
                        print('Files are different, upload', media.basename)
                    else:
                        # was wrapped in an always-true `if 1:` debug toggle
                        print('File already online', media.basename)
                else:
                    print('File is absent, upload', media.basename)
                    result = False
            elif type(media) is PostVideo:
                # no check for the moment
                print('Video not checked', media.basename)
            else:
                assert False
    return result
def compose_blogger_html(args, title, posts, imgdata, online_videos):
    """ Compose html with blogger image urls
    """
    for post in posts:
        for media in post.medias:
            if type(media) is PostImage:
                if media.uri not in imgdata:
                    print('Image missing: ', media.uri)
                    continue
                img_url, resized_url = imgdata[media.uri]
                media.uri = img_url
                media.resized_url = resized_url
            elif type(media) is PostVideo:
                # videos are matched purely by order of appearance
                if online_videos:
                    media.iframe = online_videos.pop(0)
                else:
                    print('Video missing: ', media.uri)
            else:
                assert False
    return print_html(args, posts, title, '', target='blogger')
def prepare_for_blogger(args):
    """
    Export blogger html to clipboard.
    If --full, export complete html, otherwise export html extract ready to
    paste into blogger edit mode.
    """
    title, posts = parse_markdown(os.path.join(args.root, 'index.md'))
    online_images, online_videos = online_images_url(args)
    if args.check_images and check_images(args, posts, online_images) is False:
        pass
    html = compose_blogger_html(args, title, posts, online_images, online_videos)
    if args.full is False:
        # keep only the body, strip scripts, and prepend the inline style
        html = re.search('<body>(.*)?</body>', html, flags=re.DOTALL).group(1)
        html = re.sub('<script>.*?</script>', '', html, flags=re.DOTALL)
        html = STYLE.replace('%%', '%') + html
    if args.dest:
        with open(args.dest, 'wt', encoding='utf-8') as f:
            f.write(html)
    else:
        clipboard.copy(html)
# -- Other commands -----------------------------------------------------------
def idempotence(args):
    """
    For testing identity between a diary file and the file obtained after
    reading and printing it. See testing.
    """
    source = os.path.join(args.root, 'index.md')
    title, posts = parse_markdown(source)
    print_markdown(posts, title, os.path.join(args.dest, 'index.md'))
# -- Configuration file ------------------------------------------------------
# The following docstring is used to create the configuration file.
# Default configuration written to <root>/.config.ini on first run or --resetcfg.
# Every key read by getconfig() must appear here (threshold_htmlfiles was missing).
CONFIG_DEFAULTS = """\
[source]
; source directory
; value: valid path
sourcedir = .
; one web page per directory
; value: true or false
bydir = false
; dispatch medias by dates
; value: true or false
bydate = false
; include text and medias from diary file
; value: true or false
diary = false
; include subdirectories recursively (used when bydir is false)
; value: true or false
recursive = false
; interval of dates to include
; value: source|diary|yyyymmdd-yyyymmdd or empty (= source)
dates =
; github Pages compatibility (.html extension and no dot in directory names)
; value: true or false
github_pages = false
[thumbnails]
; specifies whether or not the gallery displays media description (size, dimension, etc)
; value: true or false
media_description = true
; specifies whether subdir captions are empty or the name of the subdir
; value: true or false
subdir_caption = true
; timestamp of thumbnail in video
; value: number of seconds
thumbdelay = 5
; maximum number of thumbnails to remove without user confirmation
; value: integer
threshold_thumbs = 10
; maximum number of html files to remove without user confirmation
; value: integer
threshold_htmlfiles = 3
[photobox]
; Allows to navigate between first and last images
; value: true or false
loop = false
; Show gallery thumbnails below the presented photo
; value: true or false
thumbs = true
; Should autoplay on first time or not
; value: true or false
autoplay = false
; Autoplay interval (less than 1000 will hide the autoplay button)
; value: milliseconds
time = 3000
; Disable/enable mousewheel image zooming
; value: true or false
zoomable = true
; Allow rotation of the image
; value: true or false
rotatable = true
; Change image using mousewheel left/right
; value: true or false
wheelNextPrev = true
"""
class MyConfigParser(ConfigParser):
    """ConfigParser accepting ';' inline comments and turning read failures
    into fatal, user-visible errors."""

    def __init__(self):
        super().__init__(inline_comment_prefixes=(';',))

    def error(self, section, entry):
        # error() exits the program with a dedicated return code
        error('Missing or incorrect config value:', '[%s]%s' % (section, entry))

    def getint(self, section, entry, default=None):
        """Read an int; with `default`, missing entries fall back instead of failing."""
        try:
            if default is None:
                return super().getint(section, entry)
            return super().getint(section, entry, raw=True, vars=None, fallback=default)
        except Exception as exc:
            print(exc)
            self.error(section, entry)

    def getboolean(self, section, entry, default=None):
        """Read a boolean; with `default`, missing entries fall back instead of failing."""
        try:
            if default is None:
                return super().getboolean(section, entry)
            return super().getboolean(section, entry, raw=True, vars=None, fallback=default)
        except Exception as exc:
            print(exc)
            self.error(section, entry)
def configfilename(params):
    """Full path of the hidden configuration file stored in the gallery root."""
    basename = '.config.ini'
    return os.path.join(params.root, basename)
def createconfig(config_filename):
    """Write the default configuration to `config_filename` (overwriting it)."""
    with open(config_filename, 'wt') as f:
        # write() on the whole string: same output as writelines() on it
        f.write(CONFIG_DEFAULTS)
def read_config(params):
    """Load the configuration file found in the gallery root, creating it
    first when absent or when --resetcfg was given. Exits via error() on
    failure."""
    config_filename = configfilename(params)
    try:
        if not os.path.exists(config_filename) or params.resetcfg:
            createconfig(config_filename)
    except OSError:
        # was a bare `except:`: restricted to I/O failures so SystemExit and
        # KeyboardInterrupt propagate normally
        error('Error creating configuration file')
    try:
        getconfig(params, config_filename)
    except Exception as e:
        error('Error reading configuration file.', str(e), 'Use --resetcfg')
def getconfig(options, config_filename):
    """Read `config_filename` into the options.source / options.thumbnails /
    options.photobox attribute groups."""
    class Section:
        pass
    options.source = source = Section()
    options.thumbnails = thumbnails = Section()
    options.photobox = photobox = Section()
    config = MyConfigParser()
    config.read(config_filename)
    # [source]
    source.sourcedir = config.get('source', 'sourcedir')
    source.bydir = config.getboolean('source', 'bydir')
    source.bydate = config.getboolean('source', 'bydate')
    source.diary = config.getboolean('source', 'diary')
    source.recursive = config.getboolean('source', 'recursive')
    source.dates = config.get('source', 'dates')
    source.github_pages = config.getboolean('source', 'github_pages', default=False)
    # [thumbnails]
    thumbnails.media_description = config.getboolean('thumbnails', 'media_description')
    thumbnails.subdir_caption = config.getboolean('thumbnails', 'subdir_caption')
    thumbnails.thumbdelay = config.getint('thumbnails', 'thumbdelay')
    thumbnails.threshold_thumbs = config.getint('thumbnails', 'threshold_thumbs')
    thumbnails.threshold_htmlfiles = config.getint('thumbnails', 'threshold_htmlfiles', default=3)
    # [photobox]
    photobox.loop = config.getboolean('photobox', 'loop')
    photobox.thumbs = config.getboolean('photobox', 'thumbs')
    photobox.autoplay = config.getboolean('photobox', 'autoplay')
    photobox.time = config.getint('photobox', 'time')
    photobox.zoomable = config.getboolean('photobox', 'zoomable')
    photobox.rotatable = config.getboolean('photobox', 'rotatable')
    photobox.wheelNextPrev = config.getboolean('photobox', 'wheelNextPrev')
def setconfig(cfgname, section, key, value):
    """Set one key in the configuration file and rewrite the file."""
    config = MyConfigParser()
    config.read(cfgname)
    config.set(section, key, value)
    with open(cfgname, 'wt') as stream:
        config.write(stream)
def setconfig_cmd(args):
    """Apply the --setcfg command; args.setcfg holds (section, key, value)."""
    setconfig(configfilename(args), *args.setcfg)
def update_config(args):
    """Write the command-line creation parameters back into the config file,
    patching lines in place to preserve comments and layout (only [source]
    entries can be modified from the command line)."""
    updates = {
        'sourcedir': args.sourcedir,
        'bydir': BOOL[args.bydir],
        'bydate': BOOL[args.bydate],
        'diary': BOOL[args.diary],
        'recursive': BOOL[args.recursive],
        'dates': args.dates,
        'github_pages': BOOL[args.github_pages],
    }
    cfgname = configfilename(args)
    with open(cfgname) as f:
        cfglines = [line.strip() for line in f.readlines()]
    # manual line patching rather than ConfigParser, to keep comments
    for key, value in updates.items():
        for index, line in enumerate(cfglines):
            if line.startswith(key):
                cfglines[index] = f'{key} = {value}'
                break
    with open(cfgname, 'wt') as f:
        f.writelines(line + '\n' for line in cfglines)
# -- Error handling -----------------------------------------------------------
def warning(*msg):
    """Print the joined message parts in bright yellow."""
    text = ' '.join(msg)
    print(colorama.Fore.YELLOW + colorama.Style.BRIGHT + text,
          colorama.Style.RESET_ALL)
# Every error message error must be declared here to give a return code to the error
# (errorcode() maps a message to its 1-based line number, used as the process
# exit status; the first argument passed to error() must match a line exactly)
ERRORS = '''\
File not found
Directory not found
No date in post
Incorrect date value:
Posts are not ordered
Unable to read url
No image source (--sourcedir)
No blogger url (--url)
Missing or incorrect config value:
Error creating configuration file
Error reading configuration file.
Incorrect date format
Incorrect parameters:
'''
def errorcode(msg):
    """Exit status for `msg`: its 1-based position in the ERRORS table."""
    declared = ERRORS.splitlines()
    return declared.index(msg) + 1
def error(*msg):
    """Print the message in bright red and exit with its declared error code
    (the first message part must be listed in ERRORS)."""
    text = ' '.join(msg)
    print(colorama.Fore.RED + colorama.Style.BRIGHT + text,
          colorama.Style.RESET_ALL)
    sys.exit(errorcode(msg[0]))
# -- Main ---------------------------------------------------------------------
# maps a bool to its config-file literal: BOOL[False] == 'false', BOOL[True] == 'true'
BOOL = ('false', 'true')
def parse_command_line(argstring):
    """
    Parse `argstring` (the full command line joined into one string) and
    return the argparse namespace, post-processed:
    - tri-state 'true'/'false'/None options are coerced to booleans,
    - args.dates defaults to 'source',
    - args.root receives the directory given to whichever command was used.
    Exits when argstring is None or when --update is mixed with creation
    parameters.
    """
    parser = argparse.ArgumentParser(description=None, usage=USAGE)
    agroup = parser.add_argument_group('Commands')
    xgroup = agroup.add_mutually_exclusive_group()
    xgroup.add_argument('--gallery', help='source in --sourcedir',
                        action='store', metavar='<root-dir>')
    agroup.add_argument('--update', help='updates gallery with parameters in config file',
                        action='store', metavar='<root-dir>')
    xgroup.add_argument('--create', help='create journal from medias in --sourcedir',
                        action='store', metavar='<root-dir>')
    # testing
    xgroup.add_argument('--resetcfg', help='reset config file to defaults',
                        action='store', metavar='<root-dir>')
    xgroup.add_argument('--setcfg', help=argparse.SUPPRESS,
                        action='store', nargs=4, metavar='<root-dir>')
    xgroup.add_argument('--idem', help=argparse.SUPPRESS,
                        action='store', metavar='<root-dir>')
    # blogger
    xgroup.add_argument('--blogger',
                        help='input md, html blogger ready in clipboard',
                        action='store', metavar='<root-dir>')
    agroup = parser.add_argument_group('Parameters')
    agroup.add_argument('--bydir', help='organize gallery by subdirectory',
                        action='store', default=None, choices=BOOL)
    agroup.add_argument('--bydate', help='organize gallery by date',
                        action='store', default=None, choices=BOOL)
    agroup.add_argument('--diary', help='organize gallery using markdown file diary',
                        action='store', default=None, choices=BOOL)
    agroup.add_argument('--recursive', help='--sourcedir scans recursively',
                        action='store', default=None, choices=BOOL)
    agroup.add_argument('--dates', help='dates interval',
                        action='store', default=None)
    agroup.add_argument('--sourcedir', help='media directory',
                        action='store', default=None)
    agroup.add_argument('--github_pages', help='github Pages compatibility',
                        action='store', default=None, choices=BOOL)
    agroup.add_argument('--dest', help='output directory',
                        action='store')
    agroup.add_argument('--forcethumb', help='force calculation of thumbnails',
                        action='store_true', default=False)
    agroup.add_argument('--full', help='full html (versus blogger ready html)',
                        action='store_true', default=False)
    agroup.add_argument('--check', dest='check_images', help='check availability of medias on blogger',
                        action='store_true')
    agroup.add_argument('--url', dest='urlblogger', help='blogger post url',
                        action='store')
    if argstring is None:
        print('Type "galerie -h" for help')
        sys.exit(1)
    else:
        args = parser.parse_args(argstring.split())
    # --update takes all creation parameters from the config file
    if args.update and (args.bydir or args.bydate or args.diary or args.sourcedir or
                        args.recursive or args.dates or args.github_pages):
        error('Incorrect parameters:',
              '--update cannot be used with creation parameters, use explicit command')
    # coerce the tri-state string options to booleans (None -> False)
    args.bydir = args.bydir == 'true'
    args.bydate = args.bydate == 'true'
    args.diary = args.diary == 'true'
    args.recursive = args.recursive == 'true'
    args.dates = 'source' if (args.dates is None) else args.dates
    args.github_pages = args.github_pages == 'true'
    # the root directory is the value of whichever command option was given
    args.root = (
        args.create or args.gallery or args.update
        or args.blogger or args.idem or args.resetcfg
    )
    if args.setcfg:
        # --setcfg <root-dir> <section> <key> <value>
        args.root = args.setcfg[0]
        args.setcfg = args.setcfg[1:]
    return args
def setup_part1(args):
    """
    Made before reading config file (config file located in args.root).
    Check and normalize root path.
    """
    args.rootarg = args.root
    # a root argument carrying an extension names the output html file;
    # the gallery root directory is then its parent
    if os.path.splitext(args.rootarg)[1] != '':
        args.root = os.path.dirname(args.root)
    if args.root:
        args.root = os.path.abspath(args.root)
        if not os.path.isdir(args.root):
            if args.gallery:
                os.mkdir(args.root)
            else:
                error('Directory not found', args.root)
def setup_part2(args):
    """
    Made after reading config file.
    Check for ffmpeg in path.
    Create .thumbnails dir if necessary and create .nomedia in it.
    Copy photobox file to destination dir.
    Handle priority between command line and config file.
    """
    if args.update:
        # --update: every creation parameter comes from the config file
        args.sourcedir = args.source.sourcedir
        args.bydir = args.source.bydir
        args.bydate = args.source.bydate
        args.diary = args.source.diary
        args.recursive = args.source.recursive
        args.dates = args.source.dates
        args.github_pages = args.source.github_pages
    elif args.gallery:
        # --gallery: the command line wins and is persisted in the config file
        args.source.sourcedir = args.sourcedir
        args.source.bydir = args.bydir
        args.source.bydate = args.bydate
        args.source.diary = args.diary
        args.source.recursive = args.recursive
        args.source.dates = args.dates
        args.source.github_pages = args.github_pages
        update_config(args)
    if args.github_pages:
        args.html_suffix = '.html'
    else:
        args.html_suffix = '.htm'
    rootext = os.path.splitext(args.rootarg)[1]
    if rootext:
        # an explicit html file name was given as the root argument
        args.rootname = os.path.basename(args.rootarg)
    else:
        args.rootname = 'index' + args.html_suffix
    if args.sourcedir:
        args.sourcedir = os.path.abspath(args.sourcedir)
        if os.path.splitdrive(args.sourcedir)[0]:
            # normalize the drive letter to upper case (Windows)
            drive, rest = os.path.splitdrive(args.sourcedir)
            args.sourcedir = drive.upper() + rest
        if not os.path.isdir(args.sourcedir):
            error('Directory not found', args.sourcedir)
    else:
        if args.gallery and args.diary is False and args.update is None:
            error('Directory not found', 'Use --sourcedir')
    if args.dest:
        args.dest = os.path.abspath(args.dest)
    if args.dest is None:
        args.dest = args.root
    if args.blogger and args.urlblogger is None:
        error('No blogger url (--url)')
    if args.gallery or args.update:
        # check for ffmpeg and ffprobe in path
        for exe in ('ffmpeg', 'ffprobe'):
            try:
                check_output([exe, '-version'])
            except FileNotFoundError:
                error('File not found', exe)
        if args.github_pages:
            args.thumbrep = 'thumbnails'
        else:
            args.thumbrep = '.thumbnails'
        args.thumbdir = os.path.join(args.dest, args.thumbrep)
        if not os.path.exists(args.thumbdir):
            os.mkdir(args.thumbdir)
            # .nomedia keeps the thumbnail directory out of the media scans
            open(os.path.join(args.thumbdir, '.nomedia'), 'a').close()
        favicondst = os.path.join(args.dest, 'favicon.ico')
        if not os.path.isfile(favicondst):
            faviconsrc = os.path.join(os.path.dirname(__file__), 'favicon.ico')
            shutil.copyfile(faviconsrc, favicondst)
        photoboxdir = os.path.join(args.dest, 'photobox')
        if not os.path.exists(photoboxdir):
            photoboxsrc = os.path.join(os.path.dirname(__file__), 'photobox')
            shutil.copytree(photoboxsrc, photoboxdir)
    if args.dates:
        if not(args.gallery or args.create):
            # silently ignored for the moment, otherwise all other commands will
            # launch a warning or an error on the default --dates value
            pass
        if args.dates == 'source':
            pass
        elif args.dates == 'diary':
            if args.create:
                error('Incorrect date format', args.dates)
        elif re.match(r'\d+-\d+', args.dates):
            # explicit yyyymmdd-yyyymmdd interval
            date1, date2 = args.dates.split('-')
            if validate_date(date1) and validate_date(date2):
                args.dates = date1, date2
            else:
                error('Incorrect date format', args.dates)
        else:
            error('Incorrect date format', args.dates)
def main(argstring=None):
    """Program entry point.

    Parses the command line (optionally from *argstring*), performs the
    two-phase setup around configuration loading, then dispatches to the
    command selected by the parsed flags. A Ctrl-C during the command is
    reported as a warning instead of a traceback.
    """
    colorama.init()
    args = parse_command_line(argstring)
    setup_part1(args)
    read_config(args)
    setup_part2(args)
    # First matching flag wins; order mirrors the original if/elif chain.
    dispatch = (
        (args.gallery or args.update, create_gallery),
        (args.create, create_diary),
        (args.blogger, prepare_for_blogger),
        (args.idem, idempotence),
        (args.setcfg, setconfig_cmd),
    )
    try:
        for selected, command in dispatch:
            if selected:
                command(args)
                break
    except KeyboardInterrupt:
        warning('Interrupted by user.')
if __name__ == '__main__':
    # Re-join the raw argv so main() receives a single argstring, matching
    # the argstring=None signature used for programmatic/test invocation.
    main(' '.join(sys.argv[1:]))
|
normal
|
{
"blob_id": "6018f35afc6646d0302ca32de649ffe7d544a765",
"index": 3377,
"step-1": "<mask token>\n\n\nclass Post:\n\n def __init__(self, date, text, medias):\n self.date = date\n self.text = text\n self.medias = medias\n self.dcim = []\n self.daterank = 0\n self.extra = False\n\n def __lt__(self, other):\n return self.date < other.date\n\n @classmethod\n def from_markdown(cls, post):\n m = re.match('\\\\[(\\\\d\\\\d\\\\d\\\\d/\\\\d\\\\d/\\\\d\\\\d)\\\\]\\\\n*', post[0])\n if m:\n date = m.group(1).replace('/', '')\n if not validate_date(date):\n error('Incorrect date value:', date)\n del post[0]\n else:\n error('No date in post', ' '.join(post))\n while post and not post[0].strip():\n del post[0]\n text = ''\n while post and not re.match('!?\\\\[\\\\]', post[0]):\n text += post[0]\n del post[0]\n text = re.sub('\\\\n\\\\n$', '\\n', text)\n medias = list()\n while post and (match := re.match('!?\\\\[\\\\]\\\\((.*)\\\\)', post[0])):\n media = match.group(1)\n caption = None\n del post[0]\n if post and not re.match('!?\\\\[\\\\]', post[0]):\n caption = post[0].strip()\n del post[0]\n if match.group(0)[0] == '!':\n medias.append(PostImage(caption, media))\n else:\n medias.append(PostVideo(caption, media))\n return cls(date, text, medias)\n\n @classmethod\n def from_date(cls, date):\n dt = datetime.datetime.strptime(date, '%Y%m%d')\n datetext = dt.strftime('%A %d %B %Y').capitalize()\n post = cls(date, text=datetext, medias=[])\n post.daterank = 1\n return post\n\n def to_html(self, args, target='regular'):\n if target == 'regular':\n if args.diary:\n return self.to_html_diary(args)\n else:\n return self.to_html_regular(args)\n if target == 'blogger':\n return self.to_html_blogger()\n\n def to_html_regular(self, args):\n html = list()\n if self.text:\n html.append(markdown.markdown(self.text))\n subdirs, dcim = dispatch_post_items(self.dcim)\n if self.dcim:\n html.append(SEP)\n for media in subdirs:\n html.append(media.to_html_dcim(args))\n if dcim:\n html.append(f'<div id=\"gallery-dcim-{self.date}-{self.daterank}\">')\n for media in 
dcim:\n html.append(media.to_html_dcim(args))\n html.append('</div>')\n html.append(SEP)\n return html\n\n def to_html_diary(self, args):\n html = list()\n if self.extra:\n html.append('<div class=\"extra\">')\n if self.text:\n html.append(markdown.markdown(self.text))\n if self.medias:\n html.append(f'<div id=\"gallery-blog-{self.date}-{self.daterank}\">')\n for media in self.medias:\n html.append(media.to_html_post(args))\n html.append('</div>')\n _, dcim = dispatch_post_items(self.dcim)\n if dcim:\n html.append(f'<div id=\"gallery-dcim-{self.date}-{self.daterank}\">')\n html.append(SEP)\n for media in dcim:\n html.append(media.to_html_dcim(args))\n html.append('</div>')\n html.append(SEP)\n if self.extra:\n html.append('</div>')\n return html\n\n def to_html_blogger(self):\n html = list()\n html.append(markdown.markdown(self.text))\n for image in self.medias:\n html.append(image.to_html_blogger())\n html.append(SEP)\n return html\n\n\nclass PostItem:\n\n def __init__(self, caption, uri, thumb=None, thumbsize=None, descr=''):\n self.caption = caption\n self.uri = uri\n self.basename = os.path.basename(uri)\n self.thumb = thumb\n self.thumbsize = thumbsize\n self.descr = descr\n self.resized_url = None\n\n\nclass PostImage(PostItem):\n\n def to_markdown(self):\n if not self.caption:\n return '' % (self.uri,)\n else:\n return '\\n%s' % (self.uri, self.caption)\n\n def to_html_post(self, args):\n descr = self.descr if args.thumbnails.media_description else ''\n if not self.caption:\n return IMGPOST % (self.uri, self.thumb, *self.thumbsize, descr)\n else:\n return IMGPOSTCAPTION % (self.uri, self.thumb, *self.thumbsize,\n descr, self.caption)\n\n def to_html_dcim(self, args):\n descr = self.descr if args.thumbnails.media_description else ''\n return IMGDCIM % (relative_url(self.uri, args.root), self.thumb, *\n self.thumbsize, descr)\n\n def to_html_blogger(self):\n if not self.caption:\n return BIMGPAT % (self.uri, self.resized_url)\n else:\n return 
f'{BIMGPAT}\\n{CAPTION_PAT}' % (self.uri, self.\n resized_url, self.caption)\n\n\nclass PostVideo(PostItem):\n\n def to_markdown(self):\n if not self.caption:\n return '[](%s)' % (self.uri,)\n else:\n return '[](%s)\\n%s' % (self.uri, self.caption)\n\n def to_html_post(self, args):\n descr = self.descr if args.thumbnails.media_description else ''\n if not self.caption:\n return VIDPOST % (self.uri, self.thumb, *self.thumbsize, descr)\n else:\n return VIDPOSTCAPTION % (self.uri, self.thumb, *self.thumbsize,\n descr, self.caption)\n\n def to_html_dcim(self, args):\n descr = self.descr if args.thumbnails.media_description else ''\n return VIDDCIM % (relative_url(self.uri, args.root), self.thumb, *\n self.thumbsize, descr)\n\n def to_html_blogger(self):\n x = f'<p style=\"text-align: center;\">{self.iframe}</p>'\n if not self.caption:\n return x\n else:\n return f'%s\\n{CAPTION_PAT}' % (x, self.caption)\n\n\nclass PostSubdir(PostItem):\n\n def to_html_dcim(self, args):\n basename = os.path.basename(self.htmname)\n posts = self.posts\n title = self.caption\n print_html(args, posts, title, self.htmname)\n if not self.caption:\n return DIRPOST % (basename, self.thumb, *self.thumbsize)\n else:\n return DIRPOSTCAPTION % (basename, self.thumb, *self.thumbsize,\n self.caption)\n\n\n<mask token>\n\n\ndef parse_markdown(filename):\n \"\"\"\n Generate Post objects from markdown. 
Date must be present in each post and\n posts must be ordrered by date.\n \"\"\"\n if not os.path.exists(filename):\n error('File not found', filename)\n posts = list()\n with open(filename, encoding='utf-8') as f:\n line = next(f)\n if line.startswith('# '):\n title = line[2:].strip()\n record = []\n next(f)\n else:\n title = None\n record = [line]\n for line in f:\n if not line.startswith('___'):\n record.append(line)\n else:\n posts.append(Post.from_markdown(record))\n record = []\n daterank = defaultdict(int)\n for post in posts:\n daterank[post.date] += 1\n post.daterank = daterank[post.date]\n for post1, post2 in zip(posts[:-1], posts[1:]):\n if post1.date > post2.date:\n error('Posts are not ordered', f'{post1.date} > {post2.date}')\n return title, posts\n\n\ndef print_markdown(posts, title, fullname):\n with open(fullname, 'wt', encoding='utf-8') as fdst:\n print(f'# {title}\\n', file=fdst)\n for post in posts:\n date = f'[{post.date[0:4]}/{post.date[4:6]}/{post.date[6:8]}]'\n print(date, file=fdst)\n if post.text:\n print(file=fdst)\n for line in post.text.splitlines():\n if not line:\n print(file=fdst)\n else:\n for chunk in textwrap.wrap(line, width=78):\n print(chunk, file=fdst)\n if post.medias:\n print(file=fdst)\n for media in post.medias:\n print(media.to_markdown(), file=fdst)\n print('______', file=fdst)\n\n\n<mask token>\n\n\ndef is_image_file(name):\n return os.path.splitext(name)[1].lower() in ('.jpg', '.jpeg', '.png',\n '.gif', '.bmp', '.webp', '.tif')\n\n\n<mask token>\n\n\ndef is_media(name):\n return is_image_file(name) or is_video_file(name)\n\n\n<mask token>\n\n\ndef date_from_name(name):\n if (match := re.search('(?:\\\\D|^)(\\\\d{8})(?:\\\\D|$)', name, re.ASCII)):\n digits = match.group(1)\n if validate_date(digits):\n return digits\n return None\n\n\ndef date_from_item(filename):\n if (date := date_from_name(filename)):\n return date\n else:\n timestamp = os.path.getmtime(filename)\n return 
datetime.datetime.fromtimestamp(timestamp).strftime('%Y%m%d')\n\n\ndef time_from_name(name):\n if (match := re.search('(?:\\\\D|^)(\\\\d{8})\\\\D(\\\\d{6})(?:\\\\D|$)', name,\n re.ASCII)):\n digits = match.group(2)\n hour, minute, second = int(digits[0:2]), int(digits[2:4]), int(digits\n [4:6])\n if 0 <= hour < 24 and 0 <= minute < 60 and 0 <= second < 60:\n return digits\n return None\n\n\ndef time_from_item(filename):\n if (time := time_from_name(filename)):\n return time\n else:\n timestamp = os.path.getmtime(filename)\n return datetime.datetime.fromtimestamp(timestamp).strftime('%H%M%S')\n\n\n<mask token>\n\n\ndef get_image_info(filename):\n date = date_from_item(filename)\n time = time_from_item(filename)\n img = Image.open(filename)\n width, height = img.size\n size = round(os.path.getsize(filename) / 1000000.0, 1)\n return (date, time, width, height, size\n ), f'{date} {time}, dim={width}x{height}, {size} MB'\n\n\ndef get_video_info(filename, info_fullname):\n if os.path.exists(info_fullname):\n with open(info_fullname) as f:\n info = f.readline().split()\n date, time, width, height, size, duration, fps = info[0], info[1], int(\n info[2]), int(info[3]), float(info[4]), int(info[5]), float(info[6]\n )\n formatted_info = format_video_info(date, time, width, height, size,\n duration, fps)\n return (date, time, width, height, size, duration, fps), formatted_info\n else:\n info, formatted_info = make_video_info(filename, info_fullname)\n with open(info_fullname, 'wt') as f:\n print(' '.join([str(_) for _ in info]), file=f)\n return info, formatted_info\n\n\ndef make_video_info(filename, info_fullname):\n date = date_from_item(filename)\n time = time_from_item(filename)\n command = [*FFPROBE_CMD.split(), filename]\n try:\n output = check_output(command, stderr=STDOUT).decode()\n width, height, fps, duration = parse_ffprobe_output(output)\n size = round(os.path.getsize(filename) / 1000000.0, 1)\n output = format_video_info(date, time, width, height, size,\n 
duration, fps)\n except CalledProcessError as e:\n output = e.output.decode()\n warning(output)\n raise\n return (date, time, width, height, size, duration, fps), output\n\n\ndef parse_ffprobe_output(ffprobe_output):\n match = re.match(\n '(\\\\d+),(\\\\d+),(\\\\d+)/(\\\\d+),(\\\\d+/\\\\d+).*\\\\s(\\\\d+\\\\.\\\\d+)',\n ffprobe_output, re.DOTALL)\n width = int(match.group(1))\n height = int(match.group(2))\n fps = round(int(match.group(3)) / int(match.group(4)), 1)\n duration = round(float(match.group(6)))\n return width, height, fps, duration\n\n\ndef format_video_info(date, time, width, height, size, duration, fps):\n return (\n f'{date} {time}, dim={width}x{height}, {format_duration(duration)}, fps={fps}, {size} MB'\n )\n\n\n<mask token>\n\n\ndef size_thumbnail(width, height, maxdim):\n if width >= height:\n return maxdim, int(round(maxdim * height / width))\n else:\n return int(round(maxdim * width / height)), maxdim\n\n\ndef make_thumbnail_image(args, image_name, thumb_name, size):\n if os.path.exists(thumb_name) and args.forcethumb is False:\n pass\n else:\n print('Making thumbnail:', thumb_name)\n create_thumbnail_image(image_name, thumb_name, size)\n\n\ndef create_thumbnail_image(image_name, thumb_name, size):\n imgobj = Image.open(image_name)\n if imgobj.mode != 'RGBA' and image_name.endswith('.jpg') and not (\n image_name.endswith('.gif') and imgobj.info.get('transparency')):\n imgobj = imgobj.convert('RGBA')\n imgobj.thumbnail(size, Image.LANCZOS)\n imgobj = imgobj.convert('RGB')\n imgobj.save(thumb_name)\n\n\ndef make_thumbnail_video(args, video_name, thumb_name, size, duration):\n if os.path.exists(thumb_name) and args.forcethumb is False:\n pass\n else:\n print('Making thumbnail:', thumb_name)\n create_thumbnail_video(args, video_name, thumb_name, size, duration)\n\n\n<mask token>\n\n\ndef create_thumbnail_video(args, filename, thumbname, size, duration):\n delay = min(duration - 1, args.thumbnails.thumbdelay)\n sizearg = '%dx%d' % size\n command = 
(\n 'ffmpeg -y -v error -itsoffset -%d -i \"%s\" -vcodec mjpeg -vframes 1 -an -f rawvideo -s %s \"%s\"'\n )\n command = command % (delay, filename, sizearg, thumbname)\n result = os.system(command)\n try:\n img1 = Image.open(thumbname)\n except:\n warning('Unable to save thumbnail for', filename)\n return\n img2 = Image.open(io.BytesIO(base64.b64decode(VIDEO_ICON)))\n width, height = img1.size\n img1.paste(img2, (6, height - 20 - 6), None)\n img1.save(thumbname)\n\n\n<mask token>\n\n\ndef create_thumbnail_subdir(subdir_name, thumb_name, size, items, thumbdir):\n\n def size_thumbnail(width, height, xmax, ymax):\n width2 = xmax\n height2 = int(round(xmax * height / width))\n if height2 < ymax:\n width2 = int(round(ymax * width / height))\n height2 = ymax\n return width2, height2\n thumblist = [os.path.basename(item.thumb) for item in items]\n widthnum, heightnum, width, height, offsetx, offsety = mosaic_geometry(size\n , thumblist)\n thumbnum = widthnum * heightnum\n img = Image.new('RGB', size, SUBDIR_BACKCOL)\n for ind, thumb in enumerate(thumblist[:min(thumbnum, len(thumblist))]):\n row = ind // widthnum\n col = ind % widthnum\n img2 = Image.open(os.path.join(thumbdir, thumb))\n w, h = size_thumbnail(*img2.size, width[col], height[row])\n cropdim = (w - width[col]) // 2, (h - height[row]) // 2, (w - width\n [col]) // 2 + width[col], (h - height[row]) // 2 + height[row]\n img2 = img2.resize((w, h), Image.LANCZOS)\n img2 = img2.crop(cropdim)\n img.paste(img2, (offsetx[col], offsety[row]))\n if os.path.exists(thumb_name):\n imgref = Image.open(thumb_name)\n byteio = io.BytesIO()\n img.save(byteio, 'JPEG')\n byteio.seek(0)\n imgnew = Image.open(byteio)\n diff = ImageChops.difference(imgnew, imgref)\n if diff.getbbox() is None:\n return\n img.save(thumb_name)\n\n\n<mask token>\n\n\ndef list_of_htmlfiles_in_items(itemlist):\n htmlist = list()\n for item in itemlist:\n if type(item) == PostSubdir:\n htmlist.append(item.htmname)\n 
htmlist.extend(list_of_htmlfiles_in_items(item.sublist))\n return htmlist\n\n\ndef list_of_thumbnails(posts, diary=False):\n thumblist = list()\n for post in posts:\n thumblist.extend(list_of_thumbnails_in_items(post.medias))\n if diary is False:\n thumblist.extend(list_of_thumbnails_in_items(post.dcim))\n return thumblist\n\n\ndef list_of_thumbnails_in_items(itemlist):\n thumblist = list()\n for item in itemlist:\n if type(item) == PostSubdir:\n thumblist.append(os.path.basename(item.thumb))\n thumblist.extend(list_of_thumbnails_in_items(item.sublist))\n else:\n thumblist.append(os.path.basename(item.thumb))\n return thumblist\n\n\ndef purge_htmlfiles(args, posts):\n \"\"\"\n Purge root dir from irrelevant html files\n \"\"\"\n htmlist = list_of_htmlfiles(args, posts)\n html_to_remove = list()\n for fullname in glob.glob(os.path.join(args.root, '*.htm*')):\n if fullname not in htmlist:\n html_to_remove.append(fullname)\n if len(html_to_remove) > args.thumbnails.threshold_htmlfiles:\n inpt = 'x'\n while inpt not in 'yn':\n inpt = input(\n f'{len(html_to_remove)} html files to remove. Continue [y|n]? '\n ).lower()\n if inpt == 'n':\n return\n for name in html_to_remove:\n print('Removing html files', name)\n os.remove(name)\n\n\ndef purge_thumbnails(args, thumbdir, posts, diary=False):\n \"\"\"\n Purge thumbnail dir from irrelevant thumbnails\n \"\"\"\n thumblist = list_of_thumbnails(posts, diary)\n thumbs_to_remove = list()\n for fullname in glob.glob(os.path.join(thumbdir, '*.jpg')):\n if os.path.basename(fullname) not in thumblist:\n thumbs_to_remove.append(fullname)\n if len(thumbs_to_remove) > args.thumbnails.threshold_thumbs:\n inpt = 'x'\n while inpt not in 'yn':\n inpt = input(\n f'{len(thumbs_to_remove)} thumbnails to remove. Continue [y|n]? 
'\n ).lower()\n if inpt == 'n':\n return\n for name in thumbs_to_remove:\n print('Removing thumbnail', name)\n os.remove(name)\n info_fullname = os.path.splitext(name)[0] + '.info'\n if os.path.exists(info_fullname):\n os.remove(info_fullname)\n\n\ndef is_media_within_dates(fullname, dates):\n if is_media(fullname):\n if type(dates) == tuple:\n return dates[0] <= date_from_item(fullname) <= dates[1]\n else:\n return True\n else:\n return False\n\n\ndef sorted_listdir(filelist):\n like_windows_explorer = True\n if not filelist:\n return filelist\n if like_windows_explorer:\n maxlen = max(len(os.path.splitext(name)[0]) for name in filelist)\n\n def keyfunc(name):\n root, ext = os.path.splitext(name.lower())\n return root.ljust(maxlen, ' ') + ext\n else:\n keyfunc = str.lower\n return sorted(filelist, key=keyfunc)\n\n\n<mask token>\n\n\ndef list_of_medias(args, sourcedir, recursive):\n \"\"\"\n Return the list of full paths for pictures and movies in source directory\n \"\"\"\n files = list_of_files(sourcedir, recursive)\n return [_ for _ in files if is_media_within_dates(_, args.dates)]\n\n\n<mask token>\n\n\ndef dispatch_post_items(list_of_post_items):\n subdirs = [_ for _ in list_of_post_items if type(_) is PostSubdir]\n medias = [_ for _ in list_of_post_items if type(_) is not PostSubdir]\n return subdirs, medias\n\n\ndef create_item(args, media_fullname, sourcedir, thumbdir, key, thumbmax):\n if os.path.isfile(media_fullname):\n if is_image_file(media_fullname):\n return create_item_image(args, media_fullname, sourcedir,\n thumbdir, key, thumbmax)\n else:\n return create_item_video(args, media_fullname, sourcedir,\n thumbdir, key, thumbmax)\n else:\n return create_item_subdir(args, media_fullname, sourcedir, thumbdir,\n key, thumbmax)\n\n\ndef create_item_image(args, media_fullname, sourcedir, thumbdir, key, thumbmax\n ):\n media_basename = os.path.basename(media_fullname)\n media_relname = relative_name(media_fullname, sourcedir)\n thumb_basename = 
thumbname(media_relname, key)\n thumb_fullname = os.path.join(thumbdir, thumb_basename)\n try:\n info, infofmt = get_image_info(media_fullname)\n infofmt = media_basename + ': ' + infofmt\n thumbsize = size_thumbnail(info[2], info[3], thumbmax)\n make_thumbnail_image(args, media_fullname, thumb_fullname, thumbsize)\n return PostImage(None, media_fullname, '/'.join((args.thumbrep,\n thumb_basename)), thumbsize, infofmt)\n except PIL.UnidentifiedImageError:\n warning('Unable to read image', media_fullname)\n return None\n\n\ndef create_item_video(args, media_fullname, sourcedir, thumbdir, key, thumbmax\n ):\n media_basename = os.path.basename(media_fullname)\n media_relname = relative_name(media_fullname, sourcedir)\n thumb_basename = thumbname(media_relname, key)\n thumb_fullname = os.path.join(thumbdir, thumb_basename)\n info_fullname = os.path.splitext(thumb_fullname)[0] + '.info'\n try:\n info, infofmt = get_video_info(media_fullname, info_fullname)\n infofmt = media_basename + ': ' + infofmt\n thumbsize = size_thumbnail(info[2], info[3], thumbmax)\n make_thumbnail_video(args, media_fullname, thumb_fullname,\n thumbsize, duration=info[5])\n return PostVideo(None, media_fullname, '/'.join((args.thumbrep,\n thumb_basename)), thumbsize, infofmt)\n except CalledProcessError:\n warning('Unable to read video', media_fullname)\n return None\n\n\n<mask token>\n\n\ndef relative_name(media_fullname, sourcedir):\n \"\"\"\n /Gilles/Dev/journal/tests/subdir/deeper2/deepest/OCT_20000112_000004.jpg\n -->\n deeper2_deepest_OCT_20000112_000004.jpg\n\n /Gilles/Dev/journal/tests/subdir/deeper2/deepest\n -->\n deeper2_deepest\n \"\"\"\n x = os.path.relpath(media_fullname, sourcedir)\n x = x.replace('\\\\', '_').replace('/', '_').replace('#', '_')\n return x\n\n\n<mask token>\n\n\ndef make_posts_from_diary(args):\n md_filename = os.path.join(args.root, 'index.md')\n if os.path.exists(md_filename):\n title, posts = parse_markdown(md_filename)\n else:\n error('File not found', 
md_filename)\n for post in posts:\n for media in post.medias:\n media_fullname = os.path.join(args.root, media.uri)\n item = create_item(args, media_fullname, args.root, args.\n thumbdir, 'post', 400)\n media.thumb = item.thumb\n media.thumbsize = item.thumbsize\n media.descr = item.descr\n return title, posts\n\n\n<mask token>\n\n\ndef make_posts_from_subdir(args, dirname):\n if args.bydir is False:\n medias_ext = list_of_medias(args, dirname, args.recursive)\n else:\n medias_ext = list_of_medias_ext(args, dirname)\n postmedias = list()\n for item in medias_ext:\n postmedia = create_item(args, item, args.sourcedir, args.thumbdir,\n 'dcim', 300)\n if postmedia is not None:\n postmedias.append(postmedia)\n post = Post(date='00000000', text='', medias=[])\n post.dcim = postmedias\n posts = [post]\n title = os.path.basename(args.sourcedir) or os.path.splitdrive(args.\n sourcedir)[0]\n return title, posts\n\n\n<mask token>\n\n\ndef create_gallery(args):\n title, posts = make_posts(args, args.sourcedir)\n print_html(args, posts, title, os.path.join(args.dest, args.rootname),\n 'regular')\n purge_htmlfiles(args, posts)\n if args.diary and not args.sourcedir:\n purge_thumbnails(args, args.thumbdir, posts, diary=True)\n else:\n purge_thumbnails(args, args.thumbdir, posts)\n\n\ndef create_diary(args):\n medias = list_of_medias(args, args.sourcedir, args.recursive)\n if args.dates == 'diary':\n assert 0\n else:\n required_dates = {date_from_item(media) for media in medias}\n if type(args.dates) == tuple:\n date1, date2 = args.dates\n required_dates = {date for date in required_dates if date1 <=\n date <= date2}\n title = args.sourcedir\n posts = list()\n for date in sorted(required_dates):\n posts.append(Post.from_date(date))\n os.makedirs(args.root, exist_ok=True)\n print_markdown(posts, title, os.path.join(args.root, 'index.md'))\n\n\n<mask token>\n\n\ndef compare_image_buffers(imgbuf1, imgbuf2):\n \"\"\"\n return True if images read on file are identical, False 
otherwise\n \"\"\"\n with io.BytesIO(imgbuf1) as imgio1, io.BytesIO(imgbuf2) as imgio2:\n img1 = Image.open(imgio1)\n img2 = Image.open(imgio2)\n diff = ImageChops.difference(img1, img2)\n return not diff.getbbox()\n\n\ndef check_images(args, posts, online_images):\n result = True\n for post in posts:\n for media in post.medias:\n if type(media) is PostImage:\n if media.basename in online_images:\n with open(os.path.join(args.root, media.uri), 'rb') as f:\n imgbuf1 = f.read()\n try:\n with urlopen(online_images[media.basename][0]) as u:\n imgbuf2 = u.read()\n except FileNotFoundError:\n print('File not found', online_images[media.\n basename][0])\n next\n if compare_image_buffers(imgbuf1, imgbuf2) is False:\n print('Files are different, upload', media.basename)\n elif 1:\n print('File already online', media.basename)\n else:\n print('File is absent, upload', media.basename)\n result = False\n elif type(media) is PostVideo:\n print('Video not checked', media.basename)\n else:\n assert False\n return result\n\n\n<mask token>\n\n\ndef idempotence(args):\n \"\"\"\n For testing identity between a diary file and the fle obtained after reading\n and printing it. 
See testing.\n \"\"\"\n title, posts = parse_markdown(os.path.join(args.root, 'index.md'))\n print_markdown(posts, title, os.path.join(args.dest, 'index.md'))\n\n\n<mask token>\n\n\nclass MyConfigParser(ConfigParser):\n \"\"\"Add input checking.\"\"\"\n\n def __init__(self):\n ConfigParser.__init__(self, inline_comment_prefixes=(';',))\n\n def error(self, section, entry):\n error('Missing or incorrect config value:', '[%s]%s' % (section, entry)\n )\n\n def getint(self, section, entry, default=None):\n try:\n if default is None:\n return ConfigParser.getint(self, section, entry)\n else:\n return ConfigParser.getint(self, section, entry, raw=True,\n vars=None, fallback=default)\n except Exception as e:\n print(e)\n self.error(section, entry)\n\n def getboolean(self, section, entry, default=None):\n try:\n if default is None:\n return ConfigParser.getboolean(self, section, entry)\n else:\n return ConfigParser.getboolean(self, section, entry, raw=\n True, vars=None, fallback=default)\n except Exception as e:\n print(e)\n self.error(section, entry)\n\n\ndef configfilename(params):\n return os.path.join(params.root, '.config.ini')\n\n\ndef createconfig(config_filename):\n with open(config_filename, 'wt') as f:\n f.writelines(CONFIG_DEFAULTS)\n\n\ndef read_config(params):\n config_filename = configfilename(params)\n try:\n if not os.path.exists(config_filename) or params.resetcfg:\n createconfig(config_filename)\n except:\n error('Error creating configuration file')\n try:\n getconfig(params, config_filename)\n except Exception as e:\n error('Error reading configuration file.', str(e), 'Use --resetcfg')\n\n\n<mask token>\n\n\ndef setconfig_cmd(args):\n config_filename = configfilename(args)\n setconfig(config_filename, *args.setcfg)\n\n\ndef update_config(args):\n updates = ('sourcedir', args.sourcedir), ('bydir', BOOL[args.bydir]), (\n 'bydate', BOOL[args.bydate]), ('diary', BOOL[args.diary]), ('recursive'\n , BOOL[args.recursive]), ('dates', args.dates), 
('github_pages',\n BOOL[args.github_pages])\n cfgname = configfilename(args)\n with open(cfgname) as f:\n cfglines = [_.strip() for _ in f.readlines()]\n for key, value in updates:\n for iline, line in enumerate(cfglines):\n if line.startswith(key):\n cfglines[iline] = f'{key} = {value}'\n break\n with open(cfgname, 'wt') as f:\n for line in cfglines:\n print(line, file=f)\n\n\ndef warning(*msg):\n print(colorama.Fore.YELLOW + colorama.Style.BRIGHT + ' '.join(msg),\n colorama.Style.RESET_ALL)\n\n\n<mask token>\n\n\ndef errorcode(msg):\n return ERRORS.splitlines().index(msg) + 1\n\n\ndef error(*msg):\n print(colorama.Fore.RED + colorama.Style.BRIGHT + ' '.join(msg),\n colorama.Style.RESET_ALL)\n sys.exit(errorcode(msg[0]))\n\n\n<mask token>\n\n\ndef setup_part1(args):\n \"\"\"\n Made before reading config file (config file located in args.root).\n Check and normalize root path.\n \"\"\"\n args.rootarg = args.root\n rootext = os.path.splitext(args.rootarg)[1]\n if rootext == '':\n pass\n else:\n args.root = os.path.dirname(args.root)\n if args.root:\n args.root = os.path.abspath(args.root)\n if not os.path.isdir(args.root):\n if args.gallery:\n os.mkdir(args.root)\n else:\n error('Directory not found', args.root)\n\n\ndef setup_part2(args):\n \"\"\"\n Made after reading config file.\n Check for ffmpeg in path.\n Create .thumbnails dir if necessary and create .nomedia in it.\n Copy photobox file to destination dir.\n Handle priority between command line and config file.\n \"\"\"\n if args.update:\n args.sourcedir = args.source.sourcedir\n args.bydir = args.source.bydir\n args.bydate = args.source.bydate\n args.diary = args.source.diary\n args.recursive = args.source.recursive\n args.dates = args.source.dates\n args.github_pages = args.source.github_pages\n elif args.gallery:\n args.source.sourcedir = args.sourcedir\n args.source.bydir = args.bydir\n args.source.bydate = args.bydate\n args.source.diary = args.diary\n args.source.recursive = args.recursive\n 
args.source.dates = args.dates\n args.source.github_pages = args.github_pages\n update_config(args)\n if args.github_pages:\n args.html_suffix = '.html'\n else:\n args.html_suffix = '.htm'\n rootext = os.path.splitext(args.rootarg)[1]\n if rootext:\n args.rootname = os.path.basename(args.rootarg)\n else:\n args.rootname = 'index' + args.html_suffix\n if args.sourcedir:\n args.sourcedir = os.path.abspath(args.sourcedir)\n if os.path.splitdrive(args.sourcedir)[0]:\n drive, rest = os.path.splitdrive(args.sourcedir)\n args.sourcedir = drive.upper() + rest\n if not os.path.isdir(args.sourcedir):\n error('Directory not found', args.sourcedir)\n elif args.gallery and args.diary is False and args.update is None:\n error('Directory not found', 'Use --sourcedir')\n if args.dest:\n args.dest = os.path.abspath(args.dest)\n if args.dest is None:\n args.dest = args.root\n if args.blogger and args.urlblogger is None:\n error('No blogger url (--url)')\n if args.gallery or args.update:\n for exe in ('ffmpeg', 'ffprobe'):\n try:\n check_output([exe, '-version'])\n except FileNotFoundError:\n error('File not found', exe)\n if args.github_pages:\n args.thumbrep = 'thumbnails'\n else:\n args.thumbrep = '.thumbnails'\n args.thumbdir = os.path.join(args.dest, args.thumbrep)\n if not os.path.exists(args.thumbdir):\n os.mkdir(args.thumbdir)\n open(os.path.join(args.thumbdir, '.nomedia'), 'a').close()\n favicondst = os.path.join(args.dest, 'favicon.ico')\n if not os.path.isfile(favicondst):\n faviconsrc = os.path.join(os.path.dirname(__file__), 'favicon.ico')\n shutil.copyfile(faviconsrc, favicondst)\n photoboxdir = os.path.join(args.dest, 'photobox')\n if not os.path.exists(photoboxdir):\n photoboxsrc = os.path.join(os.path.dirname(__file__), 'photobox')\n shutil.copytree(photoboxsrc, photoboxdir)\n if args.dates:\n if not (args.gallery or args.create):\n pass\n if args.dates == 'source':\n pass\n elif args.dates == 'diary':\n if args.create:\n error('Incorrect date format', args.dates)\n 
elif re.match('\\\\d+-\\\\d+', args.dates):\n date1, date2 = args.dates.split('-')\n if validate_date(date1) and validate_date(date2):\n args.dates = date1, date2\n else:\n error('Incorrect date format', args.dates)\n else:\n error('Incorrect date format', args.dates)\n\n\ndef main(argstring=None):\n colorama.init()\n args = parse_command_line(argstring)\n setup_part1(args)\n read_config(args)\n setup_part2(args)\n try:\n if args.gallery or args.update:\n create_gallery(args)\n elif args.create:\n create_diary(args)\n elif args.blogger:\n prepare_for_blogger(args)\n elif args.idem:\n idempotence(args)\n elif args.setcfg:\n setconfig_cmd(args)\n except KeyboardInterrupt:\n warning('Interrupted by user.')\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Post:\n\n def __init__(self, date, text, medias):\n self.date = date\n self.text = text\n self.medias = medias\n self.dcim = []\n self.daterank = 0\n self.extra = False\n\n def __lt__(self, other):\n return self.date < other.date\n\n @classmethod\n def from_markdown(cls, post):\n m = re.match('\\\\[(\\\\d\\\\d\\\\d\\\\d/\\\\d\\\\d/\\\\d\\\\d)\\\\]\\\\n*', post[0])\n if m:\n date = m.group(1).replace('/', '')\n if not validate_date(date):\n error('Incorrect date value:', date)\n del post[0]\n else:\n error('No date in post', ' '.join(post))\n while post and not post[0].strip():\n del post[0]\n text = ''\n while post and not re.match('!?\\\\[\\\\]', post[0]):\n text += post[0]\n del post[0]\n text = re.sub('\\\\n\\\\n$', '\\n', text)\n medias = list()\n while post and (match := re.match('!?\\\\[\\\\]\\\\((.*)\\\\)', post[0])):\n media = match.group(1)\n caption = None\n del post[0]\n if post and not re.match('!?\\\\[\\\\]', post[0]):\n caption = post[0].strip()\n del post[0]\n if match.group(0)[0] == '!':\n medias.append(PostImage(caption, media))\n else:\n medias.append(PostVideo(caption, media))\n return cls(date, text, medias)\n\n @classmethod\n def from_date(cls, date):\n dt = datetime.datetime.strptime(date, '%Y%m%d')\n datetext = dt.strftime('%A %d %B %Y').capitalize()\n post = cls(date, text=datetext, medias=[])\n post.daterank = 1\n return post\n\n def to_html(self, args, target='regular'):\n if target == 'regular':\n if args.diary:\n return self.to_html_diary(args)\n else:\n return self.to_html_regular(args)\n if target == 'blogger':\n return self.to_html_blogger()\n\n def to_html_regular(self, args):\n html = list()\n if self.text:\n html.append(markdown.markdown(self.text))\n subdirs, dcim = dispatch_post_items(self.dcim)\n if self.dcim:\n html.append(SEP)\n for media in subdirs:\n html.append(media.to_html_dcim(args))\n if dcim:\n html.append(f'<div id=\"gallery-dcim-{self.date}-{self.daterank}\">')\n for media in 
dcim:\n html.append(media.to_html_dcim(args))\n html.append('</div>')\n html.append(SEP)\n return html\n\n def to_html_diary(self, args):\n html = list()\n if self.extra:\n html.append('<div class=\"extra\">')\n if self.text:\n html.append(markdown.markdown(self.text))\n if self.medias:\n html.append(f'<div id=\"gallery-blog-{self.date}-{self.daterank}\">')\n for media in self.medias:\n html.append(media.to_html_post(args))\n html.append('</div>')\n _, dcim = dispatch_post_items(self.dcim)\n if dcim:\n html.append(f'<div id=\"gallery-dcim-{self.date}-{self.daterank}\">')\n html.append(SEP)\n for media in dcim:\n html.append(media.to_html_dcim(args))\n html.append('</div>')\n html.append(SEP)\n if self.extra:\n html.append('</div>')\n return html\n\n def to_html_blogger(self):\n html = list()\n html.append(markdown.markdown(self.text))\n for image in self.medias:\n html.append(image.to_html_blogger())\n html.append(SEP)\n return html\n\n\nclass PostItem:\n\n def __init__(self, caption, uri, thumb=None, thumbsize=None, descr=''):\n self.caption = caption\n self.uri = uri\n self.basename = os.path.basename(uri)\n self.thumb = thumb\n self.thumbsize = thumbsize\n self.descr = descr\n self.resized_url = None\n\n\nclass PostImage(PostItem):\n\n def to_markdown(self):\n if not self.caption:\n return '' % (self.uri,)\n else:\n return '\\n%s' % (self.uri, self.caption)\n\n def to_html_post(self, args):\n descr = self.descr if args.thumbnails.media_description else ''\n if not self.caption:\n return IMGPOST % (self.uri, self.thumb, *self.thumbsize, descr)\n else:\n return IMGPOSTCAPTION % (self.uri, self.thumb, *self.thumbsize,\n descr, self.caption)\n\n def to_html_dcim(self, args):\n descr = self.descr if args.thumbnails.media_description else ''\n return IMGDCIM % (relative_url(self.uri, args.root), self.thumb, *\n self.thumbsize, descr)\n\n def to_html_blogger(self):\n if not self.caption:\n return BIMGPAT % (self.uri, self.resized_url)\n else:\n return 
f'{BIMGPAT}\\n{CAPTION_PAT}' % (self.uri, self.\n resized_url, self.caption)\n\n\nclass PostVideo(PostItem):\n\n def to_markdown(self):\n if not self.caption:\n return '[](%s)' % (self.uri,)\n else:\n return '[](%s)\\n%s' % (self.uri, self.caption)\n\n def to_html_post(self, args):\n descr = self.descr if args.thumbnails.media_description else ''\n if not self.caption:\n return VIDPOST % (self.uri, self.thumb, *self.thumbsize, descr)\n else:\n return VIDPOSTCAPTION % (self.uri, self.thumb, *self.thumbsize,\n descr, self.caption)\n\n def to_html_dcim(self, args):\n descr = self.descr if args.thumbnails.media_description else ''\n return VIDDCIM % (relative_url(self.uri, args.root), self.thumb, *\n self.thumbsize, descr)\n\n def to_html_blogger(self):\n x = f'<p style=\"text-align: center;\">{self.iframe}</p>'\n if not self.caption:\n return x\n else:\n return f'%s\\n{CAPTION_PAT}' % (x, self.caption)\n\n\nclass PostSubdir(PostItem):\n\n def to_html_dcim(self, args):\n basename = os.path.basename(self.htmname)\n posts = self.posts\n title = self.caption\n print_html(args, posts, title, self.htmname)\n if not self.caption:\n return DIRPOST % (basename, self.thumb, *self.thumbsize)\n else:\n return DIRPOSTCAPTION % (basename, self.thumb, *self.thumbsize,\n self.caption)\n\n\n<mask token>\n\n\ndef parse_markdown(filename):\n \"\"\"\n Generate Post objects from markdown. 
Date must be present in each post and\n posts must be ordrered by date.\n \"\"\"\n if not os.path.exists(filename):\n error('File not found', filename)\n posts = list()\n with open(filename, encoding='utf-8') as f:\n line = next(f)\n if line.startswith('# '):\n title = line[2:].strip()\n record = []\n next(f)\n else:\n title = None\n record = [line]\n for line in f:\n if not line.startswith('___'):\n record.append(line)\n else:\n posts.append(Post.from_markdown(record))\n record = []\n daterank = defaultdict(int)\n for post in posts:\n daterank[post.date] += 1\n post.daterank = daterank[post.date]\n for post1, post2 in zip(posts[:-1], posts[1:]):\n if post1.date > post2.date:\n error('Posts are not ordered', f'{post1.date} > {post2.date}')\n return title, posts\n\n\ndef print_markdown(posts, title, fullname):\n with open(fullname, 'wt', encoding='utf-8') as fdst:\n print(f'# {title}\\n', file=fdst)\n for post in posts:\n date = f'[{post.date[0:4]}/{post.date[4:6]}/{post.date[6:8]}]'\n print(date, file=fdst)\n if post.text:\n print(file=fdst)\n for line in post.text.splitlines():\n if not line:\n print(file=fdst)\n else:\n for chunk in textwrap.wrap(line, width=78):\n print(chunk, file=fdst)\n if post.medias:\n print(file=fdst)\n for media in post.medias:\n print(media.to_markdown(), file=fdst)\n print('______', file=fdst)\n\n\n<mask token>\n\n\ndef print_html(args, posts, title, html_name, target='regular'):\n assert target in ('regular', 'blogger')\n with io.StringIO() as f:\n print_html_to_stream(args, posts, title, f, target)\n html = f.getvalue()\n if html_name:\n if os.path.exists(html_name):\n with open(html_name, 'rt', encoding='utf-8') as f:\n html0 = f.read()\n if html == html0:\n return None\n with open(html_name, 'wt', encoding='utf-8') as f:\n f.write(html)\n return None\n else:\n return html\n\n\n<mask token>\n\n\ndef is_image_file(name):\n return os.path.splitext(name)[1].lower() in ('.jpg', '.jpeg', '.png',\n '.gif', '.bmp', '.webp', 
'.tif')\n\n\n<mask token>\n\n\ndef is_media(name):\n return is_image_file(name) or is_video_file(name)\n\n\n<mask token>\n\n\ndef date_from_name(name):\n if (match := re.search('(?:\\\\D|^)(\\\\d{8})(?:\\\\D|$)', name, re.ASCII)):\n digits = match.group(1)\n if validate_date(digits):\n return digits\n return None\n\n\ndef date_from_item(filename):\n if (date := date_from_name(filename)):\n return date\n else:\n timestamp = os.path.getmtime(filename)\n return datetime.datetime.fromtimestamp(timestamp).strftime('%Y%m%d')\n\n\ndef time_from_name(name):\n if (match := re.search('(?:\\\\D|^)(\\\\d{8})\\\\D(\\\\d{6})(?:\\\\D|$)', name,\n re.ASCII)):\n digits = match.group(2)\n hour, minute, second = int(digits[0:2]), int(digits[2:4]), int(digits\n [4:6])\n if 0 <= hour < 24 and 0 <= minute < 60 and 0 <= second < 60:\n return digits\n return None\n\n\ndef time_from_item(filename):\n if (time := time_from_name(filename)):\n return time\n else:\n timestamp = os.path.getmtime(filename)\n return datetime.datetime.fromtimestamp(timestamp).strftime('%H%M%S')\n\n\n<mask token>\n\n\ndef get_image_info(filename):\n date = date_from_item(filename)\n time = time_from_item(filename)\n img = Image.open(filename)\n width, height = img.size\n size = round(os.path.getsize(filename) / 1000000.0, 1)\n return (date, time, width, height, size\n ), f'{date} {time}, dim={width}x{height}, {size} MB'\n\n\ndef get_video_info(filename, info_fullname):\n if os.path.exists(info_fullname):\n with open(info_fullname) as f:\n info = f.readline().split()\n date, time, width, height, size, duration, fps = info[0], info[1], int(\n info[2]), int(info[3]), float(info[4]), int(info[5]), float(info[6]\n )\n formatted_info = format_video_info(date, time, width, height, size,\n duration, fps)\n return (date, time, width, height, size, duration, fps), formatted_info\n else:\n info, formatted_info = make_video_info(filename, info_fullname)\n with open(info_fullname, 'wt') as f:\n print(' '.join([str(_) for _ in 
info]), file=f)\n return info, formatted_info\n\n\ndef make_video_info(filename, info_fullname):\n date = date_from_item(filename)\n time = time_from_item(filename)\n command = [*FFPROBE_CMD.split(), filename]\n try:\n output = check_output(command, stderr=STDOUT).decode()\n width, height, fps, duration = parse_ffprobe_output(output)\n size = round(os.path.getsize(filename) / 1000000.0, 1)\n output = format_video_info(date, time, width, height, size,\n duration, fps)\n except CalledProcessError as e:\n output = e.output.decode()\n warning(output)\n raise\n return (date, time, width, height, size, duration, fps), output\n\n\ndef parse_ffprobe_output(ffprobe_output):\n match = re.match(\n '(\\\\d+),(\\\\d+),(\\\\d+)/(\\\\d+),(\\\\d+/\\\\d+).*\\\\s(\\\\d+\\\\.\\\\d+)',\n ffprobe_output, re.DOTALL)\n width = int(match.group(1))\n height = int(match.group(2))\n fps = round(int(match.group(3)) / int(match.group(4)), 1)\n duration = round(float(match.group(6)))\n return width, height, fps, duration\n\n\ndef format_video_info(date, time, width, height, size, duration, fps):\n return (\n f'{date} {time}, dim={width}x{height}, {format_duration(duration)}, fps={fps}, {size} MB'\n )\n\n\n<mask token>\n\n\ndef thumbname(name, key):\n return key + '-' + name + '.jpg'\n\n\ndef size_thumbnail(width, height, maxdim):\n if width >= height:\n return maxdim, int(round(maxdim * height / width))\n else:\n return int(round(maxdim * width / height)), maxdim\n\n\ndef make_thumbnail_image(args, image_name, thumb_name, size):\n if os.path.exists(thumb_name) and args.forcethumb is False:\n pass\n else:\n print('Making thumbnail:', thumb_name)\n create_thumbnail_image(image_name, thumb_name, size)\n\n\ndef create_thumbnail_image(image_name, thumb_name, size):\n imgobj = Image.open(image_name)\n if imgobj.mode != 'RGBA' and image_name.endswith('.jpg') and not (\n image_name.endswith('.gif') and imgobj.info.get('transparency')):\n imgobj = imgobj.convert('RGBA')\n imgobj.thumbnail(size, 
Image.LANCZOS)\n imgobj = imgobj.convert('RGB')\n imgobj.save(thumb_name)\n\n\ndef make_thumbnail_video(args, video_name, thumb_name, size, duration):\n if os.path.exists(thumb_name) and args.forcethumb is False:\n pass\n else:\n print('Making thumbnail:', thumb_name)\n create_thumbnail_video(args, video_name, thumb_name, size, duration)\n\n\n<mask token>\n\n\ndef create_thumbnail_video(args, filename, thumbname, size, duration):\n delay = min(duration - 1, args.thumbnails.thumbdelay)\n sizearg = '%dx%d' % size\n command = (\n 'ffmpeg -y -v error -itsoffset -%d -i \"%s\" -vcodec mjpeg -vframes 1 -an -f rawvideo -s %s \"%s\"'\n )\n command = command % (delay, filename, sizearg, thumbname)\n result = os.system(command)\n try:\n img1 = Image.open(thumbname)\n except:\n warning('Unable to save thumbnail for', filename)\n return\n img2 = Image.open(io.BytesIO(base64.b64decode(VIDEO_ICON)))\n width, height = img1.size\n img1.paste(img2, (6, height - 20 - 6), None)\n img1.save(thumbname)\n\n\n<mask token>\n\n\ndef create_thumbnail_subdir(subdir_name, thumb_name, size, items, thumbdir):\n\n def size_thumbnail(width, height, xmax, ymax):\n width2 = xmax\n height2 = int(round(xmax * height / width))\n if height2 < ymax:\n width2 = int(round(ymax * width / height))\n height2 = ymax\n return width2, height2\n thumblist = [os.path.basename(item.thumb) for item in items]\n widthnum, heightnum, width, height, offsetx, offsety = mosaic_geometry(size\n , thumblist)\n thumbnum = widthnum * heightnum\n img = Image.new('RGB', size, SUBDIR_BACKCOL)\n for ind, thumb in enumerate(thumblist[:min(thumbnum, len(thumblist))]):\n row = ind // widthnum\n col = ind % widthnum\n img2 = Image.open(os.path.join(thumbdir, thumb))\n w, h = size_thumbnail(*img2.size, width[col], height[row])\n cropdim = (w - width[col]) // 2, (h - height[row]) // 2, (w - width\n [col]) // 2 + width[col], (h - height[row]) // 2 + height[row]\n img2 = img2.resize((w, h), Image.LANCZOS)\n img2 = img2.crop(cropdim)\n 
img.paste(img2, (offsetx[col], offsety[row]))\n if os.path.exists(thumb_name):\n imgref = Image.open(thumb_name)\n byteio = io.BytesIO()\n img.save(byteio, 'JPEG')\n byteio.seek(0)\n imgnew = Image.open(byteio)\n diff = ImageChops.difference(imgnew, imgref)\n if diff.getbbox() is None:\n return\n img.save(thumb_name)\n\n\ndef mosaic_geometry(size, thumblist):\n if len(thumblist) == 1:\n widthnum = 1\n heightnum = 1\n elif len(thumblist) <= 3:\n widthnum = 1\n heightnum = 2\n elif len(thumblist) <= 8:\n widthnum = 2\n heightnum = 2\n else:\n widthnum = 3\n heightnum = 3\n if widthnum == 1:\n width = [size[0] - 2]\n else:\n width = [size[0] // widthnum - 2] * (widthnum - 1)\n width.append(size[0] - (1 + sum(width) + 2 * len(width) + 1))\n if heightnum == 1:\n height = [size[1] - 2]\n else:\n height = [size[1] // heightnum - 2] * (heightnum - 1)\n height.append(size[1] - (1 + sum(height) + 2 * len(height) + 1))\n offsetx = [1]\n for w in width[:-1]:\n offsetx.append(offsetx[-1] + w + 2)\n offsety = [1]\n for h in height[:-1]:\n offsety.append(offsety[-1] + h + 2)\n return widthnum, heightnum, width, height, offsetx, offsety\n\n\n<mask token>\n\n\ndef list_of_htmlfiles_in_items(itemlist):\n htmlist = list()\n for item in itemlist:\n if type(item) == PostSubdir:\n htmlist.append(item.htmname)\n htmlist.extend(list_of_htmlfiles_in_items(item.sublist))\n return htmlist\n\n\ndef list_of_thumbnails(posts, diary=False):\n thumblist = list()\n for post in posts:\n thumblist.extend(list_of_thumbnails_in_items(post.medias))\n if diary is False:\n thumblist.extend(list_of_thumbnails_in_items(post.dcim))\n return thumblist\n\n\ndef list_of_thumbnails_in_items(itemlist):\n thumblist = list()\n for item in itemlist:\n if type(item) == PostSubdir:\n thumblist.append(os.path.basename(item.thumb))\n thumblist.extend(list_of_thumbnails_in_items(item.sublist))\n else:\n thumblist.append(os.path.basename(item.thumb))\n return thumblist\n\n\ndef purge_htmlfiles(args, posts):\n \"\"\"\n 
Purge root dir from irrelevant html files\n \"\"\"\n htmlist = list_of_htmlfiles(args, posts)\n html_to_remove = list()\n for fullname in glob.glob(os.path.join(args.root, '*.htm*')):\n if fullname not in htmlist:\n html_to_remove.append(fullname)\n if len(html_to_remove) > args.thumbnails.threshold_htmlfiles:\n inpt = 'x'\n while inpt not in 'yn':\n inpt = input(\n f'{len(html_to_remove)} html files to remove. Continue [y|n]? '\n ).lower()\n if inpt == 'n':\n return\n for name in html_to_remove:\n print('Removing html files', name)\n os.remove(name)\n\n\ndef purge_thumbnails(args, thumbdir, posts, diary=False):\n \"\"\"\n Purge thumbnail dir from irrelevant thumbnails\n \"\"\"\n thumblist = list_of_thumbnails(posts, diary)\n thumbs_to_remove = list()\n for fullname in glob.glob(os.path.join(thumbdir, '*.jpg')):\n if os.path.basename(fullname) not in thumblist:\n thumbs_to_remove.append(fullname)\n if len(thumbs_to_remove) > args.thumbnails.threshold_thumbs:\n inpt = 'x'\n while inpt not in 'yn':\n inpt = input(\n f'{len(thumbs_to_remove)} thumbnails to remove. Continue [y|n]? 
'\n ).lower()\n if inpt == 'n':\n return\n for name in thumbs_to_remove:\n print('Removing thumbnail', name)\n os.remove(name)\n info_fullname = os.path.splitext(name)[0] + '.info'\n if os.path.exists(info_fullname):\n os.remove(info_fullname)\n\n\ndef is_media_within_dates(fullname, dates):\n if is_media(fullname):\n if type(dates) == tuple:\n return dates[0] <= date_from_item(fullname) <= dates[1]\n else:\n return True\n else:\n return False\n\n\ndef sorted_listdir(filelist):\n like_windows_explorer = True\n if not filelist:\n return filelist\n if like_windows_explorer:\n maxlen = max(len(os.path.splitext(name)[0]) for name in filelist)\n\n def keyfunc(name):\n root, ext = os.path.splitext(name.lower())\n return root.ljust(maxlen, ' ') + ext\n else:\n keyfunc = str.lower\n return sorted(filelist, key=keyfunc)\n\n\n<mask token>\n\n\ndef list_of_medias(args, sourcedir, recursive):\n \"\"\"\n Return the list of full paths for pictures and movies in source directory\n \"\"\"\n files = list_of_files(sourcedir, recursive)\n return [_ for _ in files if is_media_within_dates(_, args.dates)]\n\n\n<mask token>\n\n\ndef dispatch_post_items(list_of_post_items):\n subdirs = [_ for _ in list_of_post_items if type(_) is PostSubdir]\n medias = [_ for _ in list_of_post_items if type(_) is not PostSubdir]\n return subdirs, medias\n\n\ndef create_item(args, media_fullname, sourcedir, thumbdir, key, thumbmax):\n if os.path.isfile(media_fullname):\n if is_image_file(media_fullname):\n return create_item_image(args, media_fullname, sourcedir,\n thumbdir, key, thumbmax)\n else:\n return create_item_video(args, media_fullname, sourcedir,\n thumbdir, key, thumbmax)\n else:\n return create_item_subdir(args, media_fullname, sourcedir, thumbdir,\n key, thumbmax)\n\n\ndef create_item_image(args, media_fullname, sourcedir, thumbdir, key, thumbmax\n ):\n media_basename = os.path.basename(media_fullname)\n media_relname = relative_name(media_fullname, sourcedir)\n thumb_basename = 
thumbname(media_relname, key)\n thumb_fullname = os.path.join(thumbdir, thumb_basename)\n try:\n info, infofmt = get_image_info(media_fullname)\n infofmt = media_basename + ': ' + infofmt\n thumbsize = size_thumbnail(info[2], info[3], thumbmax)\n make_thumbnail_image(args, media_fullname, thumb_fullname, thumbsize)\n return PostImage(None, media_fullname, '/'.join((args.thumbrep,\n thumb_basename)), thumbsize, infofmt)\n except PIL.UnidentifiedImageError:\n warning('Unable to read image', media_fullname)\n return None\n\n\ndef create_item_video(args, media_fullname, sourcedir, thumbdir, key, thumbmax\n ):\n media_basename = os.path.basename(media_fullname)\n media_relname = relative_name(media_fullname, sourcedir)\n thumb_basename = thumbname(media_relname, key)\n thumb_fullname = os.path.join(thumbdir, thumb_basename)\n info_fullname = os.path.splitext(thumb_fullname)[0] + '.info'\n try:\n info, infofmt = get_video_info(media_fullname, info_fullname)\n infofmt = media_basename + ': ' + infofmt\n thumbsize = size_thumbnail(info[2], info[3], thumbmax)\n make_thumbnail_video(args, media_fullname, thumb_fullname,\n thumbsize, duration=info[5])\n return PostVideo(None, media_fullname, '/'.join((args.thumbrep,\n thumb_basename)), thumbsize, infofmt)\n except CalledProcessError:\n warning('Unable to read video', media_fullname)\n return None\n\n\n<mask token>\n\n\ndef relative_name(media_fullname, sourcedir):\n \"\"\"\n /Gilles/Dev/journal/tests/subdir/deeper2/deepest/OCT_20000112_000004.jpg\n -->\n deeper2_deepest_OCT_20000112_000004.jpg\n\n /Gilles/Dev/journal/tests/subdir/deeper2/deepest\n -->\n deeper2_deepest\n \"\"\"\n x = os.path.relpath(media_fullname, sourcedir)\n x = x.replace('\\\\', '_').replace('/', '_').replace('#', '_')\n return x\n\n\n<mask token>\n\n\ndef make_posts_from_diary(args):\n md_filename = os.path.join(args.root, 'index.md')\n if os.path.exists(md_filename):\n title, posts = parse_markdown(md_filename)\n else:\n error('File not found', 
md_filename)\n for post in posts:\n for media in post.medias:\n media_fullname = os.path.join(args.root, media.uri)\n item = create_item(args, media_fullname, args.root, args.\n thumbdir, 'post', 400)\n media.thumb = item.thumb\n media.thumbsize = item.thumbsize\n media.descr = item.descr\n return title, posts\n\n\n<mask token>\n\n\ndef make_posts_from_subdir(args, dirname):\n if args.bydir is False:\n medias_ext = list_of_medias(args, dirname, args.recursive)\n else:\n medias_ext = list_of_medias_ext(args, dirname)\n postmedias = list()\n for item in medias_ext:\n postmedia = create_item(args, item, args.sourcedir, args.thumbdir,\n 'dcim', 300)\n if postmedia is not None:\n postmedias.append(postmedia)\n post = Post(date='00000000', text='', medias=[])\n post.dcim = postmedias\n posts = [post]\n title = os.path.basename(args.sourcedir) or os.path.splitdrive(args.\n sourcedir)[0]\n return title, posts\n\n\n<mask token>\n\n\ndef create_gallery(args):\n title, posts = make_posts(args, args.sourcedir)\n print_html(args, posts, title, os.path.join(args.dest, args.rootname),\n 'regular')\n purge_htmlfiles(args, posts)\n if args.diary and not args.sourcedir:\n purge_thumbnails(args, args.thumbdir, posts, diary=True)\n else:\n purge_thumbnails(args, args.thumbdir, posts)\n\n\ndef create_diary(args):\n medias = list_of_medias(args, args.sourcedir, args.recursive)\n if args.dates == 'diary':\n assert 0\n else:\n required_dates = {date_from_item(media) for media in medias}\n if type(args.dates) == tuple:\n date1, date2 = args.dates\n required_dates = {date for date in required_dates if date1 <=\n date <= date2}\n title = args.sourcedir\n posts = list()\n for date in sorted(required_dates):\n posts.append(Post.from_date(date))\n os.makedirs(args.root, exist_ok=True)\n print_markdown(posts, title, os.path.join(args.root, 'index.md'))\n\n\n<mask token>\n\n\ndef compare_image_buffers(imgbuf1, imgbuf2):\n \"\"\"\n return True if images read on file are identical, False 
otherwise\n \"\"\"\n with io.BytesIO(imgbuf1) as imgio1, io.BytesIO(imgbuf2) as imgio2:\n img1 = Image.open(imgio1)\n img2 = Image.open(imgio2)\n diff = ImageChops.difference(img1, img2)\n return not diff.getbbox()\n\n\ndef check_images(args, posts, online_images):\n result = True\n for post in posts:\n for media in post.medias:\n if type(media) is PostImage:\n if media.basename in online_images:\n with open(os.path.join(args.root, media.uri), 'rb') as f:\n imgbuf1 = f.read()\n try:\n with urlopen(online_images[media.basename][0]) as u:\n imgbuf2 = u.read()\n except FileNotFoundError:\n print('File not found', online_images[media.\n basename][0])\n next\n if compare_image_buffers(imgbuf1, imgbuf2) is False:\n print('Files are different, upload', media.basename)\n elif 1:\n print('File already online', media.basename)\n else:\n print('File is absent, upload', media.basename)\n result = False\n elif type(media) is PostVideo:\n print('Video not checked', media.basename)\n else:\n assert False\n return result\n\n\ndef compose_blogger_html(args, title, posts, imgdata, online_videos):\n \"\"\" Compose html with blogger image urls\n \"\"\"\n for post in posts:\n for media in post.medias:\n if type(media) is PostImage:\n if media.uri not in imgdata:\n print('Image missing: ', media.uri)\n else:\n img_url, resized_url = imgdata[media.uri]\n media.uri = img_url\n media.resized_url = resized_url\n elif type(media) is PostVideo:\n if not online_videos:\n print('Video missing: ', media.uri)\n else:\n media.iframe = online_videos[0]\n del online_videos[0]\n else:\n assert False\n return print_html(args, posts, title, '', target='blogger')\n\n\ndef prepare_for_blogger(args):\n \"\"\"\n Export blogger html to clipboard.\n If --full, export complete html, otherwise export html extract ready to\n paste into blogger edit mode.\n \"\"\"\n title, posts = parse_markdown(os.path.join(args.root, 'index.md'))\n online_images, online_videos = online_images_url(args)\n if args.check_images 
and check_images(args, posts, online_images) is False:\n pass\n html = compose_blogger_html(args, title, posts, online_images,\n online_videos)\n if args.full is False:\n html = re.search('<body>(.*)?</body>', html, flags=re.DOTALL).group(1)\n html = re.sub('<script>.*?</script>', '', html, flags=re.DOTALL)\n html = STYLE.replace('%%', '%') + html\n if args.dest:\n with open(args.dest, 'wt', encoding='utf-8') as f:\n f.write(html)\n else:\n clipboard.copy(html)\n\n\ndef idempotence(args):\n \"\"\"\n For testing identity between a diary file and the fle obtained after reading\n and printing it. See testing.\n \"\"\"\n title, posts = parse_markdown(os.path.join(args.root, 'index.md'))\n print_markdown(posts, title, os.path.join(args.dest, 'index.md'))\n\n\n<mask token>\n\n\nclass MyConfigParser(ConfigParser):\n \"\"\"Add input checking.\"\"\"\n\n def __init__(self):\n ConfigParser.__init__(self, inline_comment_prefixes=(';',))\n\n def error(self, section, entry):\n error('Missing or incorrect config value:', '[%s]%s' % (section, entry)\n )\n\n def getint(self, section, entry, default=None):\n try:\n if default is None:\n return ConfigParser.getint(self, section, entry)\n else:\n return ConfigParser.getint(self, section, entry, raw=True,\n vars=None, fallback=default)\n except Exception as e:\n print(e)\n self.error(section, entry)\n\n def getboolean(self, section, entry, default=None):\n try:\n if default is None:\n return ConfigParser.getboolean(self, section, entry)\n else:\n return ConfigParser.getboolean(self, section, entry, raw=\n True, vars=None, fallback=default)\n except Exception as e:\n print(e)\n self.error(section, entry)\n\n\ndef configfilename(params):\n return os.path.join(params.root, '.config.ini')\n\n\ndef createconfig(config_filename):\n with open(config_filename, 'wt') as f:\n f.writelines(CONFIG_DEFAULTS)\n\n\ndef read_config(params):\n config_filename = configfilename(params)\n try:\n if not os.path.exists(config_filename) or params.resetcfg:\n 
createconfig(config_filename)\n except:\n error('Error creating configuration file')\n try:\n getconfig(params, config_filename)\n except Exception as e:\n error('Error reading configuration file.', str(e), 'Use --resetcfg')\n\n\n<mask token>\n\n\ndef setconfig_cmd(args):\n config_filename = configfilename(args)\n setconfig(config_filename, *args.setcfg)\n\n\ndef update_config(args):\n updates = ('sourcedir', args.sourcedir), ('bydir', BOOL[args.bydir]), (\n 'bydate', BOOL[args.bydate]), ('diary', BOOL[args.diary]), ('recursive'\n , BOOL[args.recursive]), ('dates', args.dates), ('github_pages',\n BOOL[args.github_pages])\n cfgname = configfilename(args)\n with open(cfgname) as f:\n cfglines = [_.strip() for _ in f.readlines()]\n for key, value in updates:\n for iline, line in enumerate(cfglines):\n if line.startswith(key):\n cfglines[iline] = f'{key} = {value}'\n break\n with open(cfgname, 'wt') as f:\n for line in cfglines:\n print(line, file=f)\n\n\ndef warning(*msg):\n print(colorama.Fore.YELLOW + colorama.Style.BRIGHT + ' '.join(msg),\n colorama.Style.RESET_ALL)\n\n\n<mask token>\n\n\ndef errorcode(msg):\n return ERRORS.splitlines().index(msg) + 1\n\n\ndef error(*msg):\n print(colorama.Fore.RED + colorama.Style.BRIGHT + ' '.join(msg),\n colorama.Style.RESET_ALL)\n sys.exit(errorcode(msg[0]))\n\n\n<mask token>\n\n\ndef setup_part1(args):\n \"\"\"\n Made before reading config file (config file located in args.root).\n Check and normalize root path.\n \"\"\"\n args.rootarg = args.root\n rootext = os.path.splitext(args.rootarg)[1]\n if rootext == '':\n pass\n else:\n args.root = os.path.dirname(args.root)\n if args.root:\n args.root = os.path.abspath(args.root)\n if not os.path.isdir(args.root):\n if args.gallery:\n os.mkdir(args.root)\n else:\n error('Directory not found', args.root)\n\n\ndef setup_part2(args):\n \"\"\"\n Made after reading config file.\n Check for ffmpeg in path.\n Create .thumbnails dir if necessary and create .nomedia in it.\n Copy photobox 
file to destination dir.\n Handle priority between command line and config file.\n \"\"\"\n if args.update:\n args.sourcedir = args.source.sourcedir\n args.bydir = args.source.bydir\n args.bydate = args.source.bydate\n args.diary = args.source.diary\n args.recursive = args.source.recursive\n args.dates = args.source.dates\n args.github_pages = args.source.github_pages\n elif args.gallery:\n args.source.sourcedir = args.sourcedir\n args.source.bydir = args.bydir\n args.source.bydate = args.bydate\n args.source.diary = args.diary\n args.source.recursive = args.recursive\n args.source.dates = args.dates\n args.source.github_pages = args.github_pages\n update_config(args)\n if args.github_pages:\n args.html_suffix = '.html'\n else:\n args.html_suffix = '.htm'\n rootext = os.path.splitext(args.rootarg)[1]\n if rootext:\n args.rootname = os.path.basename(args.rootarg)\n else:\n args.rootname = 'index' + args.html_suffix\n if args.sourcedir:\n args.sourcedir = os.path.abspath(args.sourcedir)\n if os.path.splitdrive(args.sourcedir)[0]:\n drive, rest = os.path.splitdrive(args.sourcedir)\n args.sourcedir = drive.upper() + rest\n if not os.path.isdir(args.sourcedir):\n error('Directory not found', args.sourcedir)\n elif args.gallery and args.diary is False and args.update is None:\n error('Directory not found', 'Use --sourcedir')\n if args.dest:\n args.dest = os.path.abspath(args.dest)\n if args.dest is None:\n args.dest = args.root\n if args.blogger and args.urlblogger is None:\n error('No blogger url (--url)')\n if args.gallery or args.update:\n for exe in ('ffmpeg', 'ffprobe'):\n try:\n check_output([exe, '-version'])\n except FileNotFoundError:\n error('File not found', exe)\n if args.github_pages:\n args.thumbrep = 'thumbnails'\n else:\n args.thumbrep = '.thumbnails'\n args.thumbdir = os.path.join(args.dest, args.thumbrep)\n if not os.path.exists(args.thumbdir):\n os.mkdir(args.thumbdir)\n open(os.path.join(args.thumbdir, '.nomedia'), 'a').close()\n favicondst = 
os.path.join(args.dest, 'favicon.ico')\n if not os.path.isfile(favicondst):\n faviconsrc = os.path.join(os.path.dirname(__file__), 'favicon.ico')\n shutil.copyfile(faviconsrc, favicondst)\n photoboxdir = os.path.join(args.dest, 'photobox')\n if not os.path.exists(photoboxdir):\n photoboxsrc = os.path.join(os.path.dirname(__file__), 'photobox')\n shutil.copytree(photoboxsrc, photoboxdir)\n if args.dates:\n if not (args.gallery or args.create):\n pass\n if args.dates == 'source':\n pass\n elif args.dates == 'diary':\n if args.create:\n error('Incorrect date format', args.dates)\n elif re.match('\\\\d+-\\\\d+', args.dates):\n date1, date2 = args.dates.split('-')\n if validate_date(date1) and validate_date(date2):\n args.dates = date1, date2\n else:\n error('Incorrect date format', args.dates)\n else:\n error('Incorrect date format', args.dates)\n\n\ndef main(argstring=None):\n colorama.init()\n args = parse_command_line(argstring)\n setup_part1(args)\n read_config(args)\n setup_part2(args)\n try:\n if args.gallery or args.update:\n create_gallery(args)\n elif args.create:\n create_diary(args)\n elif args.blogger:\n prepare_for_blogger(args)\n elif args.idem:\n idempotence(args)\n elif args.setcfg:\n setconfig_cmd(args)\n except KeyboardInterrupt:\n warning('Interrupted by user.')\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Post:\n\n def __init__(self, date, text, medias):\n self.date = date\n self.text = text\n self.medias = medias\n self.dcim = []\n self.daterank = 0\n self.extra = False\n\n def __lt__(self, other):\n return self.date < other.date\n\n @classmethod\n def from_markdown(cls, post):\n m = re.match('\\\\[(\\\\d\\\\d\\\\d\\\\d/\\\\d\\\\d/\\\\d\\\\d)\\\\]\\\\n*', post[0])\n if m:\n date = m.group(1).replace('/', '')\n if not validate_date(date):\n error('Incorrect date value:', date)\n del post[0]\n else:\n error('No date in post', ' '.join(post))\n while post and not post[0].strip():\n del post[0]\n text = ''\n while post and not re.match('!?\\\\[\\\\]', post[0]):\n text += post[0]\n del post[0]\n text = re.sub('\\\\n\\\\n$', '\\n', text)\n medias = list()\n while post and (match := re.match('!?\\\\[\\\\]\\\\((.*)\\\\)', post[0])):\n media = match.group(1)\n caption = None\n del post[0]\n if post and not re.match('!?\\\\[\\\\]', post[0]):\n caption = post[0].strip()\n del post[0]\n if match.group(0)[0] == '!':\n medias.append(PostImage(caption, media))\n else:\n medias.append(PostVideo(caption, media))\n return cls(date, text, medias)\n\n @classmethod\n def from_date(cls, date):\n dt = datetime.datetime.strptime(date, '%Y%m%d')\n datetext = dt.strftime('%A %d %B %Y').capitalize()\n post = cls(date, text=datetext, medias=[])\n post.daterank = 1\n return post\n\n def to_html(self, args, target='regular'):\n if target == 'regular':\n if args.diary:\n return self.to_html_diary(args)\n else:\n return self.to_html_regular(args)\n if target == 'blogger':\n return self.to_html_blogger()\n\n def to_html_regular(self, args):\n html = list()\n if self.text:\n html.append(markdown.markdown(self.text))\n subdirs, dcim = dispatch_post_items(self.dcim)\n if self.dcim:\n html.append(SEP)\n for media in subdirs:\n html.append(media.to_html_dcim(args))\n if dcim:\n html.append(f'<div id=\"gallery-dcim-{self.date}-{self.daterank}\">')\n for media in 
dcim:\n html.append(media.to_html_dcim(args))\n html.append('</div>')\n html.append(SEP)\n return html\n\n def to_html_diary(self, args):\n html = list()\n if self.extra:\n html.append('<div class=\"extra\">')\n if self.text:\n html.append(markdown.markdown(self.text))\n if self.medias:\n html.append(f'<div id=\"gallery-blog-{self.date}-{self.daterank}\">')\n for media in self.medias:\n html.append(media.to_html_post(args))\n html.append('</div>')\n _, dcim = dispatch_post_items(self.dcim)\n if dcim:\n html.append(f'<div id=\"gallery-dcim-{self.date}-{self.daterank}\">')\n html.append(SEP)\n for media in dcim:\n html.append(media.to_html_dcim(args))\n html.append('</div>')\n html.append(SEP)\n if self.extra:\n html.append('</div>')\n return html\n\n def to_html_blogger(self):\n html = list()\n html.append(markdown.markdown(self.text))\n for image in self.medias:\n html.append(image.to_html_blogger())\n html.append(SEP)\n return html\n\n\nclass PostItem:\n\n def __init__(self, caption, uri, thumb=None, thumbsize=None, descr=''):\n self.caption = caption\n self.uri = uri\n self.basename = os.path.basename(uri)\n self.thumb = thumb\n self.thumbsize = thumbsize\n self.descr = descr\n self.resized_url = None\n\n\nclass PostImage(PostItem):\n\n def to_markdown(self):\n if not self.caption:\n return '' % (self.uri,)\n else:\n return '\\n%s' % (self.uri, self.caption)\n\n def to_html_post(self, args):\n descr = self.descr if args.thumbnails.media_description else ''\n if not self.caption:\n return IMGPOST % (self.uri, self.thumb, *self.thumbsize, descr)\n else:\n return IMGPOSTCAPTION % (self.uri, self.thumb, *self.thumbsize,\n descr, self.caption)\n\n def to_html_dcim(self, args):\n descr = self.descr if args.thumbnails.media_description else ''\n return IMGDCIM % (relative_url(self.uri, args.root), self.thumb, *\n self.thumbsize, descr)\n\n def to_html_blogger(self):\n if not self.caption:\n return BIMGPAT % (self.uri, self.resized_url)\n else:\n return 
f'{BIMGPAT}\\n{CAPTION_PAT}' % (self.uri, self.\n resized_url, self.caption)\n\n\nclass PostVideo(PostItem):\n\n def to_markdown(self):\n if not self.caption:\n return '[](%s)' % (self.uri,)\n else:\n return '[](%s)\\n%s' % (self.uri, self.caption)\n\n def to_html_post(self, args):\n descr = self.descr if args.thumbnails.media_description else ''\n if not self.caption:\n return VIDPOST % (self.uri, self.thumb, *self.thumbsize, descr)\n else:\n return VIDPOSTCAPTION % (self.uri, self.thumb, *self.thumbsize,\n descr, self.caption)\n\n def to_html_dcim(self, args):\n descr = self.descr if args.thumbnails.media_description else ''\n return VIDDCIM % (relative_url(self.uri, args.root), self.thumb, *\n self.thumbsize, descr)\n\n def to_html_blogger(self):\n x = f'<p style=\"text-align: center;\">{self.iframe}</p>'\n if not self.caption:\n return x\n else:\n return f'%s\\n{CAPTION_PAT}' % (x, self.caption)\n\n\nclass PostSubdir(PostItem):\n\n def to_html_dcim(self, args):\n basename = os.path.basename(self.htmname)\n posts = self.posts\n title = self.caption\n print_html(args, posts, title, self.htmname)\n if not self.caption:\n return DIRPOST % (basename, self.thumb, *self.thumbsize)\n else:\n return DIRPOSTCAPTION % (basename, self.thumb, *self.thumbsize,\n self.caption)\n\n\n<mask token>\n\n\ndef parse_markdown(filename):\n \"\"\"\n Generate Post objects from markdown. 
Date must be present in each post and\n posts must be ordrered by date.\n \"\"\"\n if not os.path.exists(filename):\n error('File not found', filename)\n posts = list()\n with open(filename, encoding='utf-8') as f:\n line = next(f)\n if line.startswith('# '):\n title = line[2:].strip()\n record = []\n next(f)\n else:\n title = None\n record = [line]\n for line in f:\n if not line.startswith('___'):\n record.append(line)\n else:\n posts.append(Post.from_markdown(record))\n record = []\n daterank = defaultdict(int)\n for post in posts:\n daterank[post.date] += 1\n post.daterank = daterank[post.date]\n for post1, post2 in zip(posts[:-1], posts[1:]):\n if post1.date > post2.date:\n error('Posts are not ordered', f'{post1.date} > {post2.date}')\n return title, posts\n\n\ndef print_markdown(posts, title, fullname):\n with open(fullname, 'wt', encoding='utf-8') as fdst:\n print(f'# {title}\\n', file=fdst)\n for post in posts:\n date = f'[{post.date[0:4]}/{post.date[4:6]}/{post.date[6:8]}]'\n print(date, file=fdst)\n if post.text:\n print(file=fdst)\n for line in post.text.splitlines():\n if not line:\n print(file=fdst)\n else:\n for chunk in textwrap.wrap(line, width=78):\n print(chunk, file=fdst)\n if post.medias:\n print(file=fdst)\n for media in post.medias:\n print(media.to_markdown(), file=fdst)\n print('______', file=fdst)\n\n\n<mask token>\n\n\ndef print_html(args, posts, title, html_name, target='regular'):\n assert target in ('regular', 'blogger')\n with io.StringIO() as f:\n print_html_to_stream(args, posts, title, f, target)\n html = f.getvalue()\n if html_name:\n if os.path.exists(html_name):\n with open(html_name, 'rt', encoding='utf-8') as f:\n html0 = f.read()\n if html == html0:\n return None\n with open(html_name, 'wt', encoding='utf-8') as f:\n f.write(html)\n return None\n else:\n return html\n\n\n<mask token>\n\n\ndef is_image_file(name):\n return os.path.splitext(name)[1].lower() in ('.jpg', '.jpeg', '.png',\n '.gif', '.bmp', '.webp', 
'.tif')\n\n\n<mask token>\n\n\ndef is_media(name):\n return is_image_file(name) or is_video_file(name)\n\n\n<mask token>\n\n\ndef date_from_name(name):\n if (match := re.search('(?:\\\\D|^)(\\\\d{8})(?:\\\\D|$)', name, re.ASCII)):\n digits = match.group(1)\n if validate_date(digits):\n return digits\n return None\n\n\ndef date_from_item(filename):\n if (date := date_from_name(filename)):\n return date\n else:\n timestamp = os.path.getmtime(filename)\n return datetime.datetime.fromtimestamp(timestamp).strftime('%Y%m%d')\n\n\ndef time_from_name(name):\n if (match := re.search('(?:\\\\D|^)(\\\\d{8})\\\\D(\\\\d{6})(?:\\\\D|$)', name,\n re.ASCII)):\n digits = match.group(2)\n hour, minute, second = int(digits[0:2]), int(digits[2:4]), int(digits\n [4:6])\n if 0 <= hour < 24 and 0 <= minute < 60 and 0 <= second < 60:\n return digits\n return None\n\n\ndef time_from_item(filename):\n if (time := time_from_name(filename)):\n return time\n else:\n timestamp = os.path.getmtime(filename)\n return datetime.datetime.fromtimestamp(timestamp).strftime('%H%M%S')\n\n\n<mask token>\n\n\ndef get_image_info(filename):\n date = date_from_item(filename)\n time = time_from_item(filename)\n img = Image.open(filename)\n width, height = img.size\n size = round(os.path.getsize(filename) / 1000000.0, 1)\n return (date, time, width, height, size\n ), f'{date} {time}, dim={width}x{height}, {size} MB'\n\n\ndef get_video_info(filename, info_fullname):\n if os.path.exists(info_fullname):\n with open(info_fullname) as f:\n info = f.readline().split()\n date, time, width, height, size, duration, fps = info[0], info[1], int(\n info[2]), int(info[3]), float(info[4]), int(info[5]), float(info[6]\n )\n formatted_info = format_video_info(date, time, width, height, size,\n duration, fps)\n return (date, time, width, height, size, duration, fps), formatted_info\n else:\n info, formatted_info = make_video_info(filename, info_fullname)\n with open(info_fullname, 'wt') as f:\n print(' '.join([str(_) for _ in 
info]), file=f)\n return info, formatted_info\n\n\ndef make_video_info(filename, info_fullname):\n date = date_from_item(filename)\n time = time_from_item(filename)\n command = [*FFPROBE_CMD.split(), filename]\n try:\n output = check_output(command, stderr=STDOUT).decode()\n width, height, fps, duration = parse_ffprobe_output(output)\n size = round(os.path.getsize(filename) / 1000000.0, 1)\n output = format_video_info(date, time, width, height, size,\n duration, fps)\n except CalledProcessError as e:\n output = e.output.decode()\n warning(output)\n raise\n return (date, time, width, height, size, duration, fps), output\n\n\ndef parse_ffprobe_output(ffprobe_output):\n match = re.match(\n '(\\\\d+),(\\\\d+),(\\\\d+)/(\\\\d+),(\\\\d+/\\\\d+).*\\\\s(\\\\d+\\\\.\\\\d+)',\n ffprobe_output, re.DOTALL)\n width = int(match.group(1))\n height = int(match.group(2))\n fps = round(int(match.group(3)) / int(match.group(4)), 1)\n duration = round(float(match.group(6)))\n return width, height, fps, duration\n\n\ndef format_video_info(date, time, width, height, size, duration, fps):\n return (\n f'{date} {time}, dim={width}x{height}, {format_duration(duration)}, fps={fps}, {size} MB'\n )\n\n\n<mask token>\n\n\ndef thumbname(name, key):\n return key + '-' + name + '.jpg'\n\n\ndef size_thumbnail(width, height, maxdim):\n if width >= height:\n return maxdim, int(round(maxdim * height / width))\n else:\n return int(round(maxdim * width / height)), maxdim\n\n\ndef make_thumbnail_image(args, image_name, thumb_name, size):\n if os.path.exists(thumb_name) and args.forcethumb is False:\n pass\n else:\n print('Making thumbnail:', thumb_name)\n create_thumbnail_image(image_name, thumb_name, size)\n\n\ndef create_thumbnail_image(image_name, thumb_name, size):\n imgobj = Image.open(image_name)\n if imgobj.mode != 'RGBA' and image_name.endswith('.jpg') and not (\n image_name.endswith('.gif') and imgobj.info.get('transparency')):\n imgobj = imgobj.convert('RGBA')\n imgobj.thumbnail(size, 
Image.LANCZOS)\n imgobj = imgobj.convert('RGB')\n imgobj.save(thumb_name)\n\n\ndef make_thumbnail_video(args, video_name, thumb_name, size, duration):\n if os.path.exists(thumb_name) and args.forcethumb is False:\n pass\n else:\n print('Making thumbnail:', thumb_name)\n create_thumbnail_video(args, video_name, thumb_name, size, duration)\n\n\n<mask token>\n\n\ndef create_thumbnail_video(args, filename, thumbname, size, duration):\n delay = min(duration - 1, args.thumbnails.thumbdelay)\n sizearg = '%dx%d' % size\n command = (\n 'ffmpeg -y -v error -itsoffset -%d -i \"%s\" -vcodec mjpeg -vframes 1 -an -f rawvideo -s %s \"%s\"'\n )\n command = command % (delay, filename, sizearg, thumbname)\n result = os.system(command)\n try:\n img1 = Image.open(thumbname)\n except:\n warning('Unable to save thumbnail for', filename)\n return\n img2 = Image.open(io.BytesIO(base64.b64decode(VIDEO_ICON)))\n width, height = img1.size\n img1.paste(img2, (6, height - 20 - 6), None)\n img1.save(thumbname)\n\n\n<mask token>\n\n\ndef create_thumbnail_subdir(subdir_name, thumb_name, size, items, thumbdir):\n\n def size_thumbnail(width, height, xmax, ymax):\n width2 = xmax\n height2 = int(round(xmax * height / width))\n if height2 < ymax:\n width2 = int(round(ymax * width / height))\n height2 = ymax\n return width2, height2\n thumblist = [os.path.basename(item.thumb) for item in items]\n widthnum, heightnum, width, height, offsetx, offsety = mosaic_geometry(size\n , thumblist)\n thumbnum = widthnum * heightnum\n img = Image.new('RGB', size, SUBDIR_BACKCOL)\n for ind, thumb in enumerate(thumblist[:min(thumbnum, len(thumblist))]):\n row = ind // widthnum\n col = ind % widthnum\n img2 = Image.open(os.path.join(thumbdir, thumb))\n w, h = size_thumbnail(*img2.size, width[col], height[row])\n cropdim = (w - width[col]) // 2, (h - height[row]) // 2, (w - width\n [col]) // 2 + width[col], (h - height[row]) // 2 + height[row]\n img2 = img2.resize((w, h), Image.LANCZOS)\n img2 = img2.crop(cropdim)\n 
img.paste(img2, (offsetx[col], offsety[row]))\n if os.path.exists(thumb_name):\n imgref = Image.open(thumb_name)\n byteio = io.BytesIO()\n img.save(byteio, 'JPEG')\n byteio.seek(0)\n imgnew = Image.open(byteio)\n diff = ImageChops.difference(imgnew, imgref)\n if diff.getbbox() is None:\n return\n img.save(thumb_name)\n\n\ndef mosaic_geometry(size, thumblist):\n if len(thumblist) == 1:\n widthnum = 1\n heightnum = 1\n elif len(thumblist) <= 3:\n widthnum = 1\n heightnum = 2\n elif len(thumblist) <= 8:\n widthnum = 2\n heightnum = 2\n else:\n widthnum = 3\n heightnum = 3\n if widthnum == 1:\n width = [size[0] - 2]\n else:\n width = [size[0] // widthnum - 2] * (widthnum - 1)\n width.append(size[0] - (1 + sum(width) + 2 * len(width) + 1))\n if heightnum == 1:\n height = [size[1] - 2]\n else:\n height = [size[1] // heightnum - 2] * (heightnum - 1)\n height.append(size[1] - (1 + sum(height) + 2 * len(height) + 1))\n offsetx = [1]\n for w in width[:-1]:\n offsetx.append(offsetx[-1] + w + 2)\n offsety = [1]\n for h in height[:-1]:\n offsety.append(offsety[-1] + h + 2)\n return widthnum, heightnum, width, height, offsetx, offsety\n\n\n<mask token>\n\n\ndef list_of_htmlfiles_in_items(itemlist):\n htmlist = list()\n for item in itemlist:\n if type(item) == PostSubdir:\n htmlist.append(item.htmname)\n htmlist.extend(list_of_htmlfiles_in_items(item.sublist))\n return htmlist\n\n\ndef list_of_thumbnails(posts, diary=False):\n thumblist = list()\n for post in posts:\n thumblist.extend(list_of_thumbnails_in_items(post.medias))\n if diary is False:\n thumblist.extend(list_of_thumbnails_in_items(post.dcim))\n return thumblist\n\n\ndef list_of_thumbnails_in_items(itemlist):\n thumblist = list()\n for item in itemlist:\n if type(item) == PostSubdir:\n thumblist.append(os.path.basename(item.thumb))\n thumblist.extend(list_of_thumbnails_in_items(item.sublist))\n else:\n thumblist.append(os.path.basename(item.thumb))\n return thumblist\n\n\ndef purge_htmlfiles(args, posts):\n \"\"\"\n 
Purge root dir from irrelevant html files\n \"\"\"\n htmlist = list_of_htmlfiles(args, posts)\n html_to_remove = list()\n for fullname in glob.glob(os.path.join(args.root, '*.htm*')):\n if fullname not in htmlist:\n html_to_remove.append(fullname)\n if len(html_to_remove) > args.thumbnails.threshold_htmlfiles:\n inpt = 'x'\n while inpt not in 'yn':\n inpt = input(\n f'{len(html_to_remove)} html files to remove. Continue [y|n]? '\n ).lower()\n if inpt == 'n':\n return\n for name in html_to_remove:\n print('Removing html files', name)\n os.remove(name)\n\n\ndef purge_thumbnails(args, thumbdir, posts, diary=False):\n \"\"\"\n Purge thumbnail dir from irrelevant thumbnails\n \"\"\"\n thumblist = list_of_thumbnails(posts, diary)\n thumbs_to_remove = list()\n for fullname in glob.glob(os.path.join(thumbdir, '*.jpg')):\n if os.path.basename(fullname) not in thumblist:\n thumbs_to_remove.append(fullname)\n if len(thumbs_to_remove) > args.thumbnails.threshold_thumbs:\n inpt = 'x'\n while inpt not in 'yn':\n inpt = input(\n f'{len(thumbs_to_remove)} thumbnails to remove. Continue [y|n]? 
'\n ).lower()\n if inpt == 'n':\n return\n for name in thumbs_to_remove:\n print('Removing thumbnail', name)\n os.remove(name)\n info_fullname = os.path.splitext(name)[0] + '.info'\n if os.path.exists(info_fullname):\n os.remove(info_fullname)\n\n\ndef is_media_within_dates(fullname, dates):\n if is_media(fullname):\n if type(dates) == tuple:\n return dates[0] <= date_from_item(fullname) <= dates[1]\n else:\n return True\n else:\n return False\n\n\ndef sorted_listdir(filelist):\n like_windows_explorer = True\n if not filelist:\n return filelist\n if like_windows_explorer:\n maxlen = max(len(os.path.splitext(name)[0]) for name in filelist)\n\n def keyfunc(name):\n root, ext = os.path.splitext(name.lower())\n return root.ljust(maxlen, ' ') + ext\n else:\n keyfunc = str.lower\n return sorted(filelist, key=keyfunc)\n\n\ndef list_of_files(sourcedir, recursive):\n \"\"\"\n Return the list of full paths for files in source directory\n \"\"\"\n result = list()\n if recursive is False:\n listdir = sorted_listdir(os.listdir(sourcedir))\n if '.nomedia' not in listdir:\n for basename in listdir:\n result.append(os.path.join(sourcedir, basename))\n else:\n for root, dirs, files in os.walk(sourcedir):\n if '.nomedia' not in files:\n for basename in sorted_listdir(files):\n result.append(os.path.join(root, basename))\n return result\n\n\ndef list_of_medias(args, sourcedir, recursive):\n \"\"\"\n Return the list of full paths for pictures and movies in source directory\n \"\"\"\n files = list_of_files(sourcedir, recursive)\n return [_ for _ in files if is_media_within_dates(_, args.dates)]\n\n\n<mask token>\n\n\ndef dispatch_post_items(list_of_post_items):\n subdirs = [_ for _ in list_of_post_items if type(_) is PostSubdir]\n medias = [_ for _ in list_of_post_items if type(_) is not PostSubdir]\n return subdirs, medias\n\n\ndef create_item(args, media_fullname, sourcedir, thumbdir, key, thumbmax):\n if os.path.isfile(media_fullname):\n if is_image_file(media_fullname):\n return 
create_item_image(args, media_fullname, sourcedir,\n thumbdir, key, thumbmax)\n else:\n return create_item_video(args, media_fullname, sourcedir,\n thumbdir, key, thumbmax)\n else:\n return create_item_subdir(args, media_fullname, sourcedir, thumbdir,\n key, thumbmax)\n\n\ndef create_item_image(args, media_fullname, sourcedir, thumbdir, key, thumbmax\n ):\n media_basename = os.path.basename(media_fullname)\n media_relname = relative_name(media_fullname, sourcedir)\n thumb_basename = thumbname(media_relname, key)\n thumb_fullname = os.path.join(thumbdir, thumb_basename)\n try:\n info, infofmt = get_image_info(media_fullname)\n infofmt = media_basename + ': ' + infofmt\n thumbsize = size_thumbnail(info[2], info[3], thumbmax)\n make_thumbnail_image(args, media_fullname, thumb_fullname, thumbsize)\n return PostImage(None, media_fullname, '/'.join((args.thumbrep,\n thumb_basename)), thumbsize, infofmt)\n except PIL.UnidentifiedImageError:\n warning('Unable to read image', media_fullname)\n return None\n\n\ndef create_item_video(args, media_fullname, sourcedir, thumbdir, key, thumbmax\n ):\n media_basename = os.path.basename(media_fullname)\n media_relname = relative_name(media_fullname, sourcedir)\n thumb_basename = thumbname(media_relname, key)\n thumb_fullname = os.path.join(thumbdir, thumb_basename)\n info_fullname = os.path.splitext(thumb_fullname)[0] + '.info'\n try:\n info, infofmt = get_video_info(media_fullname, info_fullname)\n infofmt = media_basename + ': ' + infofmt\n thumbsize = size_thumbnail(info[2], info[3], thumbmax)\n make_thumbnail_video(args, media_fullname, thumb_fullname,\n thumbsize, duration=info[5])\n return PostVideo(None, media_fullname, '/'.join((args.thumbrep,\n thumb_basename)), thumbsize, infofmt)\n except CalledProcessError:\n warning('Unable to read video', media_fullname)\n return None\n\n\n<mask token>\n\n\ndef relative_name(media_fullname, sourcedir):\n \"\"\"\n 
/Gilles/Dev/journal/tests/subdir/deeper2/deepest/OCT_20000112_000004.jpg\n -->\n deeper2_deepest_OCT_20000112_000004.jpg\n\n /Gilles/Dev/journal/tests/subdir/deeper2/deepest\n -->\n deeper2_deepest\n \"\"\"\n x = os.path.relpath(media_fullname, sourcedir)\n x = x.replace('\\\\', '_').replace('/', '_').replace('#', '_')\n return x\n\n\n<mask token>\n\n\ndef make_posts_from_diary(args):\n md_filename = os.path.join(args.root, 'index.md')\n if os.path.exists(md_filename):\n title, posts = parse_markdown(md_filename)\n else:\n error('File not found', md_filename)\n for post in posts:\n for media in post.medias:\n media_fullname = os.path.join(args.root, media.uri)\n item = create_item(args, media_fullname, args.root, args.\n thumbdir, 'post', 400)\n media.thumb = item.thumb\n media.thumbsize = item.thumbsize\n media.descr = item.descr\n return title, posts\n\n\ndef create_items_by_date(args, medias, posts):\n if args.dates == 'diary':\n required_dates = {post.date for post in posts}\n else:\n required_dates = {date_from_item(media) for media in medias}\n if type(args.dates) == tuple:\n date1, date2 = args.dates\n required_dates = {date for date in required_dates if date1 <=\n date <= date2}\n bydate = defaultdict(list)\n for media_fullname in medias:\n date = date_from_item(media_fullname)\n if date in required_dates:\n item = create_item(args, media_fullname, args.sourcedir, args.\n thumbdir, 'dcim', 300)\n if item:\n bydate[date].append(item)\n for date, liste in bydate.items():\n liste.sort(key=lambda item: time_from_item(item.uri))\n return bydate\n\n\n<mask token>\n\n\ndef make_posts_from_subdir(args, dirname):\n if args.bydir is False:\n medias_ext = list_of_medias(args, dirname, args.recursive)\n else:\n medias_ext = list_of_medias_ext(args, dirname)\n postmedias = list()\n for item in medias_ext:\n postmedia = create_item(args, item, args.sourcedir, args.thumbdir,\n 'dcim', 300)\n if postmedia is not None:\n postmedias.append(postmedia)\n post = 
Post(date='00000000', text='', medias=[])\n post.dcim = postmedias\n posts = [post]\n title = os.path.basename(args.sourcedir) or os.path.splitdrive(args.\n sourcedir)[0]\n return title, posts\n\n\n<mask token>\n\n\ndef create_gallery(args):\n title, posts = make_posts(args, args.sourcedir)\n print_html(args, posts, title, os.path.join(args.dest, args.rootname),\n 'regular')\n purge_htmlfiles(args, posts)\n if args.diary and not args.sourcedir:\n purge_thumbnails(args, args.thumbdir, posts, diary=True)\n else:\n purge_thumbnails(args, args.thumbdir, posts)\n\n\ndef create_diary(args):\n medias = list_of_medias(args, args.sourcedir, args.recursive)\n if args.dates == 'diary':\n assert 0\n else:\n required_dates = {date_from_item(media) for media in medias}\n if type(args.dates) == tuple:\n date1, date2 = args.dates\n required_dates = {date for date in required_dates if date1 <=\n date <= date2}\n title = args.sourcedir\n posts = list()\n for date in sorted(required_dates):\n posts.append(Post.from_date(date))\n os.makedirs(args.root, exist_ok=True)\n print_markdown(posts, title, os.path.join(args.root, 'index.md'))\n\n\ndef online_images_url(args):\n try:\n if args.urlblogger.startswith('http:') or args.urlblogger.startswith(\n 'https:'):\n with urlopen(args.urlblogger) as u:\n buffer = u.read()\n else:\n with open(args.urlblogger, 'rb') as f:\n buffer = f.read()\n except:\n error('Unable to read url', args.urlblogger)\n buffer = buffer.decode('utf-8')\n online_images = dict()\n for match in re.finditer('<div class=\"separator\"((?!<div).)*?</div>',\n buffer, flags=re.DOTALL):\n div_separator = match.group(0)\n div_separator = div_separator.replace(' ', '')\n elem_div = objectify.fromstring(div_separator)\n for elem_a in elem_div.iterchildren(tag='a'):\n href = elem_a.get('href')\n thumb = elem_a.img.get('src')\n online_images[os.path.basename(href)] = href, thumb\n online_videos = list()\n for match in re.finditer(\n '<iframe 
allowfullscreen=\"allowfullscreen\".*?</iframe>', buffer,\n flags=re.DOTALL):\n iframe = match.group(0)\n online_videos.append(iframe)\n return online_images, online_videos\n\n\ndef compare_image_buffers(imgbuf1, imgbuf2):\n \"\"\"\n return True if images read on file are identical, False otherwise\n \"\"\"\n with io.BytesIO(imgbuf1) as imgio1, io.BytesIO(imgbuf2) as imgio2:\n img1 = Image.open(imgio1)\n img2 = Image.open(imgio2)\n diff = ImageChops.difference(img1, img2)\n return not diff.getbbox()\n\n\ndef check_images(args, posts, online_images):\n result = True\n for post in posts:\n for media in post.medias:\n if type(media) is PostImage:\n if media.basename in online_images:\n with open(os.path.join(args.root, media.uri), 'rb') as f:\n imgbuf1 = f.read()\n try:\n with urlopen(online_images[media.basename][0]) as u:\n imgbuf2 = u.read()\n except FileNotFoundError:\n print('File not found', online_images[media.\n basename][0])\n next\n if compare_image_buffers(imgbuf1, imgbuf2) is False:\n print('Files are different, upload', media.basename)\n elif 1:\n print('File already online', media.basename)\n else:\n print('File is absent, upload', media.basename)\n result = False\n elif type(media) is PostVideo:\n print('Video not checked', media.basename)\n else:\n assert False\n return result\n\n\ndef compose_blogger_html(args, title, posts, imgdata, online_videos):\n \"\"\" Compose html with blogger image urls\n \"\"\"\n for post in posts:\n for media in post.medias:\n if type(media) is PostImage:\n if media.uri not in imgdata:\n print('Image missing: ', media.uri)\n else:\n img_url, resized_url = imgdata[media.uri]\n media.uri = img_url\n media.resized_url = resized_url\n elif type(media) is PostVideo:\n if not online_videos:\n print('Video missing: ', media.uri)\n else:\n media.iframe = online_videos[0]\n del online_videos[0]\n else:\n assert False\n return print_html(args, posts, title, '', target='blogger')\n\n\ndef prepare_for_blogger(args):\n \"\"\"\n Export 
blogger html to clipboard.\n If --full, export complete html, otherwise export html extract ready to\n paste into blogger edit mode.\n \"\"\"\n title, posts = parse_markdown(os.path.join(args.root, 'index.md'))\n online_images, online_videos = online_images_url(args)\n if args.check_images and check_images(args, posts, online_images) is False:\n pass\n html = compose_blogger_html(args, title, posts, online_images,\n online_videos)\n if args.full is False:\n html = re.search('<body>(.*)?</body>', html, flags=re.DOTALL).group(1)\n html = re.sub('<script>.*?</script>', '', html, flags=re.DOTALL)\n html = STYLE.replace('%%', '%') + html\n if args.dest:\n with open(args.dest, 'wt', encoding='utf-8') as f:\n f.write(html)\n else:\n clipboard.copy(html)\n\n\ndef idempotence(args):\n \"\"\"\n For testing identity between a diary file and the fle obtained after reading\n and printing it. See testing.\n \"\"\"\n title, posts = parse_markdown(os.path.join(args.root, 'index.md'))\n print_markdown(posts, title, os.path.join(args.dest, 'index.md'))\n\n\n<mask token>\n\n\nclass MyConfigParser(ConfigParser):\n \"\"\"Add input checking.\"\"\"\n\n def __init__(self):\n ConfigParser.__init__(self, inline_comment_prefixes=(';',))\n\n def error(self, section, entry):\n error('Missing or incorrect config value:', '[%s]%s' % (section, entry)\n )\n\n def getint(self, section, entry, default=None):\n try:\n if default is None:\n return ConfigParser.getint(self, section, entry)\n else:\n return ConfigParser.getint(self, section, entry, raw=True,\n vars=None, fallback=default)\n except Exception as e:\n print(e)\n self.error(section, entry)\n\n def getboolean(self, section, entry, default=None):\n try:\n if default is None:\n return ConfigParser.getboolean(self, section, entry)\n else:\n return ConfigParser.getboolean(self, section, entry, raw=\n True, vars=None, fallback=default)\n except Exception as e:\n print(e)\n self.error(section, entry)\n\n\ndef configfilename(params):\n return 
os.path.join(params.root, '.config.ini')\n\n\ndef createconfig(config_filename):\n with open(config_filename, 'wt') as f:\n f.writelines(CONFIG_DEFAULTS)\n\n\ndef read_config(params):\n config_filename = configfilename(params)\n try:\n if not os.path.exists(config_filename) or params.resetcfg:\n createconfig(config_filename)\n except:\n error('Error creating configuration file')\n try:\n getconfig(params, config_filename)\n except Exception as e:\n error('Error reading configuration file.', str(e), 'Use --resetcfg')\n\n\ndef getconfig(options, config_filename):\n\n\n class Section:\n pass\n options.source = Section()\n options.thumbnails = Section()\n options.photobox = Section()\n config = MyConfigParser()\n config.read(config_filename)\n options.source.sourcedir = config.get('source', 'sourcedir')\n options.source.bydir = config.getboolean('source', 'bydir')\n options.source.bydate = config.getboolean('source', 'bydate')\n options.source.diary = config.getboolean('source', 'diary')\n options.source.recursive = config.getboolean('source', 'recursive')\n options.source.dates = config.get('source', 'dates')\n options.source.github_pages = config.getboolean('source',\n 'github_pages', default=False)\n options.thumbnails.media_description = config.getboolean('thumbnails',\n 'media_description')\n options.thumbnails.subdir_caption = config.getboolean('thumbnails',\n 'subdir_caption')\n options.thumbnails.thumbdelay = config.getint('thumbnails', 'thumbdelay')\n options.thumbnails.threshold_thumbs = config.getint('thumbnails',\n 'threshold_thumbs')\n options.thumbnails.threshold_htmlfiles = config.getint('thumbnails',\n 'threshold_htmlfiles', default=3)\n options.photobox.loop = config.getboolean('photobox', 'loop')\n options.photobox.thumbs = config.getboolean('photobox', 'thumbs')\n options.photobox.autoplay = config.getboolean('photobox', 'autoplay')\n options.photobox.time = config.getint('photobox', 'time')\n options.photobox.zoomable = config.getboolean('photobox', 
'zoomable')\n options.photobox.rotatable = config.getboolean('photobox', 'rotatable')\n options.photobox.wheelNextPrev = config.getboolean('photobox',\n 'wheelNextPrev')\n\n\n<mask token>\n\n\ndef setconfig_cmd(args):\n config_filename = configfilename(args)\n setconfig(config_filename, *args.setcfg)\n\n\ndef update_config(args):\n updates = ('sourcedir', args.sourcedir), ('bydir', BOOL[args.bydir]), (\n 'bydate', BOOL[args.bydate]), ('diary', BOOL[args.diary]), ('recursive'\n , BOOL[args.recursive]), ('dates', args.dates), ('github_pages',\n BOOL[args.github_pages])\n cfgname = configfilename(args)\n with open(cfgname) as f:\n cfglines = [_.strip() for _ in f.readlines()]\n for key, value in updates:\n for iline, line in enumerate(cfglines):\n if line.startswith(key):\n cfglines[iline] = f'{key} = {value}'\n break\n with open(cfgname, 'wt') as f:\n for line in cfglines:\n print(line, file=f)\n\n\ndef warning(*msg):\n print(colorama.Fore.YELLOW + colorama.Style.BRIGHT + ' '.join(msg),\n colorama.Style.RESET_ALL)\n\n\n<mask token>\n\n\ndef errorcode(msg):\n return ERRORS.splitlines().index(msg) + 1\n\n\ndef error(*msg):\n print(colorama.Fore.RED + colorama.Style.BRIGHT + ' '.join(msg),\n colorama.Style.RESET_ALL)\n sys.exit(errorcode(msg[0]))\n\n\n<mask token>\n\n\ndef setup_part1(args):\n \"\"\"\n Made before reading config file (config file located in args.root).\n Check and normalize root path.\n \"\"\"\n args.rootarg = args.root\n rootext = os.path.splitext(args.rootarg)[1]\n if rootext == '':\n pass\n else:\n args.root = os.path.dirname(args.root)\n if args.root:\n args.root = os.path.abspath(args.root)\n if not os.path.isdir(args.root):\n if args.gallery:\n os.mkdir(args.root)\n else:\n error('Directory not found', args.root)\n\n\ndef setup_part2(args):\n \"\"\"\n Made after reading config file.\n Check for ffmpeg in path.\n Create .thumbnails dir if necessary and create .nomedia in it.\n Copy photobox file to destination dir.\n Handle priority between command 
line and config file.\n \"\"\"\n if args.update:\n args.sourcedir = args.source.sourcedir\n args.bydir = args.source.bydir\n args.bydate = args.source.bydate\n args.diary = args.source.diary\n args.recursive = args.source.recursive\n args.dates = args.source.dates\n args.github_pages = args.source.github_pages\n elif args.gallery:\n args.source.sourcedir = args.sourcedir\n args.source.bydir = args.bydir\n args.source.bydate = args.bydate\n args.source.diary = args.diary\n args.source.recursive = args.recursive\n args.source.dates = args.dates\n args.source.github_pages = args.github_pages\n update_config(args)\n if args.github_pages:\n args.html_suffix = '.html'\n else:\n args.html_suffix = '.htm'\n rootext = os.path.splitext(args.rootarg)[1]\n if rootext:\n args.rootname = os.path.basename(args.rootarg)\n else:\n args.rootname = 'index' + args.html_suffix\n if args.sourcedir:\n args.sourcedir = os.path.abspath(args.sourcedir)\n if os.path.splitdrive(args.sourcedir)[0]:\n drive, rest = os.path.splitdrive(args.sourcedir)\n args.sourcedir = drive.upper() + rest\n if not os.path.isdir(args.sourcedir):\n error('Directory not found', args.sourcedir)\n elif args.gallery and args.diary is False and args.update is None:\n error('Directory not found', 'Use --sourcedir')\n if args.dest:\n args.dest = os.path.abspath(args.dest)\n if args.dest is None:\n args.dest = args.root\n if args.blogger and args.urlblogger is None:\n error('No blogger url (--url)')\n if args.gallery or args.update:\n for exe in ('ffmpeg', 'ffprobe'):\n try:\n check_output([exe, '-version'])\n except FileNotFoundError:\n error('File not found', exe)\n if args.github_pages:\n args.thumbrep = 'thumbnails'\n else:\n args.thumbrep = '.thumbnails'\n args.thumbdir = os.path.join(args.dest, args.thumbrep)\n if not os.path.exists(args.thumbdir):\n os.mkdir(args.thumbdir)\n open(os.path.join(args.thumbdir, '.nomedia'), 'a').close()\n favicondst = os.path.join(args.dest, 'favicon.ico')\n if not 
os.path.isfile(favicondst):\n faviconsrc = os.path.join(os.path.dirname(__file__), 'favicon.ico')\n shutil.copyfile(faviconsrc, favicondst)\n photoboxdir = os.path.join(args.dest, 'photobox')\n if not os.path.exists(photoboxdir):\n photoboxsrc = os.path.join(os.path.dirname(__file__), 'photobox')\n shutil.copytree(photoboxsrc, photoboxdir)\n if args.dates:\n if not (args.gallery or args.create):\n pass\n if args.dates == 'source':\n pass\n elif args.dates == 'diary':\n if args.create:\n error('Incorrect date format', args.dates)\n elif re.match('\\\\d+-\\\\d+', args.dates):\n date1, date2 = args.dates.split('-')\n if validate_date(date1) and validate_date(date2):\n args.dates = date1, date2\n else:\n error('Incorrect date format', args.dates)\n else:\n error('Incorrect date format', args.dates)\n\n\ndef main(argstring=None):\n colorama.init()\n args = parse_command_line(argstring)\n setup_part1(args)\n read_config(args)\n setup_part2(args)\n try:\n if args.gallery or args.update:\n create_gallery(args)\n elif args.create:\n create_diary(args)\n elif args.blogger:\n prepare_for_blogger(args)\n elif args.idem:\n idempotence(args)\n elif args.setcfg:\n setconfig_cmd(args)\n except KeyboardInterrupt:\n warning('Interrupted by user.')\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass Post:\n\n def __init__(self, date, text, medias):\n self.date = date\n self.text = text\n self.medias = medias\n self.dcim = []\n self.daterank = 0\n self.extra = False\n\n def __lt__(self, other):\n return self.date < other.date\n\n @classmethod\n def from_markdown(cls, post):\n m = re.match('\\\\[(\\\\d\\\\d\\\\d\\\\d/\\\\d\\\\d/\\\\d\\\\d)\\\\]\\\\n*', post[0])\n if m:\n date = m.group(1).replace('/', '')\n if not validate_date(date):\n error('Incorrect date value:', date)\n del post[0]\n else:\n error('No date in post', ' '.join(post))\n while post and not post[0].strip():\n del post[0]\n text = ''\n while post and not re.match('!?\\\\[\\\\]', post[0]):\n text += post[0]\n del post[0]\n text = re.sub('\\\\n\\\\n$', '\\n', text)\n medias = list()\n while post and (match := re.match('!?\\\\[\\\\]\\\\((.*)\\\\)', post[0])):\n media = match.group(1)\n caption = None\n del post[0]\n if post and not re.match('!?\\\\[\\\\]', post[0]):\n caption = post[0].strip()\n del post[0]\n if match.group(0)[0] == '!':\n medias.append(PostImage(caption, media))\n else:\n medias.append(PostVideo(caption, media))\n return cls(date, text, medias)\n\n @classmethod\n def from_date(cls, date):\n dt = datetime.datetime.strptime(date, '%Y%m%d')\n datetext = dt.strftime('%A %d %B %Y').capitalize()\n post = cls(date, text=datetext, medias=[])\n post.daterank = 1\n return post\n\n def to_html(self, args, target='regular'):\n if target == 'regular':\n if args.diary:\n return self.to_html_diary(args)\n else:\n return self.to_html_regular(args)\n if target == 'blogger':\n return self.to_html_blogger()\n\n def to_html_regular(self, args):\n html = list()\n if self.text:\n html.append(markdown.markdown(self.text))\n subdirs, dcim = dispatch_post_items(self.dcim)\n if self.dcim:\n html.append(SEP)\n for media in subdirs:\n html.append(media.to_html_dcim(args))\n if dcim:\n html.append(f'<div id=\"gallery-dcim-{self.date}-{self.daterank}\">')\n for media in 
dcim:\n html.append(media.to_html_dcim(args))\n html.append('</div>')\n html.append(SEP)\n return html\n\n def to_html_diary(self, args):\n html = list()\n if self.extra:\n html.append('<div class=\"extra\">')\n if self.text:\n html.append(markdown.markdown(self.text))\n if self.medias:\n html.append(f'<div id=\"gallery-blog-{self.date}-{self.daterank}\">')\n for media in self.medias:\n html.append(media.to_html_post(args))\n html.append('</div>')\n _, dcim = dispatch_post_items(self.dcim)\n if dcim:\n html.append(f'<div id=\"gallery-dcim-{self.date}-{self.daterank}\">')\n html.append(SEP)\n for media in dcim:\n html.append(media.to_html_dcim(args))\n html.append('</div>')\n html.append(SEP)\n if self.extra:\n html.append('</div>')\n return html\n\n def to_html_blogger(self):\n html = list()\n html.append(markdown.markdown(self.text))\n for image in self.medias:\n html.append(image.to_html_blogger())\n html.append(SEP)\n return html\n\n\nclass PostItem:\n\n def __init__(self, caption, uri, thumb=None, thumbsize=None, descr=''):\n self.caption = caption\n self.uri = uri\n self.basename = os.path.basename(uri)\n self.thumb = thumb\n self.thumbsize = thumbsize\n self.descr = descr\n self.resized_url = None\n\n\nclass PostImage(PostItem):\n\n def to_markdown(self):\n if not self.caption:\n return '' % (self.uri,)\n else:\n return '\\n%s' % (self.uri, self.caption)\n\n def to_html_post(self, args):\n descr = self.descr if args.thumbnails.media_description else ''\n if not self.caption:\n return IMGPOST % (self.uri, self.thumb, *self.thumbsize, descr)\n else:\n return IMGPOSTCAPTION % (self.uri, self.thumb, *self.thumbsize,\n descr, self.caption)\n\n def to_html_dcim(self, args):\n descr = self.descr if args.thumbnails.media_description else ''\n return IMGDCIM % (relative_url(self.uri, args.root), self.thumb, *\n self.thumbsize, descr)\n\n def to_html_blogger(self):\n if not self.caption:\n return BIMGPAT % (self.uri, self.resized_url)\n else:\n return 
f'{BIMGPAT}\\n{CAPTION_PAT}' % (self.uri, self.\n resized_url, self.caption)\n\n\nclass PostVideo(PostItem):\n\n def to_markdown(self):\n if not self.caption:\n return '[](%s)' % (self.uri,)\n else:\n return '[](%s)\\n%s' % (self.uri, self.caption)\n\n def to_html_post(self, args):\n descr = self.descr if args.thumbnails.media_description else ''\n if not self.caption:\n return VIDPOST % (self.uri, self.thumb, *self.thumbsize, descr)\n else:\n return VIDPOSTCAPTION % (self.uri, self.thumb, *self.thumbsize,\n descr, self.caption)\n\n def to_html_dcim(self, args):\n descr = self.descr if args.thumbnails.media_description else ''\n return VIDDCIM % (relative_url(self.uri, args.root), self.thumb, *\n self.thumbsize, descr)\n\n def to_html_blogger(self):\n x = f'<p style=\"text-align: center;\">{self.iframe}</p>'\n if not self.caption:\n return x\n else:\n return f'%s\\n{CAPTION_PAT}' % (x, self.caption)\n\n\nclass PostSubdir(PostItem):\n\n def to_html_dcim(self, args):\n basename = os.path.basename(self.htmname)\n posts = self.posts\n title = self.caption\n print_html(args, posts, title, self.htmname)\n if not self.caption:\n return DIRPOST % (basename, self.thumb, *self.thumbsize)\n else:\n return DIRPOSTCAPTION % (basename, self.thumb, *self.thumbsize,\n self.caption)\n\n\ndef relative_url(path, root):\n \"\"\"\n returns a normalized url to path relative from root\n \"\"\"\n try:\n url = os.path.relpath(path, root)\n except:\n error('Unable to make a relative url:', url, root)\n url = url.replace('\\\\', '/') if os.sep == '\\\\' else url\n return urllib.parse.quote(url)\n\n\ndef parse_markdown(filename):\n \"\"\"\n Generate Post objects from markdown. 
Date must be present in each post and\n posts must be ordrered by date.\n \"\"\"\n if not os.path.exists(filename):\n error('File not found', filename)\n posts = list()\n with open(filename, encoding='utf-8') as f:\n line = next(f)\n if line.startswith('# '):\n title = line[2:].strip()\n record = []\n next(f)\n else:\n title = None\n record = [line]\n for line in f:\n if not line.startswith('___'):\n record.append(line)\n else:\n posts.append(Post.from_markdown(record))\n record = []\n daterank = defaultdict(int)\n for post in posts:\n daterank[post.date] += 1\n post.daterank = daterank[post.date]\n for post1, post2 in zip(posts[:-1], posts[1:]):\n if post1.date > post2.date:\n error('Posts are not ordered', f'{post1.date} > {post2.date}')\n return title, posts\n\n\ndef print_markdown(posts, title, fullname):\n with open(fullname, 'wt', encoding='utf-8') as fdst:\n print(f'# {title}\\n', file=fdst)\n for post in posts:\n date = f'[{post.date[0:4]}/{post.date[4:6]}/{post.date[6:8]}]'\n print(date, file=fdst)\n if post.text:\n print(file=fdst)\n for line in post.text.splitlines():\n if not line:\n print(file=fdst)\n else:\n for chunk in textwrap.wrap(line, width=78):\n print(chunk, file=fdst)\n if post.medias:\n print(file=fdst)\n for media in post.medias:\n print(media.to_markdown(), file=fdst)\n print('______', file=fdst)\n\n\ndef compose_html_reduced(args, posts, title, target):\n html = list()\n html.append(START % title)\n for post in posts:\n for line in post.to_html(args, target):\n html.append(line.strip())\n html.append('')\n html.append(END)\n return html\n\n\ndef compose_html_full(args, posts, title, target):\n html = list()\n html.append(START % title)\n if args.diary:\n html.append(BUTTONS)\n for post in posts:\n for line in post.to_html(args, target):\n html.append(line.strip())\n html.append('')\n html.append('<script>')\n for post in posts:\n if post.medias:\n gallery_id = f'gallery-blog-{post.date}-{post.daterank}'\n html.append(gallery_call(args, 
gallery_id))\n if post.dcim:\n gallery_id = f'gallery-dcim-{post.date}-{post.daterank}'\n html.append(gallery_call(args, gallery_id))\n html.append('</script>')\n html.append(END)\n return html\n\n\ndef print_html_to_stream(args, posts, title, stream, target):\n if target == 'regular':\n for line in compose_html_full(args, posts, title, target):\n print(line, file=stream)\n else:\n for line in compose_html_reduced(args, posts, title, target):\n print(line, file=stream)\n\n\ndef print_html(args, posts, title, html_name, target='regular'):\n assert target in ('regular', 'blogger')\n with io.StringIO() as f:\n print_html_to_stream(args, posts, title, f, target)\n html = f.getvalue()\n if html_name:\n if os.path.exists(html_name):\n with open(html_name, 'rt', encoding='utf-8') as f:\n html0 = f.read()\n if html == html0:\n return None\n with open(html_name, 'wt', encoding='utf-8') as f:\n f.write(html)\n return None\n else:\n return html\n\n\n<mask token>\n\n\ndef is_image_file(name):\n return os.path.splitext(name)[1].lower() in ('.jpg', '.jpeg', '.png',\n '.gif', '.bmp', '.webp', '.tif')\n\n\ndef is_video_file(name):\n return os.path.splitext(name)[1].lower() in ('.mp4', '.webm', '.mkv',\n '.flv', '.m4v', '.avi', '.wmv', '.mts', '.vob', '.divx')\n\n\ndef is_media(name):\n return is_image_file(name) or is_video_file(name)\n\n\ndef validate_date(datestr):\n try:\n datetime.datetime.strptime(datestr, '%Y%m%d')\n return True\n except ValueError:\n return False\n\n\ndef date_from_name(name):\n if (match := re.search('(?:\\\\D|^)(\\\\d{8})(?:\\\\D|$)', name, re.ASCII)):\n digits = match.group(1)\n if validate_date(digits):\n return digits\n return None\n\n\ndef date_from_item(filename):\n if (date := date_from_name(filename)):\n return date\n else:\n timestamp = os.path.getmtime(filename)\n return datetime.datetime.fromtimestamp(timestamp).strftime('%Y%m%d')\n\n\ndef time_from_name(name):\n if (match := re.search('(?:\\\\D|^)(\\\\d{8})\\\\D(\\\\d{6})(?:\\\\D|$)', name,\n 
re.ASCII)):\n digits = match.group(2)\n hour, minute, second = int(digits[0:2]), int(digits[2:4]), int(digits\n [4:6])\n if 0 <= hour < 24 and 0 <= minute < 60 and 0 <= second < 60:\n return digits\n return None\n\n\ndef time_from_item(filename):\n if (time := time_from_name(filename)):\n return time\n else:\n timestamp = os.path.getmtime(filename)\n return datetime.datetime.fromtimestamp(timestamp).strftime('%H%M%S')\n\n\n<mask token>\n\n\ndef get_image_info(filename):\n date = date_from_item(filename)\n time = time_from_item(filename)\n img = Image.open(filename)\n width, height = img.size\n size = round(os.path.getsize(filename) / 1000000.0, 1)\n return (date, time, width, height, size\n ), f'{date} {time}, dim={width}x{height}, {size} MB'\n\n\ndef get_video_info(filename, info_fullname):\n if os.path.exists(info_fullname):\n with open(info_fullname) as f:\n info = f.readline().split()\n date, time, width, height, size, duration, fps = info[0], info[1], int(\n info[2]), int(info[3]), float(info[4]), int(info[5]), float(info[6]\n )\n formatted_info = format_video_info(date, time, width, height, size,\n duration, fps)\n return (date, time, width, height, size, duration, fps), formatted_info\n else:\n info, formatted_info = make_video_info(filename, info_fullname)\n with open(info_fullname, 'wt') as f:\n print(' '.join([str(_) for _ in info]), file=f)\n return info, formatted_info\n\n\ndef make_video_info(filename, info_fullname):\n date = date_from_item(filename)\n time = time_from_item(filename)\n command = [*FFPROBE_CMD.split(), filename]\n try:\n output = check_output(command, stderr=STDOUT).decode()\n width, height, fps, duration = parse_ffprobe_output(output)\n size = round(os.path.getsize(filename) / 1000000.0, 1)\n output = format_video_info(date, time, width, height, size,\n duration, fps)\n except CalledProcessError as e:\n output = e.output.decode()\n warning(output)\n raise\n return (date, time, width, height, size, duration, fps), output\n\n\ndef 
parse_ffprobe_output(ffprobe_output):\n match = re.match(\n '(\\\\d+),(\\\\d+),(\\\\d+)/(\\\\d+),(\\\\d+/\\\\d+).*\\\\s(\\\\d+\\\\.\\\\d+)',\n ffprobe_output, re.DOTALL)\n width = int(match.group(1))\n height = int(match.group(2))\n fps = round(int(match.group(3)) / int(match.group(4)), 1)\n duration = round(float(match.group(6)))\n return width, height, fps, duration\n\n\ndef format_video_info(date, time, width, height, size, duration, fps):\n return (\n f'{date} {time}, dim={width}x{height}, {format_duration(duration)}, fps={fps}, {size} MB'\n )\n\n\ndef format_duration(duration):\n mn = duration // 60\n sec = duration % 60\n if mn <= 59:\n return f'm:s={mn:02}:{sec:02}'\n else:\n hour = mn // 60\n mn = mn % 60\n return f'h:m:s={hour:02}:{mn:02}:{sec:02}'\n\n\ndef thumbname(name, key):\n return key + '-' + name + '.jpg'\n\n\ndef size_thumbnail(width, height, maxdim):\n if width >= height:\n return maxdim, int(round(maxdim * height / width))\n else:\n return int(round(maxdim * width / height)), maxdim\n\n\ndef make_thumbnail_image(args, image_name, thumb_name, size):\n if os.path.exists(thumb_name) and args.forcethumb is False:\n pass\n else:\n print('Making thumbnail:', thumb_name)\n create_thumbnail_image(image_name, thumb_name, size)\n\n\ndef create_thumbnail_image(image_name, thumb_name, size):\n imgobj = Image.open(image_name)\n if imgobj.mode != 'RGBA' and image_name.endswith('.jpg') and not (\n image_name.endswith('.gif') and imgobj.info.get('transparency')):\n imgobj = imgobj.convert('RGBA')\n imgobj.thumbnail(size, Image.LANCZOS)\n imgobj = imgobj.convert('RGB')\n imgobj.save(thumb_name)\n\n\ndef make_thumbnail_video(args, video_name, thumb_name, size, duration):\n if os.path.exists(thumb_name) and args.forcethumb is False:\n pass\n else:\n print('Making thumbnail:', thumb_name)\n create_thumbnail_video(args, video_name, thumb_name, size, duration)\n\n\n<mask token>\n\n\ndef create_thumbnail_video(args, filename, thumbname, size, duration):\n delay = 
min(duration - 1, args.thumbnails.thumbdelay)\n sizearg = '%dx%d' % size\n command = (\n 'ffmpeg -y -v error -itsoffset -%d -i \"%s\" -vcodec mjpeg -vframes 1 -an -f rawvideo -s %s \"%s\"'\n )\n command = command % (delay, filename, sizearg, thumbname)\n result = os.system(command)\n try:\n img1 = Image.open(thumbname)\n except:\n warning('Unable to save thumbnail for', filename)\n return\n img2 = Image.open(io.BytesIO(base64.b64decode(VIDEO_ICON)))\n width, height = img1.size\n img1.paste(img2, (6, height - 20 - 6), None)\n img1.save(thumbname)\n\n\ndef make_thumbnail_subdir(args, subdir_name, thumb_name, size, items, thumbdir\n ):\n print('Making thumbnail:', thumb_name)\n create_thumbnail_subdir(subdir_name, thumb_name, size, items, thumbdir)\n\n\ndef create_thumbnail_subdir(subdir_name, thumb_name, size, items, thumbdir):\n\n def size_thumbnail(width, height, xmax, ymax):\n width2 = xmax\n height2 = int(round(xmax * height / width))\n if height2 < ymax:\n width2 = int(round(ymax * width / height))\n height2 = ymax\n return width2, height2\n thumblist = [os.path.basename(item.thumb) for item in items]\n widthnum, heightnum, width, height, offsetx, offsety = mosaic_geometry(size\n , thumblist)\n thumbnum = widthnum * heightnum\n img = Image.new('RGB', size, SUBDIR_BACKCOL)\n for ind, thumb in enumerate(thumblist[:min(thumbnum, len(thumblist))]):\n row = ind // widthnum\n col = ind % widthnum\n img2 = Image.open(os.path.join(thumbdir, thumb))\n w, h = size_thumbnail(*img2.size, width[col], height[row])\n cropdim = (w - width[col]) // 2, (h - height[row]) // 2, (w - width\n [col]) // 2 + width[col], (h - height[row]) // 2 + height[row]\n img2 = img2.resize((w, h), Image.LANCZOS)\n img2 = img2.crop(cropdim)\n img.paste(img2, (offsetx[col], offsety[row]))\n if os.path.exists(thumb_name):\n imgref = Image.open(thumb_name)\n byteio = io.BytesIO()\n img.save(byteio, 'JPEG')\n byteio.seek(0)\n imgnew = Image.open(byteio)\n diff = ImageChops.difference(imgnew, imgref)\n 
if diff.getbbox() is None:\n return\n img.save(thumb_name)\n\n\ndef mosaic_geometry(size, thumblist):\n if len(thumblist) == 1:\n widthnum = 1\n heightnum = 1\n elif len(thumblist) <= 3:\n widthnum = 1\n heightnum = 2\n elif len(thumblist) <= 8:\n widthnum = 2\n heightnum = 2\n else:\n widthnum = 3\n heightnum = 3\n if widthnum == 1:\n width = [size[0] - 2]\n else:\n width = [size[0] // widthnum - 2] * (widthnum - 1)\n width.append(size[0] - (1 + sum(width) + 2 * len(width) + 1))\n if heightnum == 1:\n height = [size[1] - 2]\n else:\n height = [size[1] // heightnum - 2] * (heightnum - 1)\n height.append(size[1] - (1 + sum(height) + 2 * len(height) + 1))\n offsetx = [1]\n for w in width[:-1]:\n offsetx.append(offsetx[-1] + w + 2)\n offsety = [1]\n for h in height[:-1]:\n offsety.append(offsety[-1] + h + 2)\n return widthnum, heightnum, width, height, offsetx, offsety\n\n\n<mask token>\n\n\ndef list_of_htmlfiles_in_items(itemlist):\n htmlist = list()\n for item in itemlist:\n if type(item) == PostSubdir:\n htmlist.append(item.htmname)\n htmlist.extend(list_of_htmlfiles_in_items(item.sublist))\n return htmlist\n\n\ndef list_of_thumbnails(posts, diary=False):\n thumblist = list()\n for post in posts:\n thumblist.extend(list_of_thumbnails_in_items(post.medias))\n if diary is False:\n thumblist.extend(list_of_thumbnails_in_items(post.dcim))\n return thumblist\n\n\ndef list_of_thumbnails_in_items(itemlist):\n thumblist = list()\n for item in itemlist:\n if type(item) == PostSubdir:\n thumblist.append(os.path.basename(item.thumb))\n thumblist.extend(list_of_thumbnails_in_items(item.sublist))\n else:\n thumblist.append(os.path.basename(item.thumb))\n return thumblist\n\n\ndef purge_htmlfiles(args, posts):\n \"\"\"\n Purge root dir from irrelevant html files\n \"\"\"\n htmlist = list_of_htmlfiles(args, posts)\n html_to_remove = list()\n for fullname in glob.glob(os.path.join(args.root, '*.htm*')):\n if fullname not in htmlist:\n html_to_remove.append(fullname)\n if 
len(html_to_remove) > args.thumbnails.threshold_htmlfiles:\n inpt = 'x'\n while inpt not in 'yn':\n inpt = input(\n f'{len(html_to_remove)} html files to remove. Continue [y|n]? '\n ).lower()\n if inpt == 'n':\n return\n for name in html_to_remove:\n print('Removing html files', name)\n os.remove(name)\n\n\ndef purge_thumbnails(args, thumbdir, posts, diary=False):\n \"\"\"\n Purge thumbnail dir from irrelevant thumbnails\n \"\"\"\n thumblist = list_of_thumbnails(posts, diary)\n thumbs_to_remove = list()\n for fullname in glob.glob(os.path.join(thumbdir, '*.jpg')):\n if os.path.basename(fullname) not in thumblist:\n thumbs_to_remove.append(fullname)\n if len(thumbs_to_remove) > args.thumbnails.threshold_thumbs:\n inpt = 'x'\n while inpt not in 'yn':\n inpt = input(\n f'{len(thumbs_to_remove)} thumbnails to remove. Continue [y|n]? '\n ).lower()\n if inpt == 'n':\n return\n for name in thumbs_to_remove:\n print('Removing thumbnail', name)\n os.remove(name)\n info_fullname = os.path.splitext(name)[0] + '.info'\n if os.path.exists(info_fullname):\n os.remove(info_fullname)\n\n\ndef is_media_within_dates(fullname, dates):\n if is_media(fullname):\n if type(dates) == tuple:\n return dates[0] <= date_from_item(fullname) <= dates[1]\n else:\n return True\n else:\n return False\n\n\ndef sorted_listdir(filelist):\n like_windows_explorer = True\n if not filelist:\n return filelist\n if like_windows_explorer:\n maxlen = max(len(os.path.splitext(name)[0]) for name in filelist)\n\n def keyfunc(name):\n root, ext = os.path.splitext(name.lower())\n return root.ljust(maxlen, ' ') + ext\n else:\n keyfunc = str.lower\n return sorted(filelist, key=keyfunc)\n\n\ndef list_of_files(sourcedir, recursive):\n \"\"\"\n Return the list of full paths for files in source directory\n \"\"\"\n result = list()\n if recursive is False:\n listdir = sorted_listdir(os.listdir(sourcedir))\n if '.nomedia' not in listdir:\n for basename in listdir:\n result.append(os.path.join(sourcedir, basename))\n 
else:\n for root, dirs, files in os.walk(sourcedir):\n if '.nomedia' not in files:\n for basename in sorted_listdir(files):\n result.append(os.path.join(root, basename))\n return result\n\n\ndef list_of_medias(args, sourcedir, recursive):\n \"\"\"\n Return the list of full paths for pictures and movies in source directory\n \"\"\"\n files = list_of_files(sourcedir, recursive)\n return [_ for _ in files if is_media_within_dates(_, args.dates)]\n\n\n<mask token>\n\n\ndef dispatch_post_items(list_of_post_items):\n subdirs = [_ for _ in list_of_post_items if type(_) is PostSubdir]\n medias = [_ for _ in list_of_post_items if type(_) is not PostSubdir]\n return subdirs, medias\n\n\ndef create_item(args, media_fullname, sourcedir, thumbdir, key, thumbmax):\n if os.path.isfile(media_fullname):\n if is_image_file(media_fullname):\n return create_item_image(args, media_fullname, sourcedir,\n thumbdir, key, thumbmax)\n else:\n return create_item_video(args, media_fullname, sourcedir,\n thumbdir, key, thumbmax)\n else:\n return create_item_subdir(args, media_fullname, sourcedir, thumbdir,\n key, thumbmax)\n\n\ndef create_item_image(args, media_fullname, sourcedir, thumbdir, key, thumbmax\n ):\n media_basename = os.path.basename(media_fullname)\n media_relname = relative_name(media_fullname, sourcedir)\n thumb_basename = thumbname(media_relname, key)\n thumb_fullname = os.path.join(thumbdir, thumb_basename)\n try:\n info, infofmt = get_image_info(media_fullname)\n infofmt = media_basename + ': ' + infofmt\n thumbsize = size_thumbnail(info[2], info[3], thumbmax)\n make_thumbnail_image(args, media_fullname, thumb_fullname, thumbsize)\n return PostImage(None, media_fullname, '/'.join((args.thumbrep,\n thumb_basename)), thumbsize, infofmt)\n except PIL.UnidentifiedImageError:\n warning('Unable to read image', media_fullname)\n return None\n\n\ndef create_item_video(args, media_fullname, sourcedir, thumbdir, key, thumbmax\n ):\n media_basename = os.path.basename(media_fullname)\n 
media_relname = relative_name(media_fullname, sourcedir)\n thumb_basename = thumbname(media_relname, key)\n thumb_fullname = os.path.join(thumbdir, thumb_basename)\n info_fullname = os.path.splitext(thumb_fullname)[0] + '.info'\n try:\n info, infofmt = get_video_info(media_fullname, info_fullname)\n infofmt = media_basename + ': ' + infofmt\n thumbsize = size_thumbnail(info[2], info[3], thumbmax)\n make_thumbnail_video(args, media_fullname, thumb_fullname,\n thumbsize, duration=info[5])\n return PostVideo(None, media_fullname, '/'.join((args.thumbrep,\n thumb_basename)), thumbsize, infofmt)\n except CalledProcessError:\n warning('Unable to read video', media_fullname)\n return None\n\n\n<mask token>\n\n\ndef relative_name(media_fullname, sourcedir):\n \"\"\"\n /Gilles/Dev/journal/tests/subdir/deeper2/deepest/OCT_20000112_000004.jpg\n -->\n deeper2_deepest_OCT_20000112_000004.jpg\n\n /Gilles/Dev/journal/tests/subdir/deeper2/deepest\n -->\n deeper2_deepest\n \"\"\"\n x = os.path.relpath(media_fullname, sourcedir)\n x = x.replace('\\\\', '_').replace('/', '_').replace('#', '_')\n return x\n\n\ndef make_posts(args, dirname):\n if args.diary is True:\n if not args.sourcedir:\n return make_posts_from_diary(args)\n else:\n return make_posts_from_diary_and_dir(args)\n elif args.bydate is False:\n return make_posts_from_subdir(args, dirname)\n else:\n return make_posts_from_subdir_and_date(args, dirname)\n\n\ndef make_posts_from_diary(args):\n md_filename = os.path.join(args.root, 'index.md')\n if os.path.exists(md_filename):\n title, posts = parse_markdown(md_filename)\n else:\n error('File not found', md_filename)\n for post in posts:\n for media in post.medias:\n media_fullname = os.path.join(args.root, media.uri)\n item = create_item(args, media_fullname, args.root, args.\n thumbdir, 'post', 400)\n media.thumb = item.thumb\n media.thumbsize = item.thumbsize\n media.descr = item.descr\n return title, posts\n\n\ndef create_items_by_date(args, medias, posts):\n if 
args.dates == 'diary':\n required_dates = {post.date for post in posts}\n else:\n required_dates = {date_from_item(media) for media in medias}\n if type(args.dates) == tuple:\n date1, date2 = args.dates\n required_dates = {date for date in required_dates if date1 <=\n date <= date2}\n bydate = defaultdict(list)\n for media_fullname in medias:\n date = date_from_item(media_fullname)\n if date in required_dates:\n item = create_item(args, media_fullname, args.sourcedir, args.\n thumbdir, 'dcim', 300)\n if item:\n bydate[date].append(item)\n for date, liste in bydate.items():\n liste.sort(key=lambda item: time_from_item(item.uri))\n return bydate\n\n\n<mask token>\n\n\ndef make_posts_from_subdir(args, dirname):\n if args.bydir is False:\n medias_ext = list_of_medias(args, dirname, args.recursive)\n else:\n medias_ext = list_of_medias_ext(args, dirname)\n postmedias = list()\n for item in medias_ext:\n postmedia = create_item(args, item, args.sourcedir, args.thumbdir,\n 'dcim', 300)\n if postmedia is not None:\n postmedias.append(postmedia)\n post = Post(date='00000000', text='', medias=[])\n post.dcim = postmedias\n posts = [post]\n title = os.path.basename(args.sourcedir) or os.path.splitdrive(args.\n sourcedir)[0]\n return title, posts\n\n\ndef make_posts_from_subdir_and_date(args, dirname):\n if args.bydir is False:\n medias = list_of_medias(args, dirname, args.recursive)\n subdirs = []\n else:\n medias_ext = list_of_medias_ext(args, dirname)\n medias = [_ for _ in medias_ext if is_media(_)]\n subdirs = [_ for _ in medias_ext if not is_media(_)]\n posts = list()\n items = list()\n for media_fullname in subdirs:\n item = create_item(args, media_fullname, args.sourcedir, args.\n thumbdir, 'dcim', 300)\n if item:\n items.append(item)\n if items:\n post = Post(date='00000000', text='', medias=[])\n post.dcim = items\n posts.append(post)\n bydate = create_items_by_date(args, medias, posts)\n for date in sorted(bydate):\n post = Post.from_date(date)\n post.dcim = 
bydate[post.date]\n posts.append(post)\n title = os.path.basename(args.sourcedir) or os.path.splitdrive(args.\n sourcedir)[0]\n return title, posts\n\n\ndef create_gallery(args):\n title, posts = make_posts(args, args.sourcedir)\n print_html(args, posts, title, os.path.join(args.dest, args.rootname),\n 'regular')\n purge_htmlfiles(args, posts)\n if args.diary and not args.sourcedir:\n purge_thumbnails(args, args.thumbdir, posts, diary=True)\n else:\n purge_thumbnails(args, args.thumbdir, posts)\n\n\ndef create_diary(args):\n medias = list_of_medias(args, args.sourcedir, args.recursive)\n if args.dates == 'diary':\n assert 0\n else:\n required_dates = {date_from_item(media) for media in medias}\n if type(args.dates) == tuple:\n date1, date2 = args.dates\n required_dates = {date for date in required_dates if date1 <=\n date <= date2}\n title = args.sourcedir\n posts = list()\n for date in sorted(required_dates):\n posts.append(Post.from_date(date))\n os.makedirs(args.root, exist_ok=True)\n print_markdown(posts, title, os.path.join(args.root, 'index.md'))\n\n\ndef online_images_url(args):\n try:\n if args.urlblogger.startswith('http:') or args.urlblogger.startswith(\n 'https:'):\n with urlopen(args.urlblogger) as u:\n buffer = u.read()\n else:\n with open(args.urlblogger, 'rb') as f:\n buffer = f.read()\n except:\n error('Unable to read url', args.urlblogger)\n buffer = buffer.decode('utf-8')\n online_images = dict()\n for match in re.finditer('<div class=\"separator\"((?!<div).)*?</div>',\n buffer, flags=re.DOTALL):\n div_separator = match.group(0)\n div_separator = div_separator.replace(' ', '')\n elem_div = objectify.fromstring(div_separator)\n for elem_a in elem_div.iterchildren(tag='a'):\n href = elem_a.get('href')\n thumb = elem_a.img.get('src')\n online_images[os.path.basename(href)] = href, thumb\n online_videos = list()\n for match in re.finditer(\n '<iframe allowfullscreen=\"allowfullscreen\".*?</iframe>', buffer,\n flags=re.DOTALL):\n iframe = 
match.group(0)\n online_videos.append(iframe)\n return online_images, online_videos\n\n\ndef compare_image_buffers(imgbuf1, imgbuf2):\n \"\"\"\n return True if images read on file are identical, False otherwise\n \"\"\"\n with io.BytesIO(imgbuf1) as imgio1, io.BytesIO(imgbuf2) as imgio2:\n img1 = Image.open(imgio1)\n img2 = Image.open(imgio2)\n diff = ImageChops.difference(img1, img2)\n return not diff.getbbox()\n\n\ndef check_images(args, posts, online_images):\n result = True\n for post in posts:\n for media in post.medias:\n if type(media) is PostImage:\n if media.basename in online_images:\n with open(os.path.join(args.root, media.uri), 'rb') as f:\n imgbuf1 = f.read()\n try:\n with urlopen(online_images[media.basename][0]) as u:\n imgbuf2 = u.read()\n except FileNotFoundError:\n print('File not found', online_images[media.\n basename][0])\n next\n if compare_image_buffers(imgbuf1, imgbuf2) is False:\n print('Files are different, upload', media.basename)\n elif 1:\n print('File already online', media.basename)\n else:\n print('File is absent, upload', media.basename)\n result = False\n elif type(media) is PostVideo:\n print('Video not checked', media.basename)\n else:\n assert False\n return result\n\n\ndef compose_blogger_html(args, title, posts, imgdata, online_videos):\n \"\"\" Compose html with blogger image urls\n \"\"\"\n for post in posts:\n for media in post.medias:\n if type(media) is PostImage:\n if media.uri not in imgdata:\n print('Image missing: ', media.uri)\n else:\n img_url, resized_url = imgdata[media.uri]\n media.uri = img_url\n media.resized_url = resized_url\n elif type(media) is PostVideo:\n if not online_videos:\n print('Video missing: ', media.uri)\n else:\n media.iframe = online_videos[0]\n del online_videos[0]\n else:\n assert False\n return print_html(args, posts, title, '', target='blogger')\n\n\ndef prepare_for_blogger(args):\n \"\"\"\n Export blogger html to clipboard.\n If --full, export complete html, otherwise export html extract 
ready to\n paste into blogger edit mode.\n \"\"\"\n title, posts = parse_markdown(os.path.join(args.root, 'index.md'))\n online_images, online_videos = online_images_url(args)\n if args.check_images and check_images(args, posts, online_images) is False:\n pass\n html = compose_blogger_html(args, title, posts, online_images,\n online_videos)\n if args.full is False:\n html = re.search('<body>(.*)?</body>', html, flags=re.DOTALL).group(1)\n html = re.sub('<script>.*?</script>', '', html, flags=re.DOTALL)\n html = STYLE.replace('%%', '%') + html\n if args.dest:\n with open(args.dest, 'wt', encoding='utf-8') as f:\n f.write(html)\n else:\n clipboard.copy(html)\n\n\ndef idempotence(args):\n \"\"\"\n For testing identity between a diary file and the fle obtained after reading\n and printing it. See testing.\n \"\"\"\n title, posts = parse_markdown(os.path.join(args.root, 'index.md'))\n print_markdown(posts, title, os.path.join(args.dest, 'index.md'))\n\n\n<mask token>\n\n\nclass MyConfigParser(ConfigParser):\n \"\"\"Add input checking.\"\"\"\n\n def __init__(self):\n ConfigParser.__init__(self, inline_comment_prefixes=(';',))\n\n def error(self, section, entry):\n error('Missing or incorrect config value:', '[%s]%s' % (section, entry)\n )\n\n def getint(self, section, entry, default=None):\n try:\n if default is None:\n return ConfigParser.getint(self, section, entry)\n else:\n return ConfigParser.getint(self, section, entry, raw=True,\n vars=None, fallback=default)\n except Exception as e:\n print(e)\n self.error(section, entry)\n\n def getboolean(self, section, entry, default=None):\n try:\n if default is None:\n return ConfigParser.getboolean(self, section, entry)\n else:\n return ConfigParser.getboolean(self, section, entry, raw=\n True, vars=None, fallback=default)\n except Exception as e:\n print(e)\n self.error(section, entry)\n\n\ndef configfilename(params):\n return os.path.join(params.root, '.config.ini')\n\n\ndef createconfig(config_filename):\n with 
open(config_filename, 'wt') as f:\n f.writelines(CONFIG_DEFAULTS)\n\n\ndef read_config(params):\n config_filename = configfilename(params)\n try:\n if not os.path.exists(config_filename) or params.resetcfg:\n createconfig(config_filename)\n except:\n error('Error creating configuration file')\n try:\n getconfig(params, config_filename)\n except Exception as e:\n error('Error reading configuration file.', str(e), 'Use --resetcfg')\n\n\ndef getconfig(options, config_filename):\n\n\n class Section:\n pass\n options.source = Section()\n options.thumbnails = Section()\n options.photobox = Section()\n config = MyConfigParser()\n config.read(config_filename)\n options.source.sourcedir = config.get('source', 'sourcedir')\n options.source.bydir = config.getboolean('source', 'bydir')\n options.source.bydate = config.getboolean('source', 'bydate')\n options.source.diary = config.getboolean('source', 'diary')\n options.source.recursive = config.getboolean('source', 'recursive')\n options.source.dates = config.get('source', 'dates')\n options.source.github_pages = config.getboolean('source',\n 'github_pages', default=False)\n options.thumbnails.media_description = config.getboolean('thumbnails',\n 'media_description')\n options.thumbnails.subdir_caption = config.getboolean('thumbnails',\n 'subdir_caption')\n options.thumbnails.thumbdelay = config.getint('thumbnails', 'thumbdelay')\n options.thumbnails.threshold_thumbs = config.getint('thumbnails',\n 'threshold_thumbs')\n options.thumbnails.threshold_htmlfiles = config.getint('thumbnails',\n 'threshold_htmlfiles', default=3)\n options.photobox.loop = config.getboolean('photobox', 'loop')\n options.photobox.thumbs = config.getboolean('photobox', 'thumbs')\n options.photobox.autoplay = config.getboolean('photobox', 'autoplay')\n options.photobox.time = config.getint('photobox', 'time')\n options.photobox.zoomable = config.getboolean('photobox', 'zoomable')\n options.photobox.rotatable = config.getboolean('photobox', 'rotatable')\n 
options.photobox.wheelNextPrev = config.getboolean('photobox',\n 'wheelNextPrev')\n\n\ndef setconfig(cfgname, section, key, value):\n config = MyConfigParser()\n config.read(cfgname)\n config.set(section, key, value)\n with open(cfgname, 'wt') as configfile:\n config.write(configfile)\n\n\ndef setconfig_cmd(args):\n config_filename = configfilename(args)\n setconfig(config_filename, *args.setcfg)\n\n\ndef update_config(args):\n updates = ('sourcedir', args.sourcedir), ('bydir', BOOL[args.bydir]), (\n 'bydate', BOOL[args.bydate]), ('diary', BOOL[args.diary]), ('recursive'\n , BOOL[args.recursive]), ('dates', args.dates), ('github_pages',\n BOOL[args.github_pages])\n cfgname = configfilename(args)\n with open(cfgname) as f:\n cfglines = [_.strip() for _ in f.readlines()]\n for key, value in updates:\n for iline, line in enumerate(cfglines):\n if line.startswith(key):\n cfglines[iline] = f'{key} = {value}'\n break\n with open(cfgname, 'wt') as f:\n for line in cfglines:\n print(line, file=f)\n\n\ndef warning(*msg):\n print(colorama.Fore.YELLOW + colorama.Style.BRIGHT + ' '.join(msg),\n colorama.Style.RESET_ALL)\n\n\n<mask token>\n\n\ndef errorcode(msg):\n return ERRORS.splitlines().index(msg) + 1\n\n\ndef error(*msg):\n print(colorama.Fore.RED + colorama.Style.BRIGHT + ' '.join(msg),\n colorama.Style.RESET_ALL)\n sys.exit(errorcode(msg[0]))\n\n\n<mask token>\n\n\ndef parse_command_line(argstring):\n parser = argparse.ArgumentParser(description=None, usage=USAGE)\n agroup = parser.add_argument_group('Commands')\n xgroup = agroup.add_mutually_exclusive_group()\n xgroup.add_argument('--gallery', help='source in --sourcedir', action=\n 'store', metavar='<root-dir>')\n agroup.add_argument('--update', help=\n 'updates gallery with parameters in config file', action='store',\n metavar='<root-dir>')\n xgroup.add_argument('--create', help=\n 'create journal from medias in --sourcedir', action='store',\n metavar='<root-dir>')\n xgroup.add_argument('--resetcfg', help='reset 
config file to defaults',\n action='store', metavar='<root-dir>')\n xgroup.add_argument('--setcfg', help=argparse.SUPPRESS, action='store',\n nargs=4, metavar='<root-dir>')\n xgroup.add_argument('--idem', help=argparse.SUPPRESS, action='store',\n metavar='<root-dir>')\n xgroup.add_argument('--blogger', help=\n 'input md, html blogger ready in clipboard', action='store',\n metavar='<root-dir>')\n agroup = parser.add_argument_group('Parameters')\n agroup.add_argument('--bydir', help='organize gallery by subdirectory',\n action='store', default=None, choices=BOOL)\n agroup.add_argument('--bydate', help='organize gallery by date', action\n ='store', default=None, choices=BOOL)\n agroup.add_argument('--diary', help=\n 'organize gallery using markdown file diary', action='store',\n default=None, choices=BOOL)\n agroup.add_argument('--recursive', help='--sourcedir scans recursively',\n action='store', default=None, choices=BOOL)\n agroup.add_argument('--dates', help='dates interval', action='store',\n default=None)\n agroup.add_argument('--sourcedir', help='media directory', action=\n 'store', default=None)\n agroup.add_argument('--github_pages', help='github Pages compatibility',\n action='store', default=None, choices=BOOL)\n agroup.add_argument('--dest', help='output directory', action='store')\n agroup.add_argument('--forcethumb', help=\n 'force calculation of thumbnails', action='store_true', default=False)\n agroup.add_argument('--full', help=\n 'full html (versus blogger ready html)', action='store_true',\n default=False)\n agroup.add_argument('--check', dest='check_images', help=\n 'check availability of medias on blogger', action='store_true')\n agroup.add_argument('--url', dest='urlblogger', help='blogger post url',\n action='store')\n if argstring is None:\n print('Type \"galerie -h\" for help')\n sys.exit(1)\n else:\n args = parser.parse_args(argstring.split())\n if args.update and (args.bydir or args.bydate or args.diary or args.\n sourcedir or args.recursive 
or args.dates or args.github_pages):\n error('Incorrect parameters:',\n '--update cannot be used with creation parameters, use explicit command'\n )\n args.bydir = args.bydir == 'true'\n args.bydate = args.bydate == 'true'\n args.diary = args.diary == 'true'\n args.recursive = args.recursive == 'true'\n args.dates = 'source' if args.dates is None else args.dates\n args.github_pages = args.github_pages == 'true'\n args.root = (args.create or args.gallery or args.update or args.blogger or\n args.idem or args.resetcfg)\n if args.setcfg:\n args.root = args.setcfg[0]\n args.setcfg = args.setcfg[1:]\n return args\n\n\ndef setup_part1(args):\n \"\"\"\n Made before reading config file (config file located in args.root).\n Check and normalize root path.\n \"\"\"\n args.rootarg = args.root\n rootext = os.path.splitext(args.rootarg)[1]\n if rootext == '':\n pass\n else:\n args.root = os.path.dirname(args.root)\n if args.root:\n args.root = os.path.abspath(args.root)\n if not os.path.isdir(args.root):\n if args.gallery:\n os.mkdir(args.root)\n else:\n error('Directory not found', args.root)\n\n\ndef setup_part2(args):\n \"\"\"\n Made after reading config file.\n Check for ffmpeg in path.\n Create .thumbnails dir if necessary and create .nomedia in it.\n Copy photobox file to destination dir.\n Handle priority between command line and config file.\n \"\"\"\n if args.update:\n args.sourcedir = args.source.sourcedir\n args.bydir = args.source.bydir\n args.bydate = args.source.bydate\n args.diary = args.source.diary\n args.recursive = args.source.recursive\n args.dates = args.source.dates\n args.github_pages = args.source.github_pages\n elif args.gallery:\n args.source.sourcedir = args.sourcedir\n args.source.bydir = args.bydir\n args.source.bydate = args.bydate\n args.source.diary = args.diary\n args.source.recursive = args.recursive\n args.source.dates = args.dates\n args.source.github_pages = args.github_pages\n update_config(args)\n if args.github_pages:\n args.html_suffix = 
'.html'\n else:\n args.html_suffix = '.htm'\n rootext = os.path.splitext(args.rootarg)[1]\n if rootext:\n args.rootname = os.path.basename(args.rootarg)\n else:\n args.rootname = 'index' + args.html_suffix\n if args.sourcedir:\n args.sourcedir = os.path.abspath(args.sourcedir)\n if os.path.splitdrive(args.sourcedir)[0]:\n drive, rest = os.path.splitdrive(args.sourcedir)\n args.sourcedir = drive.upper() + rest\n if not os.path.isdir(args.sourcedir):\n error('Directory not found', args.sourcedir)\n elif args.gallery and args.diary is False and args.update is None:\n error('Directory not found', 'Use --sourcedir')\n if args.dest:\n args.dest = os.path.abspath(args.dest)\n if args.dest is None:\n args.dest = args.root\n if args.blogger and args.urlblogger is None:\n error('No blogger url (--url)')\n if args.gallery or args.update:\n for exe in ('ffmpeg', 'ffprobe'):\n try:\n check_output([exe, '-version'])\n except FileNotFoundError:\n error('File not found', exe)\n if args.github_pages:\n args.thumbrep = 'thumbnails'\n else:\n args.thumbrep = '.thumbnails'\n args.thumbdir = os.path.join(args.dest, args.thumbrep)\n if not os.path.exists(args.thumbdir):\n os.mkdir(args.thumbdir)\n open(os.path.join(args.thumbdir, '.nomedia'), 'a').close()\n favicondst = os.path.join(args.dest, 'favicon.ico')\n if not os.path.isfile(favicondst):\n faviconsrc = os.path.join(os.path.dirname(__file__), 'favicon.ico')\n shutil.copyfile(faviconsrc, favicondst)\n photoboxdir = os.path.join(args.dest, 'photobox')\n if not os.path.exists(photoboxdir):\n photoboxsrc = os.path.join(os.path.dirname(__file__), 'photobox')\n shutil.copytree(photoboxsrc, photoboxdir)\n if args.dates:\n if not (args.gallery or args.create):\n pass\n if args.dates == 'source':\n pass\n elif args.dates == 'diary':\n if args.create:\n error('Incorrect date format', args.dates)\n elif re.match('\\\\d+-\\\\d+', args.dates):\n date1, date2 = args.dates.split('-')\n if validate_date(date1) and validate_date(date2):\n 
args.dates = date1, date2\n else:\n error('Incorrect date format', args.dates)\n else:\n error('Incorrect date format', args.dates)\n\n\ndef main(argstring=None):\n colorama.init()\n args = parse_command_line(argstring)\n setup_part1(args)\n read_config(args)\n setup_part2(args)\n try:\n if args.gallery or args.update:\n create_gallery(args)\n elif args.create:\n create_diary(args)\n elif args.blogger:\n prepare_for_blogger(args)\n elif args.idem:\n idempotence(args)\n elif args.setcfg:\n setconfig_cmd(args)\n except KeyboardInterrupt:\n warning('Interrupted by user.')\n\n\n<mask token>\n",
"step-5": "\"\"\"\nMake html galleries from media directories. Organize by dates, by subdirs or by\nthe content of a diary file. The diary file is a markdown file organized by\ndates, each day described by a text and some medias (photos and movies).\n\nThe diary file can be exported to:\n* an html file with the text and subset of medias associated with each day,\n* the previous html file extended with all medias in the media directory,\n* an html file ready to import into Blogger.\n\"\"\"\n\n\nimport sys\nimport os\nimport argparse\nimport glob\nimport shutil\nimport re\nimport io\nimport bisect\nimport locale\nimport textwrap\nimport base64\nimport datetime\nimport urllib\n\nfrom configparser import ConfigParser\nfrom collections import defaultdict\nfrom subprocess import check_output, CalledProcessError, STDOUT\nfrom urllib.request import urlopen\n\nimport colorama\nimport clipboard\nimport PIL\nfrom PIL import Image, ImageChops\nfrom lxml import objectify\nimport markdown\n\n\nUSAGE = \"\"\"\ngalerie --gallery <root-dir> [--sourcedir <media-dir>]\n [--bydir true|false*]\n [--bydate true|false*]\n [--diary true|false*]\n [--recursive true|false*]\n [--dates source*|diary|<yyyymmdd-yyyymmdd>]\n [--github_pages true|false]\n [--dest <directory>]\n [--forcethumb]\ngalerie --update <root-dir>\ngalerie --create <root-dir> --sourcedir <media-dir>\n [--recursive true|false*]\n [--dates source*|<yyyymmdd-yyyymmdd>]\ngalerie --blogger <root-dir> --url <url>\n [--check]\n [--full]\n [--dest <filename>]\n\nNotes:\n - * gives default\n - all options can be abbreviated if there is no conflict with other options (--gallery --> --gal)\n\n\"\"\"\n\n\n# -- Post objects -------------------------------------------------------------\n\n\nCAPTION_IMAGE_STYLE = '''\\\n<style type=\"text/css\">\n span { display:inline-table; }\n </style>\\\n'''\n\nSTYLE = '''\\\n<style type=\"text/css\">\n p { margin-top:0px; margin-bottom:0px; }\n h3 { font-size: 100%%; font-weight: bold; 
margin-top:0px; margin-bottom:0px; }\n </style>\n'''\n\nSTART = f'''\\\n<html>\n\n<head>\n <meta http-equiv=\"Content-Type\" content=\"text/html; charset=UTF-8\" />\n <title>%s</title>\n <link rel=\"icon\" href=\"favicon.ico\" />\n <meta name=\"viewport\" content=\"width=device-width\">\n <link rel=\"stylesheet\" href=\"photobox/photobox.css\">\n <script src=\"photobox/jquery.min.js\"></script>\n <script src=\"photobox/jquery.photobox.js\"></script>\n{CAPTION_IMAGE_STYLE}\n{STYLE}\n</head>\n\n<body>\\\n'''\n\nBUTTONS = '''\\\n<button id=\"btn_full\" type=\"button\" style=\"position: fixed; width: 50px; top: 20px; right: 20px; background-color:white\">Full</button>\n<button id=\"btn_blog\" type=\"button\" style=\"position: fixed; width: 50px; top: 40px; right: 20px; background-color:white\">Diary</button>\n<button id=\"btn_text\" type=\"button\" style=\"position: fixed; width: 50px; top: 60px; right: 20px; background-color:white\">Text</button>\n\n<script>\n$('#btn_full').click(function() {\n $(\"[id^=gallery-blog]\").show();\n $(\"[id^=gallery-dcim]\").show();\n $(\"div.extra\").show();\n});\n$('#btn_text').click(function() {\n $(\"[id^=gallery-blog]\").hide();\n $(\"[id^=gallery-dcim]\").hide();\n $(\"div.extra\").hide();\n});\n$('#btn_blog').click(function() {\n $(\"[id^=gallery-blog]\").show();\n $(\"[id^=gallery-dcim]\").hide();\n $(\"div.extra\").hide();\n});\n</script>\n'''\n\nSUBDIR_BACKCOL = '#eee'\nEND = '</body>\\n</html>'\nSEP = '<hr color=\"#C0C0C0\" size=\"1\" />'\nIMGPOST = '<a href=\"%s\"><img src=\"%s\" width=\"%d\" height=\"%d\" title=\"%s\"/></a>'\nVIDPOST = '<a href=\"%s\" rel=\"video\"><img src=\"%s\" width=\"%d\" height=\"%d\" title=\"%s\"/></a>'\nIMGPOSTCAPTION = '''\\\n<span>\n<a href=\"%s\"><img src=%s width=\"%d\" height=\"%d\" title=\"%s\"/></a>\n<p>%s</p>\n</span>\n'''\nVIDPOSTCAPTION = '''\\\n<span>\n<a href=\"%s\" rel=\"video\"><img src=%s width=\"%d\" height=\"%d\" title=\"%s\"/></a>\n<p>%s</p>\n</span>\n'''\nIMGDCIM = '<a 
href=\"%s\"><img src=\"%s\" width=\"%d\" height=\"%d\" title=\"%s\"/></a>'\nVIDDCIM = '<a href=\"%s\" rel=\"video\"><img src=\"%s\" width=\"%d\" height=\"%d\" title=\"%s\"/></a>'\n\n# diminution de l'espace entre images, on utilise :\n# \"display: block;\", \"margin-bottom: 0em;\" et \"font-size: 0;\"\n# \"display: block;\" dans img : espacement correct ordi mais pas centré téléphone\n# \"display: block;\" dans a : ok\n\nDIRPOST = '<a href=\"%s\"><img src=\"%s\" width=\"%d\" height=\"%d\" style=\"border: 1px solid #C0C0C0;\" /></a>'\nDIRPOSTCAPTION = f'''\n<span style=\"background-color:{SUBDIR_BACKCOL}; margin-bottom: 8px; border: 1px solid #C0C0C0;\">\n<a href=\"%s\"><img src=\"%s\" width=\"%d\" height=\"%d\" style=\"border: 1px solid #C0C0C0;\" /></a>\n<p style=\"margin-left:2px;\">%s</p>\n</span>\n'''\nBIMGPAT = '''\\\n<div class=\"separator\" style=\"clear: both; text-align: center;\">\n<a href=\"%s\" style=\"clear: left; margin-bottom: 0em; margin-right: 1em; font-size: 0; display: block;\">\n<img border=\"0\" src=\"%s\" width=\"640\" />\n</a></div>\n'''\nCAPTION_PAT = '''\\\n<div class=\"separator\" style=\"clear: both; text-align: center;\">\n%s\n</div>\n'''\n\n\nclass Post:\n def __init__(self, date, text, medias):\n # date: yyyymmdd\n self.date = date\n self.text = text\n self.medias = medias\n self.dcim = []\n self.daterank = 0\n self.extra = False\n\n def __lt__(self, other):\n return self.date < other.date\n\n @classmethod\n def from_markdown(cls, post):\n m = re.match(r'\\[(\\d\\d\\d\\d/\\d\\d/\\d\\d)\\]\\n*', post[0])\n if m:\n date = m.group(1).replace('/', '')\n if not validate_date(date):\n error('Incorrect date value:', date)\n del post[0]\n else:\n error('No date in post', ' '.join(post))\n\n while post and not post[0].strip():\n del post[0]\n\n text = ''\n while post and not re.match(r'!?\\[\\]', post[0]):\n text += post[0]\n del post[0]\n\n # remove empty lines at end\n text = re.sub(r'\\n\\n$', '\\n', text)\n\n medias = list()\n while post 
and (match := re.match(r'!?\\[\\]\\((.*)\\)', post[0])):\n media = match.group(1)\n caption = None\n del post[0]\n if post and not re.match(r'!?\\[\\]', post[0]):\n caption = post[0].strip()\n del post[0]\n if match.group(0)[0] == '!':\n medias.append(PostImage(caption, media))\n else:\n medias.append(PostVideo(caption, media))\n\n return cls(date, text, medias)\n\n @classmethod\n def from_date(cls, date):\n dt = datetime.datetime.strptime(date, '%Y%m%d')\n datetext = dt.strftime(\"%A %d %B %Y\").capitalize()\n post = cls(date, text=datetext, medias=[])\n post.daterank = 1\n return post\n\n def to_html(self, args, target='regular'):\n if target == 'regular':\n if args.diary:\n return self.to_html_diary(args)\n else:\n return self.to_html_regular(args)\n if target == 'blogger':\n return self.to_html_blogger()\n\n def to_html_regular(self, args):\n html = list()\n if self.text:\n # possible with --bydate\n html.append(markdown.markdown(self.text))\n subdirs, dcim = dispatch_post_items(self.dcim)\n if self.dcim:\n html.append(SEP)\n for media in subdirs:\n html.append(media.to_html_dcim(args))\n if dcim:\n html.append(f'<div id=\"gallery-dcim-{self.date}-{self.daterank}\">')\n for media in dcim:\n html.append(media.to_html_dcim(args))\n html.append('</div>')\n\n html.append(SEP)\n return html\n\n def to_html_diary(self, args):\n html = list()\n if self.extra:\n html.append('<div class=\"extra\">')\n\n if self.text:\n html.append(markdown.markdown(self.text))\n\n if self.medias:\n html.append(f'<div id=\"gallery-blog-{self.date}-{self.daterank}\">')\n for media in self.medias:\n html.append(media.to_html_post(args))\n html.append('</div>')\n\n _, dcim = dispatch_post_items(self.dcim)\n if dcim:\n html.append(f'<div id=\"gallery-dcim-{self.date}-{self.daterank}\">')\n html.append(SEP)\n for media in dcim:\n html.append(media.to_html_dcim(args))\n html.append('</div>')\n\n html.append(SEP)\n if self.extra:\n html.append('</div>')\n return html\n\n def 
to_html_blogger(self):\n html = list()\n html.append(markdown.markdown(self.text))\n for image in self.medias:\n html.append(image.to_html_blogger())\n html.append(SEP)\n return html\n\n\nclass PostItem:\n def __init__(self, caption, uri, thumb=None, thumbsize=None, descr=''):\n self.caption = caption\n self.uri = uri\n self.basename = os.path.basename(uri)\n self.thumb = thumb\n self.thumbsize = thumbsize\n self.descr = descr\n self.resized_url = None\n\n\nclass PostImage(PostItem):\n def to_markdown(self):\n if not self.caption:\n return '' % (self.uri,)\n else:\n return '\\n%s' % (self.uri, self.caption)\n\n def to_html_post(self, args):\n descr = self.descr if args.thumbnails.media_description else ''\n if not self.caption:\n return IMGPOST % (self.uri, self.thumb, *self.thumbsize, descr)\n else:\n return IMGPOSTCAPTION % (self.uri, self.thumb, *self.thumbsize, descr, self.caption)\n\n def to_html_dcim(self, args):\n descr = self.descr if args.thumbnails.media_description else ''\n return IMGDCIM % (relative_url(self.uri, args.root), self.thumb, *self.thumbsize, descr)\n\n def to_html_blogger(self):\n if not self.caption:\n return BIMGPAT % (self.uri, self.resized_url)\n else:\n return f'{BIMGPAT}\\n{CAPTION_PAT}' % (self.uri, self.resized_url, self.caption)\n\n\nclass PostVideo(PostItem):\n def to_markdown(self):\n if not self.caption:\n return '[](%s)' % (self.uri,)\n else:\n return '[](%s)\\n%s' % (self.uri, self.caption)\n\n def to_html_post(self, args):\n descr = self.descr if args.thumbnails.media_description else ''\n if not self.caption:\n return VIDPOST % (self.uri, self.thumb, *self.thumbsize, descr)\n else:\n return VIDPOSTCAPTION % (self.uri, self.thumb, *self.thumbsize, descr, self.caption)\n\n def to_html_dcim(self, args):\n descr = self.descr if args.thumbnails.media_description else ''\n return VIDDCIM % (relative_url(self.uri, args.root), self.thumb, *self.thumbsize, descr)\n\n def to_html_blogger(self):\n x = f'<p style=\"text-align: 
center;\">{self.iframe}</p>'\n if not self.caption:\n return x\n else:\n return f'%s\\n{CAPTION_PAT}' % (x, self.caption)\n\n\nclass PostSubdir(PostItem):\n def to_html_dcim(self, args):\n basename = os.path.basename(self.htmname)\n posts = self.posts\n title = self.caption\n print_html(args, posts, title, self.htmname)\n\n if not self.caption:\n return DIRPOST % (basename, self.thumb, *self.thumbsize)\n else:\n return DIRPOSTCAPTION % (basename, self.thumb, *self.thumbsize, self.caption)\n\n\ndef relative_url(path, root):\n \"\"\"\n returns a normalized url to path relative from root\n \"\"\"\n try:\n url = os.path.relpath(path, root)\n except:\n error('Unable to make a relative url:', url, root)\n\n url = url.replace('\\\\', '/') if os.sep == '\\\\' else url\n\n return urllib.parse.quote(url)\n\n\n# -- Markdown parser ----------------------------------------------------------\n\n\ndef parse_markdown(filename):\n \"\"\"\n Generate Post objects from markdown. Date must be present in each post and\n posts must be ordrered by date.\n \"\"\"\n if not os.path.exists(filename):\n error('File not found', filename)\n\n posts = list()\n with open(filename, encoding='utf-8') as f:\n line = next(f)\n if line.startswith('# '):\n title = line[2:].strip()\n record = []\n next(f)\n else:\n title = None\n record = [line]\n for line in f:\n if not line.startswith('___'):\n record.append(line)\n else:\n posts.append(Post.from_markdown(record))\n record = []\n\n # set rank of posts in date\n daterank = defaultdict(int)\n for post in posts:\n daterank[post.date] += 1\n post.daterank = daterank[post.date]\n\n # check post order\n for post1, post2 in zip(posts[:-1], posts[1:]):\n if post1.date > post2.date:\n error('Posts are not ordered', f'{post1.date} > {post2.date}')\n\n return title, posts\n\n\n# -- Markdown printer ---------------------------------------------------------\n\n\ndef print_markdown(posts, title, fullname):\n with open(fullname, 'wt', encoding='utf-8') as fdst:\n 
print(f'# {title}\\n', file=fdst)\n for post in posts:\n date = f'[{post.date[0:4]}/{post.date[4:6]}/{post.date[6:8]}]'\n print(date, file=fdst)\n if post.text:\n print(file=fdst)\n for line in post.text.splitlines():\n if not line:\n print(file=fdst)\n else:\n for chunk in textwrap.wrap(line, width=78):\n print(chunk, file=fdst)\n if post.medias:\n print(file=fdst)\n for media in post.medias:\n print(media.to_markdown(), file=fdst)\n print('______', file=fdst)\n\n\n# -- html printer -------------------------------------------------------------\n\n\ndef compose_html_reduced(args, posts, title, target):\n html = list()\n html.append(START % title)\n\n for post in posts:\n for line in post.to_html(args, target):\n html.append(line.strip())\n html.append('')\n\n html.append(END)\n return html\n\n\ndef compose_html_full(args, posts, title, target):\n html = list()\n html.append(START % title)\n\n if args.diary:\n html.append(BUTTONS)\n\n for post in posts:\n for line in post.to_html(args, target):\n html.append(line.strip())\n html.append('')\n\n html.append('<script>')\n for post in posts:\n if post.medias:\n gallery_id = f'gallery-blog-{post.date}-{post.daterank}'\n html.append(gallery_call(args, gallery_id))\n if post.dcim:\n gallery_id = f'gallery-dcim-{post.date}-{post.daterank}'\n html.append(gallery_call(args, gallery_id))\n html.append('</script>')\n\n html.append(END)\n return html\n\n\ndef print_html_to_stream(args, posts, title, stream, target):\n if target == 'regular':\n for line in compose_html_full(args, posts, title, target):\n print(line, file=stream)\n else:\n for line in compose_html_reduced(args, posts, title, target):\n print(line, file=stream)\n\n\ndef print_html(args, posts, title, html_name, target='regular'):\n assert target in ('regular', 'blogger')\n with io.StringIO() as f:\n print_html_to_stream(args, posts, title, f, target)\n html = f.getvalue()\n\n if html_name:\n if os.path.exists(html_name):\n # test if the generated html is identical 
to the one already on disk\n with open(html_name, 'rt', encoding='utf-8') as f:\n html0 = f.read()\n if html == html0:\n return None\n with open(html_name, 'wt', encoding='utf-8') as f:\n f.write(html)\n return None\n else:\n return html\n\n\nGALLERYCALL = \"\"\"\n$('#%s').photobox('a', {\nloop:%s,\nthumbs:%s,\nautoplay:%s,\ntime:%d,\nzoomable:%s ,\nrotatable:%s,\nwheelNextPrev:%s\n});\n\"\"\"\n\n\ndef gallery_call(args, gallery_id):\n return GALLERYCALL.replace('\\n', '') % (\n gallery_id,\n str(args.photobox.loop).lower(),\n str(args.photobox.thumbs).lower(),\n str(args.photobox.autoplay).lower(),\n args.photobox.time,\n str(args.photobox.zoomable).lower(),\n str(args.photobox.rotatable).lower(),\n str(args.photobox.wheelNextPrev).lower(),\n )\n\n\n# -- Media description --------------------------------------------------------\n\n\ndef is_image_file(name):\n return os.path.splitext(name)[1].lower() in (\n '.jpg', '.jpeg', '.png', '.gif', '.bmp', '.webp', '.tif'\n )\n\n\ndef is_video_file(name):\n return os.path.splitext(name)[1].lower() in (\n '.mp4', '.webm', '.mkv', '.flv', '.m4v', '.avi', '.wmv', '.mts', '.vob', '.divx'\n )\n\n\ndef is_media(name):\n return is_image_file(name) or is_video_file(name)\n\n\ndef validate_date(datestr):\n # datestr = yyyymmdd\n try:\n datetime.datetime.strptime(datestr, '%Y%m%d')\n return True\n except ValueError:\n return False\n\n\ndef date_from_name(name):\n # heuristics\n if match := re.search(r'(?:\\D|^)(\\d{8})(?:\\D|$)', name, re.ASCII):\n digits = match.group(1)\n if validate_date(digits):\n return digits\n return None\n\n\ndef date_from_item(filename):\n if date := date_from_name(filename):\n return date\n else:\n timestamp = os.path.getmtime(filename)\n return datetime.datetime.fromtimestamp(timestamp).strftime('%Y%m%d')\n\n\ndef time_from_name(name):\n # heuristics\n if match := re.search(r'(?:\\D|^)(\\d{8})\\D(\\d{6})(?:\\D|$)', name, re.ASCII):\n digits = match.group(2)\n hour, minute, second = int(digits[0:2]), 
int(digits[2:4]), int(digits[4:6])\n if 0 <= hour < 24 and 0 <= minute < 60 and 0 <= second < 60:\n return digits\n return None\n\n\ndef time_from_item(filename):\n if time := time_from_name(filename):\n return time\n else:\n timestamp = os.path.getmtime(filename)\n return datetime.datetime.fromtimestamp(timestamp).strftime('%H%M%S')\n\n\nFFPROBE_CMD = '''\\\n ffprobe -v error\n -select_streams v:0\n -show_entries stream=width,height,avg_frame_rate,r_frame_rate:format=duration\n -of csv=p=0\n'''\n\n\ndef get_image_info(filename):\n date = date_from_item(filename)\n time = time_from_item(filename)\n img = Image.open(filename)\n width, height = img.size\n size = round(os.path.getsize(filename) / 1e6, 1)\n return (date, time, width, height, size), f'{date} {time}, dim={width}x{height}, {size} MB'\n\n\ndef get_video_info(filename, info_fullname):\n if os.path.exists(info_fullname):\n with open(info_fullname) as f:\n info = f.readline().split()\n date, time, width, height, size, duration, fps = info[0], info[1], int(info[2]), int(info[3]), float(info[4]), int(info[5]), float(info[6])\n formatted_info = format_video_info(date, time, width, height, size, duration, fps)\n return (date, time, width, height, size, duration, fps), formatted_info\n else:\n info, formatted_info = make_video_info(filename, info_fullname)\n with open(info_fullname, 'wt') as f:\n print(' '.join([str(_) for _ in info]), file=f)\n return info, formatted_info\n\n\ndef make_video_info(filename, info_fullname):\n # ffmpeg must be in path\n date = date_from_item(filename)\n time = time_from_item(filename)\n command = [*FFPROBE_CMD.split(), filename]\n try:\n output = check_output(command, stderr=STDOUT).decode()\n width, height, fps, duration = parse_ffprobe_output(output)\n size = round(os.path.getsize(filename) / 1e6, 1)\n output = format_video_info(date, time, width, height, size, duration, fps)\n except CalledProcessError as e:\n output = e.output.decode()\n warning(output)\n raise\n return (date, 
time, width, height, size, duration, fps), output\n\n\ndef parse_ffprobe_output(ffprobe_output):\n # parse first channel data and last line for duration\n match = re.match(r'(\\d+),(\\d+),(\\d+)/(\\d+),(\\d+/\\d+).*\\s(\\d+\\.\\d+)', ffprobe_output, re.DOTALL)\n width = int(match.group(1))\n height = int(match.group(2))\n fps = round(int(match.group(3)) / int(match.group(4)), 1)\n duration = round(float(match.group(6)))\n return width, height, fps, duration\n\n\ndef format_video_info(date, time, width, height, size, duration, fps):\n return f'{date} {time}, dim={width}x{height}, {format_duration(duration)}, fps={fps}, {size} MB'\n\n\ndef format_duration(duration):\n mn = duration // 60\n sec = duration % 60\n if mn <= 59:\n return f'm:s={mn:02}:{sec:02}'\n else:\n hour = mn // 60\n mn = mn % 60\n return f'h:m:s={hour:02}:{mn:02}:{sec:02}'\n\n\n# -- Thumbnails (image and video) ---------------------------------------------\n\n\ndef thumbname(name, key):\n return key + '-' + name + '.jpg'\n\n\ndef size_thumbnail(width, height, maxdim):\n if width >= height:\n return maxdim, int(round(maxdim * height / width))\n else:\n return int(round(maxdim * width / height)), maxdim\n\n\ndef make_thumbnail_image(args, image_name, thumb_name, size):\n if os.path.exists(thumb_name) and args.forcethumb is False:\n pass\n else:\n print('Making thumbnail:', thumb_name)\n create_thumbnail_image(image_name, thumb_name, size)\n\n\ndef create_thumbnail_image(image_name, thumb_name, size):\n imgobj = Image.open(image_name)\n\n if (imgobj.mode != 'RGBA'\n and image_name.endswith('.jpg')\n and not (image_name.endswith('.gif') and imgobj.info.get('transparency'))\n ):\n imgobj = imgobj.convert('RGBA')\n\n imgobj.thumbnail(size, Image.LANCZOS)\n imgobj = imgobj.convert('RGB')\n imgobj.save(thumb_name)\n\n\ndef make_thumbnail_video(args, video_name, thumb_name, size, duration):\n if os.path.exists(thumb_name) and args.forcethumb is False:\n pass\n else:\n print('Making thumbnail:', thumb_name)\n 
create_thumbnail_video(args, video_name, thumb_name, size, duration)\n\n\n# base64 video.png\nVIDEO_ICON = '''\\\niVBORw0KGgoAAAANSUhEUgAAABgAAAAUCAAAAACy3qJfAAAA4UlEQVR4\n2m1QoRbCMAy88SaK69xscfuEWiS4SZBIcCCRfAL8An8AcnJzTOJSWdxwzJXSPUoHRPQlueYuucigxm\n9kDGaMf8AjopGcYn8LmmyLoihBWBiThb+5MTuUsc3aL56upneZ9sByAIg8Z8BEn96EeZ65iU7DvmbP\nPxqDcH6p1swXBC4l6yZskACkTN1WrQr2SlIFhTtgqeZa+zsOogLXegvEocZ5c/W5BcoVNNCg3hSudV\n/hEh4ofw6cEb00Km8i0dpRDUXfKiaQOEAdrUDo4dFp9C33jjaRac9/gDF/AlplVYtfWGCjAAAAAElF\nTkSuQmCC'''\n\n\ndef create_thumbnail_video(args, filename, thumbname, size, duration):\n # ffmpeg must be in path\n delay = min(duration - 1, args.thumbnails.thumbdelay)\n sizearg = '%dx%d' % size\n command = 'ffmpeg -y -v error -itsoffset -%d -i \"%s\" -vcodec mjpeg -vframes 1 -an -f rawvideo -s %s \"%s\"'\n command = command % (delay, filename, sizearg, thumbname)\n result = os.system(command)\n\n # add a movie icon to the thumbnail to identify videos\n try:\n img1 = Image.open(thumbname)\n except:\n # ffmpeg was unable to save thumbnail\n warning('Unable to save thumbnail for', filename)\n return\n img2 = Image.open(io.BytesIO(base64.b64decode(VIDEO_ICON)))\n width, height = img1.size\n img1.paste(img2, (6, height - 20 - 6), None)\n img1.save(thumbname)\n\n\ndef make_thumbnail_subdir(args, subdir_name, thumb_name, size, items, thumbdir):\n # subdir thumbnails are always created as they depend on the content of the\n # directory\n print('Making thumbnail:', thumb_name)\n create_thumbnail_subdir(subdir_name, thumb_name, size, items, thumbdir)\n\n\ndef create_thumbnail_subdir(subdir_name, thumb_name, size, items, thumbdir):\n\n def size_thumbnail(width, height, xmax, ymax):\n width2 = xmax\n height2 = int(round(xmax * height / width))\n if height2 < ymax:\n width2 = int(round(ymax * width / height))\n height2 = ymax\n return width2, height2\n\n thumblist = [os.path.basename(item.thumb) for item in items]\n widthnum, heightnum, width, height, offsetx, offsety = mosaic_geometry(size, 
thumblist)\n thumbnum = widthnum * heightnum\n img = Image.new('RGB', size, SUBDIR_BACKCOL)\n\n for ind, thumb in enumerate(thumblist[:min(thumbnum, len(thumblist))]):\n row = ind // widthnum\n col = ind % widthnum\n img2 = Image.open(os.path.join(thumbdir, thumb))\n w, h = size_thumbnail(*img2.size, width[col], height[row])\n cropdim = ((w - width[col]) // 2, (h - height[row]) // 2,\n (w - width[col]) // 2 + width[col], (h - height[row]) // 2 + height[row])\n img2 = img2.resize((w, h), Image.LANCZOS)\n img2 = img2.crop(cropdim)\n img.paste(img2, (offsetx[col], offsety[row]))\n\n if os.path.exists(thumb_name):\n # test if the generated thumbnail is identical to the one already on disk\n imgref = Image.open(thumb_name)\n\n # must save and reload before comparing\n byteio = io.BytesIO()\n img.save(byteio, \"JPEG\")\n byteio.seek(0)\n imgnew = Image.open(byteio)\n\n diff = ImageChops.difference(imgnew, imgref)\n if diff.getbbox() is None:\n return\n\n img.save(thumb_name)\n\n\ndef mosaic_geometry(size, thumblist):\n if len(thumblist) == 1:\n widthnum = 1\n heightnum = 1\n elif len(thumblist) <= 3:\n widthnum = 1\n heightnum = 2\n elif len(thumblist) <= 8:\n widthnum = 2\n heightnum = 2\n else:\n widthnum = 3\n heightnum = 3\n\n if widthnum == 1:\n width = [size[0] - 2]\n else:\n width = [size[0] // widthnum - 2] * (widthnum - 1)\n width.append(size[0] - (1 + sum(width) + 2 * len(width) + 1))\n\n if heightnum == 1:\n height = [size[1] - 2]\n else:\n height = [size[1] // heightnum - 2] * (heightnum - 1)\n height.append(size[1] - (1 + sum(height) + 2 * len(height) + 1))\n\n offsetx = [1]\n for w in width[:-1]:\n offsetx.append(offsetx[-1] + w + 2)\n\n offsety = [1]\n for h in height[:-1]:\n offsety.append(offsety[-1] + h + 2)\n\n return widthnum, heightnum, width, height, offsetx, offsety\n\n\ndef list_of_htmlfiles(args, posts):\n htmlist = list()\n htmlist.append(os.path.join(args.dest, args.rootname))\n for post in posts:\n 
htmlist.extend(list_of_htmlfiles_in_items(post.dcim))\n return htmlist\n\n\ndef list_of_htmlfiles_in_items(itemlist):\n htmlist = list()\n for item in itemlist:\n if type(item) == PostSubdir:\n htmlist.append(item.htmname)\n htmlist.extend(list_of_htmlfiles_in_items(item.sublist))\n return htmlist\n\n\ndef list_of_thumbnails(posts, diary=False):\n thumblist = list()\n for post in posts:\n thumblist.extend(list_of_thumbnails_in_items(post.medias))\n if diary is False:\n thumblist.extend(list_of_thumbnails_in_items(post.dcim))\n return thumblist\n\n\ndef list_of_thumbnails_in_items(itemlist):\n thumblist = list()\n for item in itemlist:\n if type(item) == PostSubdir:\n thumblist.append(os.path.basename(item.thumb))\n thumblist.extend(list_of_thumbnails_in_items(item.sublist))\n else:\n thumblist.append(os.path.basename(item.thumb))\n return thumblist\n\n\ndef purge_htmlfiles(args, posts):\n \"\"\"\n Purge root dir from irrelevant html files\n \"\"\"\n htmlist = list_of_htmlfiles(args, posts)\n html_to_remove = list()\n for fullname in glob.glob(os.path.join(args.root, '*.htm*')):\n if fullname not in htmlist:\n html_to_remove.append(fullname)\n\n if len(html_to_remove) > args.thumbnails.threshold_htmlfiles:\n inpt = 'x'\n while inpt not in 'yn':\n inpt = input(f'{len(html_to_remove)} html files to remove. Continue [y|n]? ').lower()\n if inpt == 'n':\n return\n\n for name in html_to_remove:\n print('Removing html files', name)\n os.remove(name)\n\n\ndef purge_thumbnails(args, thumbdir, posts, diary=False):\n \"\"\"\n Purge thumbnail dir from irrelevant thumbnails\n \"\"\"\n thumblist = list_of_thumbnails(posts, diary)\n thumbs_to_remove = list()\n for fullname in glob.glob(os.path.join(thumbdir, '*.jpg')):\n if os.path.basename(fullname) not in thumblist:\n thumbs_to_remove.append(fullname)\n\n if len(thumbs_to_remove) > args.thumbnails.threshold_thumbs:\n inpt = 'x'\n while inpt not in 'yn':\n inpt = input(f'{len(thumbs_to_remove)} thumbnails to remove. 
Continue [y|n]? ').lower()\n if inpt == 'n':\n return\n\n for name in thumbs_to_remove:\n print('Removing thumbnail', name)\n os.remove(name)\n info_fullname = os.path.splitext(name)[0] + '.info'\n if os.path.exists(info_fullname):\n os.remove(info_fullname)\n\n\n# -- List of medias helpers ---------------------------------------------------\n\n\ndef is_media_within_dates(fullname, dates):\n if is_media(fullname):\n if type(dates) == tuple:\n return dates[0] <= date_from_item(fullname) <= dates[1]\n else:\n return True\n else:\n return False\n\n\ndef sorted_listdir(filelist):\n like_windows_explorer = True\n\n if not filelist:\n return filelist\n\n if like_windows_explorer:\n maxlen = max(len(os.path.splitext(name)[0]) for name in filelist)\n def keyfunc(name):\n root, ext = os.path.splitext(name.lower())\n return root.ljust(maxlen, ' ') + ext\n else:\n keyfunc = str.lower\n\n return sorted(filelist, key=keyfunc)\n\n\ndef list_of_files(sourcedir, recursive):\n \"\"\"\n Return the list of full paths for files in source directory\n \"\"\"\n result = list()\n if recursive is False:\n listdir = sorted_listdir(os.listdir(sourcedir))\n if '.nomedia' not in listdir:\n for basename in listdir:\n result.append(os.path.join(sourcedir, basename))\n else:\n for root, dirs, files in os.walk(sourcedir):\n if '.nomedia' not in files:\n for basename in sorted_listdir(files):\n result.append(os.path.join(root, basename))\n return result\n\n\ndef list_of_medias(args, sourcedir, recursive):\n \"\"\"\n Return the list of full paths for pictures and movies in source directory\n \"\"\"\n files = list_of_files(sourcedir, recursive)\n return [_ for _ in files if is_media_within_dates(_, args.dates)]\n\n\ndef list_of_medias_ext(args, sourcedir):\n \"\"\"\n Return the list of full paths for pictures and movies in source directory\n plus subdirectories containing media\n \"\"\"\n result = list()\n listdir = sorted_listdir(os.listdir(sourcedir))\n if '.nomedia' not in listdir:\n for basename 
in listdir:\n fullname = os.path.join(sourcedir, basename)\n if os.path.isdir(fullname) and basename != '$RECYCLE.BIN' and contains_media(args, fullname):\n result.append(fullname)\n else:\n if is_media_within_dates(fullname, args.dates):\n result.append(fullname)\n return result\n\n\ndef contains_media(args, dirname):\n for root, dirs, files in os.walk(dirname):\n if '.nomedia' not in files:\n for basename in files:\n if is_media_within_dates(os.path.join(root, basename), args.dates):\n return True\n else:\n return False\n\n\ndef dispatch_post_items(list_of_post_items):\n subdirs = [_ for _ in list_of_post_items if type(_) is PostSubdir]\n medias = [_ for _ in list_of_post_items if type(_) is not PostSubdir]\n return subdirs, medias\n\n\n# -- Creation of gallery element ----------------------------------------------\n\n\ndef create_item(args, media_fullname, sourcedir, thumbdir, key, thumbmax):\n if os.path.isfile(media_fullname):\n if is_image_file(media_fullname):\n return create_item_image(args, media_fullname, sourcedir, thumbdir, key, thumbmax)\n else:\n return create_item_video(args, media_fullname, sourcedir, thumbdir, key, thumbmax)\n else:\n return create_item_subdir(args, media_fullname, sourcedir, thumbdir, key, thumbmax)\n\n\ndef create_item_image(args, media_fullname, sourcedir, thumbdir, key, thumbmax):\n media_basename = os.path.basename(media_fullname)\n media_relname = relative_name(media_fullname, sourcedir)\n thumb_basename = thumbname(media_relname, key)\n thumb_fullname = os.path.join(thumbdir, thumb_basename)\n\n try:\n info, infofmt = get_image_info(media_fullname)\n infofmt = media_basename + ': ' + infofmt\n thumbsize = size_thumbnail(info[2], info[3], thumbmax)\n make_thumbnail_image(args, media_fullname, thumb_fullname, thumbsize)\n return PostImage(None, media_fullname, '/'.join((args.thumbrep, thumb_basename)),\n thumbsize, infofmt)\n except PIL.UnidentifiedImageError:\n # corrupted image\n warning('Unable to read image', 
media_fullname)\n return None\n\n\ndef create_item_video(args, media_fullname, sourcedir, thumbdir, key, thumbmax):\n media_basename = os.path.basename(media_fullname)\n media_relname = relative_name(media_fullname, sourcedir)\n thumb_basename = thumbname(media_relname, key)\n thumb_fullname = os.path.join(thumbdir, thumb_basename)\n info_fullname = os.path.splitext(thumb_fullname)[0] + '.info'\n\n try:\n info, infofmt = get_video_info(media_fullname, info_fullname)\n infofmt = media_basename + ': ' + infofmt\n thumbsize = size_thumbnail(info[2], info[3], thumbmax)\n make_thumbnail_video(args, media_fullname, thumb_fullname, thumbsize, duration=info[5])\n return PostVideo(None, media_fullname, '/'.join((args.thumbrep, thumb_basename)),\n thumbsize, infofmt)\n except CalledProcessError:\n # corrupted video\n warning('Unable to read video', media_fullname)\n return None\n\n\ndef create_item_subdir(args, media_fullname, sourcedir, thumbdir, key, thumbmax):\n media_basename = os.path.basename(media_fullname)\n media_relname = relative_name(media_fullname, sourcedir)\n thumb_basename = thumbname(media_relname, key)\n thumb_fullname = os.path.join(thumbdir, thumb_basename)\n\n info, infofmt = None, None\n thumbsize = (thumbmax, int(round(thumbmax / 640 * 480)))\n\n medias_ext = list_of_medias_ext(args, media_fullname)\n if not medias_ext:\n return None\n\n item = PostSubdir(None, media_fullname, '/'.join((args.thumbrep, thumb_basename)),\n thumbsize, infofmt)\n item.htmname = os.path.join(os.path.dirname(thumbdir), media_relname + args.html_suffix)\n if args.thumbnails.subdir_caption:\n item.caption = media_basename\n else:\n item.caption = ''\n\n _, posts = make_posts(args, media_fullname)\n item.posts = posts\n items = [item for post in posts for item in post.dcim]\n item.sublist = items\n\n make_thumbnail_subdir(args, media_fullname, thumb_fullname, thumbsize, items, thumbdir)\n return item\n\n\ndef relative_name(media_fullname, sourcedir):\n \"\"\"\n 
/Gilles/Dev/journal/tests/subdir/deeper2/deepest/OCT_20000112_000004.jpg\n -->\n deeper2_deepest_OCT_20000112_000004.jpg\n\n /Gilles/Dev/journal/tests/subdir/deeper2/deepest\n -->\n deeper2_deepest\n \"\"\"\n x = os.path.relpath(media_fullname, sourcedir)\n x = x.replace('\\\\', '_').replace('/', '_').replace('#', '_')\n return x\n\n\n# -- Creation of posts --------------------------------------------------------\n\n\ndef make_posts(args, dirname):\n if args.diary is True:\n if not args.sourcedir:\n return make_posts_from_diary(args)\n else:\n return make_posts_from_diary_and_dir(args)\n elif args.bydate is False:\n return make_posts_from_subdir(args, dirname)\n else:\n return make_posts_from_subdir_and_date(args, dirname)\n\n\ndef make_posts_from_diary(args):\n md_filename = os.path.join(args.root, 'index.md')\n if os.path.exists(md_filename):\n title, posts = parse_markdown(md_filename)\n else:\n error('File not found', md_filename)\n\n for post in posts:\n for media in post.medias:\n media_fullname = os.path.join(args.root, media.uri)\n item = create_item(args, media_fullname, args.root, args.thumbdir, 'post', 400)\n media.thumb = item.thumb\n media.thumbsize = item.thumbsize\n media.descr = item.descr\n\n return title, posts\n\n\ndef create_items_by_date(args, medias, posts):\n # list of required dates\n if args.dates == 'diary':\n required_dates = {post.date for post in posts}\n else:\n required_dates = {date_from_item(media) for media in medias}\n if type(args.dates) == tuple:\n date1, date2 = args.dates\n required_dates = {date for date in required_dates if date1 <= date <= date2}\n\n bydate = defaultdict(list)\n for media_fullname in medias:\n date = date_from_item(media_fullname)\n if date in required_dates:\n item = create_item(args, media_fullname, args.sourcedir, args.thumbdir, 'dcim', 300)\n if item:\n bydate[date].append(item)\n\n for date, liste in bydate.items():\n liste.sort(key=lambda item: time_from_item(item.uri))\n\n return bydate\n\n\ndef 
make_posts_from_diary_and_dir(args):\n title, posts = make_posts_from_diary(args)\n\n # list of all pictures and movies\n medias = list_of_medias(args, args.sourcedir, args.recursive)\n\n bydate = create_items_by_date(args, medias, posts)\n\n # make list of extra dates (not in posts)\n extradates = set(bydate) - {post.date for post in posts}\n\n # complete posts with extra dates\n for date in extradates:\n post = Post.from_date(date)\n post.extra = True\n bisect.insort(posts, post)\n\n # several posts can have the same date, only the first one is completed with dcim medias\n for post in posts:\n if post.date in bydate and post.daterank == 1:\n post.dcim = bydate[post.date]\n\n return title, posts\n\n\ndef make_posts_from_subdir(args, dirname):\n # list of pictures and movies plus subdirectories\n if args.bydir is False:\n medias_ext = list_of_medias(args, dirname, args.recursive)\n else:\n medias_ext = list_of_medias_ext(args, dirname)\n\n #required_dates = get_required_dates(args, medias_ext, posts=None)\n #medias_ext_bis = []\n #for media in medias_ext:\n # if complies_with_required_dates(media):\n # medias_ext_bis.append(media)\n\n # complete posts\n postmedias = list()\n for item in medias_ext:\n postmedia = create_item(args, item, args.sourcedir, args.thumbdir, 'dcim', 300)\n if postmedia is not None:\n postmedias.append(postmedia)\n\n post = Post(date='00000000', text='', medias=[])\n post.dcim = postmedias\n posts = [post]\n title = os.path.basename(args.sourcedir) or os.path.splitdrive(args.sourcedir)[0]\n\n return title, posts\n\n\ndef make_posts_from_subdir_and_date(args, dirname):\n # list of all pictures and movies\n if args.bydir is False:\n medias = list_of_medias(args, dirname, args.recursive)\n subdirs = []\n else:\n medias_ext = list_of_medias_ext(args, dirname)\n medias = [_ for _ in medias_ext if is_media(_)]\n subdirs = [_ for _ in medias_ext if not is_media(_)]\n\n # create list of posts with a single post containing all subdirs\n posts = 
list()\n items = list()\n for media_fullname in subdirs:\n item = create_item(args, media_fullname, args.sourcedir, args.thumbdir, 'dcim', 300)\n if item:\n items.append(item)\n if items:\n post = Post(date='00000000', text='', medias=[])\n post.dcim = items\n posts.append(post)\n\n bydate = create_items_by_date(args, medias, posts)\n\n # add dates\n for date in sorted(bydate):\n post = Post.from_date(date)\n post.dcim = bydate[post.date]\n posts.append(post)\n title = os.path.basename(args.sourcedir) or os.path.splitdrive(args.sourcedir)[0]\n\n return title, posts\n\n\n# -- Creation of html page from directory tree --------------------------------\n\n\ndef create_gallery(args):\n title, posts = make_posts(args, args.sourcedir)\n print_html(args, posts, title, os.path.join(args.dest, args.rootname), 'regular')\n purge_htmlfiles(args, posts)\n if args.diary and not args.sourcedir:\n purge_thumbnails(args, args.thumbdir, posts, diary=True)\n else:\n purge_thumbnails(args, args.thumbdir, posts)\n\n\n# -- Creation of diary from medias --------------------------------------------\n\n\ndef create_diary(args):\n # list of all pictures and movies\n medias = list_of_medias(args, args.sourcedir, args.recursive)\n\n # list of required dates\n if args.dates == 'diary':\n assert 0\n else:\n required_dates = {date_from_item(media) for media in medias}\n if type(args.dates) == tuple:\n date1, date2 = args.dates\n required_dates = {date for date in required_dates if date1 <= date <= date2}\n\n title = args.sourcedir\n posts = list()\n for date in sorted(required_dates):\n posts.append(Post.from_date(date))\n\n os.makedirs(args.root, exist_ok=True)\n print_markdown(posts, title, os.path.join(args.root, 'index.md'))\n\n\n# -- Export to blogger---------------------------------------------------------\n\n\ndef online_images_url(args):\n try:\n if args.urlblogger.startswith('http:') or args.urlblogger.startswith('https:'):\n with urlopen(args.urlblogger) as u:\n buffer = u.read()\n 
else:\n with open(args.urlblogger, 'rb') as f:\n buffer = f.read()\n except:\n error('Unable to read url', args.urlblogger)\n buffer = buffer.decode('utf-8')\n\n online_images = dict()\n for match in re.finditer('<div class=\"separator\"((?!<div).)*?</div>', buffer, flags=re.DOTALL):\n div_separator = match.group(0)\n div_separator = div_separator.replace(' ', '')\n elem_div = objectify.fromstring(div_separator)\n for elem_a in elem_div.iterchildren(tag='a'):\n href = elem_a.get(\"href\")\n thumb = elem_a.img.get(\"src\")\n online_images[os.path.basename(href)] = (href, thumb)\n\n # video insertion relies only on video order\n online_videos = list()\n for match in re.finditer('<iframe allowfullscreen=\"allowfullscreen\".*?</iframe>', buffer, flags=re.DOTALL):\n iframe = match.group(0)\n online_videos.append(iframe)\n\n return online_images, online_videos\n\n\ndef compare_image_buffers(imgbuf1, imgbuf2):\n \"\"\"\n return True if images read on file are identical, False otherwise\n \"\"\"\n with io.BytesIO(imgbuf1) as imgio1, io.BytesIO(imgbuf2) as imgio2:\n img1 = Image.open(imgio1)\n img2 = Image.open(imgio2)\n diff = ImageChops.difference(img1, img2)\n return not diff.getbbox()\n\n\ndef check_images(args, posts, online_images):\n result = True\n for post in posts:\n for media in post.medias:\n if type(media) is PostImage:\n if media.basename in online_images:\n with open(os.path.join(args.root, media.uri), 'rb') as f:\n imgbuf1 = f.read()\n try:\n with urlopen(online_images[media.basename][0]) as u:\n imgbuf2 = u.read()\n except FileNotFoundError:\n print('File not found', online_images[media.basename][0])\n next\n if compare_image_buffers(imgbuf1, imgbuf2) is False:\n print('Files are different, upload', media.basename)\n else:\n if 1:\n print('File already online', media.basename)\n else:\n print('File is absent, upload', media.basename)\n result = False\n elif type(media) is PostVideo:\n # no check for the moment\n print('Video not checked', media.basename)\n 
else:\n assert False\n return result\n\n\ndef compose_blogger_html(args, title, posts, imgdata, online_videos):\n \"\"\" Compose html with blogger image urls\n \"\"\"\n for post in posts:\n for media in post.medias:\n if type(media) is PostImage:\n if media.uri not in imgdata:\n print('Image missing: ', media.uri)\n else:\n img_url, resized_url = imgdata[media.uri]\n media.uri = img_url\n media.resized_url = resized_url\n elif type(media) is PostVideo:\n if not online_videos:\n print('Video missing: ', media.uri)\n else:\n media.iframe = online_videos[0]\n del online_videos[0]\n else:\n assert False\n\n return print_html(args, posts, title, '', target='blogger')\n\n\ndef prepare_for_blogger(args):\n \"\"\"\n Export blogger html to clipboard.\n If --full, export complete html, otherwise export html extract ready to\n paste into blogger edit mode.\n \"\"\"\n title, posts = parse_markdown(os.path.join(args.root, 'index.md'))\n online_images, online_videos = online_images_url(args)\n\n if args.check_images and check_images(args, posts, online_images) is False:\n pass\n\n html = compose_blogger_html(args, title, posts, online_images, online_videos)\n\n if args.full is False:\n html = re.search('<body>(.*)?</body>', html, flags=re.DOTALL).group(1)\n html = re.sub('<script>.*?</script>', '', html, flags=re.DOTALL)\n html = STYLE.replace('%%', '%') + html\n\n if args.dest:\n with open(args.dest, 'wt', encoding='utf-8') as f:\n f.write(html)\n else:\n clipboard.copy(html)\n\n\n# -- Other commands -----------------------------------------------------------\n\n\ndef idempotence(args):\n \"\"\"\n For testing identity between a diary file and the fle obtained after reading\n and printing it. 
See testing.\n \"\"\"\n title, posts = parse_markdown(os.path.join(args.root, 'index.md'))\n print_markdown(posts, title, os.path.join(args.dest, 'index.md'))\n\n\n# -- Configuration file ------------------------------------------------------\n\n\n# The following docstring is used to create the configuration file.\nCONFIG_DEFAULTS = \"\"\"\\\n[source]\n\n; source directory\n; value: valid path\nsourcedir = .\n\n; one web page per directory\n; value: true or false\nbydir = false\n\n; dispatch medias by dates\n; value: true or false\nbydate = false\n\n; include text and medias from diary file\n; value: true or false\ndiary = false\n\n; include subdirectories recursively (used when bydir is false)\n; value: true or false\nrecursive = false\n\n; interval of dates to include\n; value: source|diary|yyyymmdd-yyyymmdd or empty (= source)\ndates =\n\n; github Pages compatibility (.htlml extension and no dot in directory names)\n; value: true or false\ngithub_pages = false\n\n[thumbnails]\n\n; specifies whether or not the gallery displays media description (size, dimension, etc)\n; value: true or false\nmedia_description = true\n\n; specifies whether subdir captions are empty or the name of the subdir\n; value: true or false\nsubdir_caption = true\n\n; timestamp of thumbnail in video\n; value: number of seconds\nthumbdelay = 5\n\n; maximum number of thumbnails to remove without user confirmation\n; value: integer\nthreshold_thumbs = 10\n\n[photobox]\n\n; Allows to navigate between first and last images\n; value: true or false\nloop = false\n\n; Show gallery thumbnails below the presented photo\n; value: true or false\nthumbs = true\n\n; Should autoplay on first time or not\n; value: true or false\nautoplay = false\n\n; Autoplay interval (less than 1000 will hide the autoplay button)\n; value: milliseconds\ntime = 3000\n\n; Disable/enable mousewheel image zooming\n; value: true or false\nzoomable = true\n\n; Allow rotation of the image\n; value: true or false\nrotatable = 
true\n\n; Change image using mousewheel left/right\n; value: true or false\nwheelNextPrev = true\n\"\"\"\n\n\nclass MyConfigParser (ConfigParser):\n \"\"\"Add input checking.\"\"\"\n def __init__(self):\n ConfigParser.__init__(self, inline_comment_prefixes=(';',))\n\n def error(self, section, entry):\n error('Missing or incorrect config value:', '[%s]%s' % (section, entry))\n\n def getint(self, section, entry, default=None):\n try:\n if default is None:\n return ConfigParser.getint(self, section, entry)\n else:\n return ConfigParser.getint(self, section, entry, raw=True, vars=None, fallback=default)\n except Exception as e:\n print(e)\n self.error(section, entry)\n\n def getboolean(self, section, entry, default=None):\n try:\n if default is None:\n return ConfigParser.getboolean(self, section, entry)\n else:\n return ConfigParser.getboolean(self, section, entry, raw=True, vars=None, fallback=default)\n except Exception as e:\n print(e)\n self.error(section, entry)\n\n\ndef configfilename(params):\n return os.path.join(params.root, '.config.ini')\n\n\ndef createconfig(config_filename):\n with open(config_filename, 'wt') as f:\n f.writelines(CONFIG_DEFAULTS)\n\n\ndef read_config(params):\n config_filename = configfilename(params)\n\n try:\n if not os.path.exists(config_filename) or params.resetcfg:\n createconfig(config_filename)\n except:\n error('Error creating configuration file')\n\n try:\n getconfig(params, config_filename)\n except Exception as e:\n error('Error reading configuration file.', str(e), 'Use --resetcfg')\n\n\ndef getconfig(options, config_filename):\n class Section:\n pass\n\n options.source = Section()\n options.thumbnails = Section()\n options.photobox = Section()\n\n config = MyConfigParser()\n config.read(config_filename)\n\n # [source]\n options.source.sourcedir = config.get('source', 'sourcedir')\n options.source.bydir = config.getboolean('source', 'bydir')\n options.source.bydate = config.getboolean('source', 'bydate')\n options.source.diary 
= config.getboolean('source', 'diary')\n options.source.recursive = config.getboolean('source', 'recursive')\n options.source.dates = config.get('source', 'dates')\n options.source.github_pages = config.getboolean('source', 'github_pages', default=False)\n\n # [thumbnails]\n options.thumbnails.media_description = config.getboolean('thumbnails', 'media_description')\n options.thumbnails.subdir_caption = config.getboolean('thumbnails', 'subdir_caption')\n options.thumbnails.thumbdelay = config.getint('thumbnails', 'thumbdelay')\n options.thumbnails.threshold_thumbs = config.getint('thumbnails', 'threshold_thumbs')\n options.thumbnails.threshold_htmlfiles = config.getint('thumbnails', 'threshold_htmlfiles', default=3)\n\n # [photobox]\n options.photobox.loop = config.getboolean('photobox', 'loop')\n options.photobox.thumbs = config.getboolean('photobox', 'thumbs')\n options.photobox.autoplay = config.getboolean('photobox', 'autoplay')\n options.photobox.time = config.getint('photobox', 'time')\n options.photobox.zoomable = config.getboolean('photobox', 'zoomable')\n options.photobox.rotatable = config.getboolean('photobox', 'rotatable')\n options.photobox.wheelNextPrev = config.getboolean('photobox', 'wheelNextPrev')\n\n\ndef setconfig(cfgname, section, key, value):\n config = MyConfigParser()\n config.read(cfgname)\n config.set(section, key, value)\n with open(cfgname, 'wt') as configfile:\n config.write(configfile)\n\n\ndef setconfig_cmd(args):\n config_filename = configfilename(args)\n setconfig(config_filename, *args.setcfg)\n\n\ndef update_config(args):\n # update only entries which can be modified from the command line (source section)\n updates = (\n ('sourcedir', args.sourcedir),\n ('bydir', BOOL[args.bydir]),\n ('bydate', BOOL[args.bydate]),\n ('diary', BOOL[args.diary]),\n ('recursive', BOOL[args.recursive]),\n ('dates', args.dates),\n ('github_pages', BOOL[args.github_pages]),\n )\n\n # manual update to keep comments\n cfgname = configfilename(args)\n with 
open(cfgname) as f:\n cfglines = [_.strip() for _ in f.readlines()]\n\n for key, value in updates:\n for iline, line in enumerate(cfglines):\n if line.startswith(key):\n cfglines[iline] = f'{key} = {value}'\n break\n\n with open(cfgname, 'wt') as f:\n for line in cfglines:\n print(line, file=f)\n\n\n# -- Error handling -----------------------------------------------------------\n\n\ndef warning(*msg):\n print(colorama.Fore.YELLOW + colorama.Style.BRIGHT +\n ' '.join(msg),\n colorama.Style.RESET_ALL)\n\n\n# Every error message error must be declared here to give a return code to the error\nERRORS = '''\\\nFile not found\nDirectory not found\nNo date in post\nIncorrect date value:\nPosts are not ordered\nUnable to read url\nNo image source (--sourcedir)\nNo blogger url (--url)\nMissing or incorrect config value:\nError creating configuration file\nError reading configuration file.\nIncorrect date format\nIncorrect parameters:\n'''\n\n\ndef errorcode(msg):\n return ERRORS.splitlines().index(msg) + 1\n\n\ndef error(*msg):\n print(colorama.Fore.RED + colorama.Style.BRIGHT +\n ' '.join(msg),\n colorama.Style.RESET_ALL)\n sys.exit(errorcode(msg[0]))\n\n\n# -- Main ---------------------------------------------------------------------\n\n\nBOOL = ('false', 'true')\n\n\ndef parse_command_line(argstring):\n parser = argparse.ArgumentParser(description=None, usage=USAGE)\n\n agroup = parser.add_argument_group('Commands')\n xgroup = agroup.add_mutually_exclusive_group()\n xgroup.add_argument('--gallery', help='source in --sourcedir',\n action='store', metavar='<root-dir>')\n agroup.add_argument('--update', help='updates gallery with parameters in config file',\n action='store', metavar='<root-dir>')\n xgroup.add_argument('--create', help='create journal from medias in --sourcedir',\n action='store', metavar='<root-dir>')\n # testing\n xgroup.add_argument('--resetcfg', help='reset config file to defaults',\n action='store', metavar='<root-dir>')\n xgroup.add_argument('--setcfg', 
help=argparse.SUPPRESS,\n action='store', nargs=4, metavar='<root-dir>')\n xgroup.add_argument('--idem', help=argparse.SUPPRESS,\n action='store', metavar='<root-dir>')\n # blogger\n xgroup.add_argument('--blogger',\n help='input md, html blogger ready in clipboard',\n action='store', metavar='<root-dir>')\n\n agroup = parser.add_argument_group('Parameters')\n agroup.add_argument('--bydir', help='organize gallery by subdirectory',\n action='store', default=None, choices=BOOL)\n agroup.add_argument('--bydate', help='organize gallery by date',\n action='store', default=None, choices=BOOL)\n agroup.add_argument('--diary', help='organize gallery using markdown file diary',\n action='store', default=None, choices=BOOL)\n agroup.add_argument('--recursive', help='--sourcedir scans recursively',\n action='store', default=None, choices=BOOL)\n agroup.add_argument('--dates', help='dates interval',\n action='store', default=None)\n agroup.add_argument('--sourcedir', help='media directory',\n action='store', default=None)\n agroup.add_argument('--github_pages', help='github Pages compatibility',\n action='store', default=None, choices=BOOL)\n agroup.add_argument('--dest', help='output directory',\n action='store')\n agroup.add_argument('--forcethumb', help='force calculation of thumbnails',\n action='store_true', default=False)\n\n agroup.add_argument('--full', help='full html (versus blogger ready html)',\n action='store_true', default=False)\n agroup.add_argument('--check', dest='check_images', help='check availability of medias on blogger',\n action='store_true')\n agroup.add_argument('--url', dest='urlblogger', help='blogger post url',\n action='store')\n\n if argstring is None:\n print('Type \"galerie -h\" for help')\n sys.exit(1)\n else:\n args = parser.parse_args(argstring.split())\n\n if args.update and (args.bydir or args.bydate or args.diary or args.sourcedir or\n args.recursive or args.dates or args.github_pages):\n error('Incorrect parameters:',\n '--update cannot 
be used with creation parameters, use explicit command')\n\n args.bydir = args.bydir == 'true'\n args.bydate = args.bydate == 'true'\n args.diary = args.diary == 'true'\n args.recursive = args.recursive == 'true'\n args.dates = 'source' if (args.dates is None) else args.dates\n args.github_pages = args.github_pages == 'true'\n\n args.root = (\n args.create or args.gallery or args.update\n or args.blogger or args.idem or args.resetcfg\n )\n\n if args.setcfg:\n args.root = args.setcfg[0]\n args.setcfg = args.setcfg[1:]\n\n return args\n\n\ndef setup_part1(args):\n \"\"\"\n Made before reading config file (config file located in args.root).\n Check and normalize root path.\n \"\"\"\n args.rootarg = args.root\n rootext = os.path.splitext(args.rootarg)[1]\n if rootext == '':\n pass\n else:\n args.root = os.path.dirname(args.root)\n\n if args.root:\n args.root = os.path.abspath(args.root)\n if not os.path.isdir(args.root):\n if args.gallery:\n os.mkdir(args.root)\n else:\n error('Directory not found', args.root)\n\n\ndef setup_part2(args):\n \"\"\"\n Made after reading config file.\n Check for ffmpeg in path.\n Create .thumbnails dir if necessary and create .nomedia in it.\n Copy photobox file to destination dir.\n Handle priority between command line and config file.\n \"\"\"\n if args.update:\n args.sourcedir = args.source.sourcedir\n args.bydir = args.source.bydir\n args.bydate = args.source.bydate\n args.diary = args.source.diary\n args.recursive = args.source.recursive\n args.dates = args.source.dates\n args.github_pages = args.source.github_pages\n elif args.gallery:\n args.source.sourcedir = args.sourcedir\n args.source.bydir = args.bydir\n args.source.bydate = args.bydate\n args.source.diary = args.diary\n args.source.recursive = args.recursive\n args.source.dates = args.dates\n args.source.github_pages = args.github_pages\n update_config(args)\n\n if args.github_pages:\n args.html_suffix = '.html'\n else:\n args.html_suffix = '.htm'\n\n rootext = 
os.path.splitext(args.rootarg)[1]\n if rootext:\n args.rootname = os.path.basename(args.rootarg)\n else:\n args.rootname = 'index' + args.html_suffix\n\n if args.sourcedir:\n args.sourcedir = os.path.abspath(args.sourcedir)\n if os.path.splitdrive(args.sourcedir)[0]:\n drive, rest = os.path.splitdrive(args.sourcedir)\n args.sourcedir = drive.upper() + rest\n if not os.path.isdir(args.sourcedir):\n error('Directory not found', args.sourcedir)\n else:\n if args.gallery and args.diary is False and args.update is None:\n error('Directory not found', 'Use --sourcedir')\n\n if args.dest:\n args.dest = os.path.abspath(args.dest)\n\n if args.dest is None:\n args.dest = args.root\n\n if args.blogger and args.urlblogger is None:\n error('No blogger url (--url)')\n\n if args.gallery or args.update:\n # check for ffmpeg and ffprobe in path\n for exe in ('ffmpeg', 'ffprobe'):\n try:\n check_output([exe, '-version'])\n except FileNotFoundError:\n error('File not found', exe)\n\n if args.github_pages:\n args.thumbrep = 'thumbnails'\n else:\n args.thumbrep = '.thumbnails'\n\n args.thumbdir = os.path.join(args.dest, args.thumbrep)\n if not os.path.exists(args.thumbdir):\n os.mkdir(args.thumbdir)\n open(os.path.join(args.thumbdir, '.nomedia'), 'a').close()\n\n favicondst = os.path.join(args.dest, 'favicon.ico')\n if not os.path.isfile(favicondst):\n faviconsrc = os.path.join(os.path.dirname(__file__), 'favicon.ico')\n shutil.copyfile(faviconsrc, favicondst)\n\n photoboxdir = os.path.join(args.dest, 'photobox')\n if not os.path.exists(photoboxdir):\n photoboxsrc = os.path.join(os.path.dirname(__file__), 'photobox')\n shutil.copytree(photoboxsrc, photoboxdir)\n\n if args.dates:\n if not(args.gallery or args.create):\n # silently ignored for the moment, otherwise all other commands will\n # launch a wanrning or an error on the default --dates value\n pass\n\n if args.dates == 'source':\n pass\n elif args.dates == 'diary':\n if args.create:\n error('Incorrect date format', args.dates)\n 
elif re.match(r'\\d+-\\d+', args.dates):\n date1, date2 = args.dates.split('-')\n if validate_date(date1) and validate_date(date2):\n args.dates = date1, date2\n else:\n error('Incorrect date format', args.dates)\n else:\n error('Incorrect date format', args.dates)\n\n\ndef main(argstring=None):\n colorama.init()\n args = parse_command_line(argstring)\n setup_part1(args)\n read_config(args)\n setup_part2(args)\n try:\n if args.gallery or args.update:\n create_gallery(args)\n\n elif args.create:\n create_diary(args)\n\n elif args.blogger:\n prepare_for_blogger(args)\n\n elif args.idem:\n idempotence(args)\n\n elif args.setcfg:\n setconfig_cmd(args)\n\n except KeyboardInterrupt:\n warning('Interrupted by user.')\n\n\nif __name__ == '__main__':\n main(' '.join(sys.argv[1:]))\n",
"step-ids": [
79,
84,
88,
100,
110
]
}
|
[
79,
84,
88,
100,
110
] |
<|reserved_special_token_0|>
class Node:
def __init__(self):
self.metadata = list()
self.children = list()
def checksum(self):
return sum([x for x in self.metadata])
def add_child(self, child):
self.children.append(child)
pass
def value(self):
if len(self.children) == 0:
return self.checksum()
else:
val = 0
for m in self.metadata:
if m > 0 and m <= len(self.children):
val += self.children[m - 1].value()
return val
def parse_string(my_string: str) ->List[int]:
return [int(x) for x in my_string.split(' ')]
def parse_node(codes: List[int], idx: int) ->Tuple[Node, int]:
num_children = codes[idx]
num_metadata = codes[idx + 1]
node = Node()
j = idx + 2
for i in range(num_children):
child, j = parse_node(codes, j)
node.add_child(child)
meta = list()
for i in range(num_metadata):
meta.append(codes[j])
j += 1
node.metadata = meta
return node, j
<|reserved_special_token_0|>
def checksum(node):
c = node.checksum()
for child in node.children:
c += checksum(child)
return c
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
with open('data/day8_input.txt', 'r') as fp:
my_string = fp.read()
class Node:
def __init__(self):
self.metadata = list()
self.children = list()
def checksum(self):
return sum([x for x in self.metadata])
def add_child(self, child):
self.children.append(child)
pass
def value(self):
if len(self.children) == 0:
return self.checksum()
else:
val = 0
for m in self.metadata:
if m > 0 and m <= len(self.children):
val += self.children[m - 1].value()
return val
def parse_string(my_string: str) ->List[int]:
return [int(x) for x in my_string.split(' ')]
def parse_node(codes: List[int], idx: int) ->Tuple[Node, int]:
num_children = codes[idx]
num_metadata = codes[idx + 1]
node = Node()
j = idx + 2
for i in range(num_children):
child, j = parse_node(codes, j)
node.add_child(child)
meta = list()
for i in range(num_metadata):
meta.append(codes[j])
j += 1
node.metadata = meta
return node, j
<|reserved_special_token_0|>
def checksum(node):
c = node.checksum()
for child in node.children:
c += checksum(child)
return c
print(checksum(tree))
print(tree.value())
<|reserved_special_token_1|>
<|reserved_special_token_0|>
test_string = '2 3 0 3 10 11 12 1 1 0 1 99 2 1 1 2'
with open('data/day8_input.txt', 'r') as fp:
my_string = fp.read()
class Node:
def __init__(self):
self.metadata = list()
self.children = list()
def checksum(self):
return sum([x for x in self.metadata])
def add_child(self, child):
self.children.append(child)
pass
def value(self):
if len(self.children) == 0:
return self.checksum()
else:
val = 0
for m in self.metadata:
if m > 0 and m <= len(self.children):
val += self.children[m - 1].value()
return val
def parse_string(my_string: str) ->List[int]:
return [int(x) for x in my_string.split(' ')]
def parse_node(codes: List[int], idx: int) ->Tuple[Node, int]:
num_children = codes[idx]
num_metadata = codes[idx + 1]
node = Node()
j = idx + 2
for i in range(num_children):
child, j = parse_node(codes, j)
node.add_child(child)
meta = list()
for i in range(num_metadata):
meta.append(codes[j])
j += 1
node.metadata = meta
return node, j
codes = parse_string(my_string)
tree, _ = parse_node(codes, 0)
def checksum(node):
c = node.checksum()
for child in node.children:
c += checksum(child)
return c
print(checksum(tree))
print(tree.value())
<|reserved_special_token_1|>
from typing import List, Tuple
test_string = '2 3 0 3 10 11 12 1 1 0 1 99 2 1 1 2'
with open('data/day8_input.txt', 'r') as fp:
my_string = fp.read()
class Node:
def __init__(self):
self.metadata = list()
self.children = list()
def checksum(self):
return sum([x for x in self.metadata])
def add_child(self, child):
self.children.append(child)
pass
def value(self):
if len(self.children) == 0:
return self.checksum()
else:
val = 0
for m in self.metadata:
if m > 0 and m <= len(self.children):
val += self.children[m - 1].value()
return val
def parse_string(my_string: str) ->List[int]:
return [int(x) for x in my_string.split(' ')]
def parse_node(codes: List[int], idx: int) ->Tuple[Node, int]:
num_children = codes[idx]
num_metadata = codes[idx + 1]
node = Node()
j = idx + 2
for i in range(num_children):
child, j = parse_node(codes, j)
node.add_child(child)
meta = list()
for i in range(num_metadata):
meta.append(codes[j])
j += 1
node.metadata = meta
return node, j
codes = parse_string(my_string)
tree, _ = parse_node(codes, 0)
def checksum(node):
c = node.checksum()
for child in node.children:
c += checksum(child)
return c
print(checksum(tree))
print(tree.value())
<|reserved_special_token_1|>
from typing import List, Tuple
test_string = "2 3 0 3 10 11 12 1 1 0 1 99 2 1 1 2"
with open('data/day8_input.txt', 'r') as fp:
my_string = fp.read()
class Node:
def __init__(self):
self.metadata = list()
self.children = list()
def checksum(self):
return sum([x for x in self.metadata])
def add_child(self, child):
self.children.append(child)
pass
def value(self):
if len(self.children) == 0:
return self.checksum()
else:
val = 0
for m in self.metadata:
if m > 0 and m <= len(self.children):
val += self.children[m-1].value()
return val
def parse_string(my_string : str) -> List[int]:
return [int(x) for x in my_string.split(" ")]
def parse_node(codes: List[int], idx : int) -> Tuple[Node, int]:
num_children = codes[idx]
num_metadata = codes[idx + 1]
node = Node()
j = idx + 2
for i in range(num_children):
child, j = parse_node(codes, j)
node.add_child(child)
meta = list()
for i in range(num_metadata):
meta.append(codes[j])
j += 1
node.metadata = meta
return (node, j)
codes = parse_string(my_string)
tree, _ = parse_node(codes, 0)
def checksum(node):
c = node.checksum()
for child in node.children:
c += checksum(child)
return c
print(checksum(tree))
print(tree.value())
|
flexible
|
{
"blob_id": "3bea4413a41a9eecb5e3184d090b646e17892b5c",
"index": 5277,
"step-1": "<mask token>\n\n\nclass Node:\n\n def __init__(self):\n self.metadata = list()\n self.children = list()\n\n def checksum(self):\n return sum([x for x in self.metadata])\n\n def add_child(self, child):\n self.children.append(child)\n pass\n\n def value(self):\n if len(self.children) == 0:\n return self.checksum()\n else:\n val = 0\n for m in self.metadata:\n if m > 0 and m <= len(self.children):\n val += self.children[m - 1].value()\n return val\n\n\ndef parse_string(my_string: str) ->List[int]:\n return [int(x) for x in my_string.split(' ')]\n\n\ndef parse_node(codes: List[int], idx: int) ->Tuple[Node, int]:\n num_children = codes[idx]\n num_metadata = codes[idx + 1]\n node = Node()\n j = idx + 2\n for i in range(num_children):\n child, j = parse_node(codes, j)\n node.add_child(child)\n meta = list()\n for i in range(num_metadata):\n meta.append(codes[j])\n j += 1\n node.metadata = meta\n return node, j\n\n\n<mask token>\n\n\ndef checksum(node):\n c = node.checksum()\n for child in node.children:\n c += checksum(child)\n return c\n\n\n<mask token>\n",
"step-2": "<mask token>\nwith open('data/day8_input.txt', 'r') as fp:\n my_string = fp.read()\n\n\nclass Node:\n\n def __init__(self):\n self.metadata = list()\n self.children = list()\n\n def checksum(self):\n return sum([x for x in self.metadata])\n\n def add_child(self, child):\n self.children.append(child)\n pass\n\n def value(self):\n if len(self.children) == 0:\n return self.checksum()\n else:\n val = 0\n for m in self.metadata:\n if m > 0 and m <= len(self.children):\n val += self.children[m - 1].value()\n return val\n\n\ndef parse_string(my_string: str) ->List[int]:\n return [int(x) for x in my_string.split(' ')]\n\n\ndef parse_node(codes: List[int], idx: int) ->Tuple[Node, int]:\n num_children = codes[idx]\n num_metadata = codes[idx + 1]\n node = Node()\n j = idx + 2\n for i in range(num_children):\n child, j = parse_node(codes, j)\n node.add_child(child)\n meta = list()\n for i in range(num_metadata):\n meta.append(codes[j])\n j += 1\n node.metadata = meta\n return node, j\n\n\n<mask token>\n\n\ndef checksum(node):\n c = node.checksum()\n for child in node.children:\n c += checksum(child)\n return c\n\n\nprint(checksum(tree))\nprint(tree.value())\n",
"step-3": "<mask token>\ntest_string = '2 3 0 3 10 11 12 1 1 0 1 99 2 1 1 2'\nwith open('data/day8_input.txt', 'r') as fp:\n my_string = fp.read()\n\n\nclass Node:\n\n def __init__(self):\n self.metadata = list()\n self.children = list()\n\n def checksum(self):\n return sum([x for x in self.metadata])\n\n def add_child(self, child):\n self.children.append(child)\n pass\n\n def value(self):\n if len(self.children) == 0:\n return self.checksum()\n else:\n val = 0\n for m in self.metadata:\n if m > 0 and m <= len(self.children):\n val += self.children[m - 1].value()\n return val\n\n\ndef parse_string(my_string: str) ->List[int]:\n return [int(x) for x in my_string.split(' ')]\n\n\ndef parse_node(codes: List[int], idx: int) ->Tuple[Node, int]:\n num_children = codes[idx]\n num_metadata = codes[idx + 1]\n node = Node()\n j = idx + 2\n for i in range(num_children):\n child, j = parse_node(codes, j)\n node.add_child(child)\n meta = list()\n for i in range(num_metadata):\n meta.append(codes[j])\n j += 1\n node.metadata = meta\n return node, j\n\n\ncodes = parse_string(my_string)\ntree, _ = parse_node(codes, 0)\n\n\ndef checksum(node):\n c = node.checksum()\n for child in node.children:\n c += checksum(child)\n return c\n\n\nprint(checksum(tree))\nprint(tree.value())\n",
"step-4": "from typing import List, Tuple\ntest_string = '2 3 0 3 10 11 12 1 1 0 1 99 2 1 1 2'\nwith open('data/day8_input.txt', 'r') as fp:\n my_string = fp.read()\n\n\nclass Node:\n\n def __init__(self):\n self.metadata = list()\n self.children = list()\n\n def checksum(self):\n return sum([x for x in self.metadata])\n\n def add_child(self, child):\n self.children.append(child)\n pass\n\n def value(self):\n if len(self.children) == 0:\n return self.checksum()\n else:\n val = 0\n for m in self.metadata:\n if m > 0 and m <= len(self.children):\n val += self.children[m - 1].value()\n return val\n\n\ndef parse_string(my_string: str) ->List[int]:\n return [int(x) for x in my_string.split(' ')]\n\n\ndef parse_node(codes: List[int], idx: int) ->Tuple[Node, int]:\n num_children = codes[idx]\n num_metadata = codes[idx + 1]\n node = Node()\n j = idx + 2\n for i in range(num_children):\n child, j = parse_node(codes, j)\n node.add_child(child)\n meta = list()\n for i in range(num_metadata):\n meta.append(codes[j])\n j += 1\n node.metadata = meta\n return node, j\n\n\ncodes = parse_string(my_string)\ntree, _ = parse_node(codes, 0)\n\n\ndef checksum(node):\n c = node.checksum()\n for child in node.children:\n c += checksum(child)\n return c\n\n\nprint(checksum(tree))\nprint(tree.value())\n",
"step-5": "from typing import List, Tuple\n\ntest_string = \"2 3 0 3 10 11 12 1 1 0 1 99 2 1 1 2\"\nwith open('data/day8_input.txt', 'r') as fp:\n my_string = fp.read()\n\n\nclass Node:\n def __init__(self):\n self.metadata = list()\n self.children = list()\n\n def checksum(self):\n return sum([x for x in self.metadata])\n\n def add_child(self, child):\n self.children.append(child)\n pass\n\n def value(self):\n if len(self.children) == 0:\n return self.checksum()\n else:\n val = 0\n for m in self.metadata:\n if m > 0 and m <= len(self.children):\n val += self.children[m-1].value()\n return val\n\n\ndef parse_string(my_string : str) -> List[int]:\n return [int(x) for x in my_string.split(\" \")]\n\n\ndef parse_node(codes: List[int], idx : int) -> Tuple[Node, int]:\n num_children = codes[idx]\n num_metadata = codes[idx + 1]\n node = Node()\n\n j = idx + 2\n for i in range(num_children):\n child, j = parse_node(codes, j)\n node.add_child(child)\n\n meta = list()\n for i in range(num_metadata):\n meta.append(codes[j])\n j += 1\n node.metadata = meta\n return (node, j)\n\n\ncodes = parse_string(my_string)\ntree, _ = parse_node(codes, 0)\n\ndef checksum(node):\n c = node.checksum()\n for child in node.children:\n c += checksum(child)\n return c\n\n\nprint(checksum(tree))\nprint(tree.value())",
"step-ids": [
8,
9,
10,
11,
12
]
}
|
[
8,
9,
10,
11,
12
] |
C = {i:0 for i in range(9)}
N = int(input())
A = list(map(int,input().split()))
for i in range(N):
a = A[i]
if a<400:
C[0] += 1
elif a<800:
C[1] += 1
elif a<1200:
C[2] += 1
elif a<1600:
C[3] += 1
elif a<2000:
C[4] += 1
elif a<2400:
C[5] += 1
elif a<2800:
C[6] += 1
elif a<3200:
C[7] += 1
else:
C[8] += 1
cmin = 0
for i in range(8):
if C[i]>0:
cmin += 1
if cmin==0:
cmin = 1
cmax = C[8]
else:
cmax = cmin+C[8]
print(cmin,cmax)
|
normal
|
{
"blob_id": "a1ca6c258298feda99b568f236611c1c496e3262",
"index": 8993,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor i in range(N):\n a = A[i]\n if a < 400:\n C[0] += 1\n elif a < 800:\n C[1] += 1\n elif a < 1200:\n C[2] += 1\n elif a < 1600:\n C[3] += 1\n elif a < 2000:\n C[4] += 1\n elif a < 2400:\n C[5] += 1\n elif a < 2800:\n C[6] += 1\n elif a < 3200:\n C[7] += 1\n else:\n C[8] += 1\n<mask token>\nfor i in range(8):\n if C[i] > 0:\n cmin += 1\nif cmin == 0:\n cmin = 1\n cmax = C[8]\nelse:\n cmax = cmin + C[8]\nprint(cmin, cmax)\n",
"step-3": "C = {i: (0) for i in range(9)}\nN = int(input())\nA = list(map(int, input().split()))\nfor i in range(N):\n a = A[i]\n if a < 400:\n C[0] += 1\n elif a < 800:\n C[1] += 1\n elif a < 1200:\n C[2] += 1\n elif a < 1600:\n C[3] += 1\n elif a < 2000:\n C[4] += 1\n elif a < 2400:\n C[5] += 1\n elif a < 2800:\n C[6] += 1\n elif a < 3200:\n C[7] += 1\n else:\n C[8] += 1\ncmin = 0\nfor i in range(8):\n if C[i] > 0:\n cmin += 1\nif cmin == 0:\n cmin = 1\n cmax = C[8]\nelse:\n cmax = cmin + C[8]\nprint(cmin, cmax)\n",
"step-4": "C = {i:0 for i in range(9)}\nN = int(input())\nA = list(map(int,input().split()))\nfor i in range(N):\n a = A[i]\n if a<400:\n C[0] += 1\n elif a<800:\n C[1] += 1\n elif a<1200:\n C[2] += 1\n elif a<1600:\n C[3] += 1\n elif a<2000:\n C[4] += 1\n elif a<2400:\n C[5] += 1\n elif a<2800:\n C[6] += 1\n elif a<3200:\n C[7] += 1\n else:\n C[8] += 1\ncmin = 0\nfor i in range(8):\n if C[i]>0:\n cmin += 1\nif cmin==0:\n cmin = 1\n cmax = C[8]\nelse:\n cmax = cmin+C[8]\nprint(cmin,cmax)",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
def solve_problem(input):
parents = {}
for i, line in enumerate(input.split('\n')):
about, object = line.split(')')
parents[object] = about
orbit_counts = {'COM': 0}
for object in tuple(parents.keys()):
stack = [object]
while stack[-1] not in orbit_counts:
stack.append(parents[stack[-1]])
known = orbit_counts[stack.pop()]
stack.reverse()
for thing in stack:
orbit_counts[thing] = orbit_counts[parents[thing]] + 1
return sum(orbit_counts.values())
<|reserved_special_token_0|>
def get_parents(key, parents):
"""Get parents for a particular key through parents dict"""
r = [key]
while True:
this_one = r[-1]
if this_one == 'COM':
return r
r.append(parents[this_one])
def part2(input):
parents = {}
for i, line in enumerate(input.split('\n')):
about, object = line.split(')')
parents[object] = about
santa = get_parents('SAN', parents)
me = get_parents('YOU', parents)
for i, planet in enumerate(me):
if planet in santa:
print(f'met at {planet}')
print('')
print(santa[:santa.index(planet) + 1])
print(len(santa[:santa.index(planet) + 1]))
print(santa.index(planet))
print('')
print(me[:i + 1])
print(len(me[:i + 1]))
print(i)
return i + santa.index(planet) - 1
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
with open(__file__.replace('.py', '.txt')) as f:
problem = f.read()
<|reserved_special_token_0|>
def solve_problem(input):
parents = {}
for i, line in enumerate(input.split('\n')):
about, object = line.split(')')
parents[object] = about
orbit_counts = {'COM': 0}
for object in tuple(parents.keys()):
stack = [object]
while stack[-1] not in orbit_counts:
stack.append(parents[stack[-1]])
known = orbit_counts[stack.pop()]
stack.reverse()
for thing in stack:
orbit_counts[thing] = orbit_counts[parents[thing]] + 1
return sum(orbit_counts.values())
if sys.argv[-1] in data.keys():
scenarios = sys.argv[-1],
else:
scenarios = tuple(data.keys())
for scenario in scenarios:
input = data[scenario]
r = solve_problem(input)
print(f'FINAL ANSWER: {r}')
print('')
print('**** PART 2 ******')
def get_parents(key, parents):
"""Get parents for a particular key through parents dict"""
r = [key]
while True:
this_one = r[-1]
if this_one == 'COM':
return r
r.append(parents[this_one])
def part2(input):
parents = {}
for i, line in enumerate(input.split('\n')):
about, object = line.split(')')
parents[object] = about
santa = get_parents('SAN', parents)
me = get_parents('YOU', parents)
for i, planet in enumerate(me):
if planet in santa:
print(f'met at {planet}')
print('')
print(santa[:santa.index(planet) + 1])
print(len(santa[:santa.index(planet) + 1]))
print(santa.index(planet))
print('')
print(me[:i + 1])
print(len(me[:i + 1]))
print(i)
return i + santa.index(planet) - 1
<|reserved_special_token_0|>
for scenario in scenarios:
input = data[scenario]
r = part2(input)
print(f'Part 2 answer {r}')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
with open(__file__.replace('.py', '.txt')) as f:
problem = f.read()
data = {'problem': problem, 'example':
"""COM)B
B)C
C)D
D)E
E)F
B)G
G)H
D)I
E)J
J)K
K)L"""}
def solve_problem(input):
parents = {}
for i, line in enumerate(input.split('\n')):
about, object = line.split(')')
parents[object] = about
orbit_counts = {'COM': 0}
for object in tuple(parents.keys()):
stack = [object]
while stack[-1] not in orbit_counts:
stack.append(parents[stack[-1]])
known = orbit_counts[stack.pop()]
stack.reverse()
for thing in stack:
orbit_counts[thing] = orbit_counts[parents[thing]] + 1
return sum(orbit_counts.values())
if sys.argv[-1] in data.keys():
scenarios = sys.argv[-1],
else:
scenarios = tuple(data.keys())
for scenario in scenarios:
input = data[scenario]
r = solve_problem(input)
print(f'FINAL ANSWER: {r}')
print('')
print('**** PART 2 ******')
def get_parents(key, parents):
"""Get parents for a particular key through parents dict"""
r = [key]
while True:
this_one = r[-1]
if this_one == 'COM':
return r
r.append(parents[this_one])
def part2(input):
parents = {}
for i, line in enumerate(input.split('\n')):
about, object = line.split(')')
parents[object] = about
santa = get_parents('SAN', parents)
me = get_parents('YOU', parents)
for i, planet in enumerate(me):
if planet in santa:
print(f'met at {planet}')
print('')
print(santa[:santa.index(planet) + 1])
print(len(santa[:santa.index(planet) + 1]))
print(santa.index(planet))
print('')
print(me[:i + 1])
print(len(me[:i + 1]))
print(i)
return i + santa.index(planet) - 1
data['example'] = """COM)B
B)C
C)D
D)E
E)F
B)G
G)H
D)I
E)J
J)K
K)L
K)YOU
I)SAN"""
for scenario in scenarios:
input = data[scenario]
r = part2(input)
print(f'Part 2 answer {r}')
<|reserved_special_token_1|>
import sys
import json
with open(__file__.replace('.py', '.txt')) as f:
problem = f.read()
data = {'problem': problem, 'example':
"""COM)B
B)C
C)D
D)E
E)F
B)G
G)H
D)I
E)J
J)K
K)L"""}
def solve_problem(input):
parents = {}
for i, line in enumerate(input.split('\n')):
about, object = line.split(')')
parents[object] = about
orbit_counts = {'COM': 0}
for object in tuple(parents.keys()):
stack = [object]
while stack[-1] not in orbit_counts:
stack.append(parents[stack[-1]])
known = orbit_counts[stack.pop()]
stack.reverse()
for thing in stack:
orbit_counts[thing] = orbit_counts[parents[thing]] + 1
return sum(orbit_counts.values())
if sys.argv[-1] in data.keys():
scenarios = sys.argv[-1],
else:
scenarios = tuple(data.keys())
for scenario in scenarios:
input = data[scenario]
r = solve_problem(input)
print(f'FINAL ANSWER: {r}')
print('')
print('**** PART 2 ******')
def get_parents(key, parents):
"""Get parents for a particular key through parents dict"""
r = [key]
while True:
this_one = r[-1]
if this_one == 'COM':
return r
r.append(parents[this_one])
def part2(input):
parents = {}
for i, line in enumerate(input.split('\n')):
about, object = line.split(')')
parents[object] = about
santa = get_parents('SAN', parents)
me = get_parents('YOU', parents)
for i, planet in enumerate(me):
if planet in santa:
print(f'met at {planet}')
print('')
print(santa[:santa.index(planet) + 1])
print(len(santa[:santa.index(planet) + 1]))
print(santa.index(planet))
print('')
print(me[:i + 1])
print(len(me[:i + 1]))
print(i)
return i + santa.index(planet) - 1
data['example'] = """COM)B
B)C
C)D
D)E
E)F
B)G
G)H
D)I
E)J
J)K
K)L
K)YOU
I)SAN"""
for scenario in scenarios:
input = data[scenario]
r = part2(input)
print(f'Part 2 answer {r}')
<|reserved_special_token_1|>
import sys
import json
with open(__file__.replace('.py', '.txt')) as f:
problem = f.read()
data = {
'problem': problem,
'example': """COM)B
B)C
C)D
D)E
E)F
B)G
G)H
D)I
E)J
J)K
K)L""" # should give 42
}
def solve_problem(input):
parents = {}
for i, line in enumerate(input.split('\n')):
about, object = line.split(')')
parents[object] = about
orbit_counts = {'COM': 0}
for object in tuple(parents.keys()):
stack = [object]
while stack[-1] not in orbit_counts:
stack.append(parents[stack[-1]])
known = orbit_counts[stack.pop()]
stack.reverse()
for thing in stack:
orbit_counts[thing] = orbit_counts[parents[thing]] + 1
return sum(orbit_counts.values())
# part 1
if sys.argv[-1] in data.keys():
scenarios = (sys.argv[-1],)
else:
scenarios = tuple(data.keys())
for scenario in scenarios:
input = data[scenario]
r = solve_problem(input)
print(f'FINAL ANSWER: {r}')
# 932, too low
print('')
print('**** PART 2 ******')
def get_parents(key, parents):
"""Get parents for a particular key through parents dict"""
r = [key]
while True:
this_one = r[-1]
if this_one == 'COM':
return r
r.append(parents[this_one])
def part2(input):
parents = {}
for i, line in enumerate(input.split('\n')):
about, object = line.split(')')
parents[object] = about
santa = get_parents('SAN', parents)
me = get_parents('YOU', parents)
for i, planet in enumerate(me):
if planet in santa:
print(f'met at {planet}')
print('')
print(santa[:santa.index(planet) + 1])
print(len(santa[:santa.index(planet) + 1]))
# minus one because we want traversials between elements in list
print(santa.index(planet))
print('')
print(me[:i + 1])
print(len(me[:i + 1]))
# minus one because we want traversials between elements in list
print(i)
# minus another one because transfering to the planet is already counted
# ...or something like that
# minus one because problem said so
return i + santa.index(planet) - 1
data['example'] = """COM)B
B)C
C)D
D)E
E)F
B)G
G)H
D)I
E)J
J)K
K)L
K)YOU
I)SAN"""
for scenario in scenarios:
input = data[scenario]
r = part2(input)
print(f'Part 2 answer {r}')
# 432, too high
# 433, too high
# 431, too high
# 430, correct
|
flexible
|
{
"blob_id": "e57680c9bd09866e68ade0cfea7ce83cd6d50f58",
"index": 1596,
"step-1": "<mask token>\n\n\ndef solve_problem(input):\n parents = {}\n for i, line in enumerate(input.split('\\n')):\n about, object = line.split(')')\n parents[object] = about\n orbit_counts = {'COM': 0}\n for object in tuple(parents.keys()):\n stack = [object]\n while stack[-1] not in orbit_counts:\n stack.append(parents[stack[-1]])\n known = orbit_counts[stack.pop()]\n stack.reverse()\n for thing in stack:\n orbit_counts[thing] = orbit_counts[parents[thing]] + 1\n return sum(orbit_counts.values())\n\n\n<mask token>\n\n\ndef get_parents(key, parents):\n \"\"\"Get parents for a particular key through parents dict\"\"\"\n r = [key]\n while True:\n this_one = r[-1]\n if this_one == 'COM':\n return r\n r.append(parents[this_one])\n\n\ndef part2(input):\n parents = {}\n for i, line in enumerate(input.split('\\n')):\n about, object = line.split(')')\n parents[object] = about\n santa = get_parents('SAN', parents)\n me = get_parents('YOU', parents)\n for i, planet in enumerate(me):\n if planet in santa:\n print(f'met at {planet}')\n print('')\n print(santa[:santa.index(planet) + 1])\n print(len(santa[:santa.index(planet) + 1]))\n print(santa.index(planet))\n print('')\n print(me[:i + 1])\n print(len(me[:i + 1]))\n print(i)\n return i + santa.index(planet) - 1\n\n\n<mask token>\n",
"step-2": "<mask token>\nwith open(__file__.replace('.py', '.txt')) as f:\n problem = f.read()\n<mask token>\n\n\ndef solve_problem(input):\n parents = {}\n for i, line in enumerate(input.split('\\n')):\n about, object = line.split(')')\n parents[object] = about\n orbit_counts = {'COM': 0}\n for object in tuple(parents.keys()):\n stack = [object]\n while stack[-1] not in orbit_counts:\n stack.append(parents[stack[-1]])\n known = orbit_counts[stack.pop()]\n stack.reverse()\n for thing in stack:\n orbit_counts[thing] = orbit_counts[parents[thing]] + 1\n return sum(orbit_counts.values())\n\n\nif sys.argv[-1] in data.keys():\n scenarios = sys.argv[-1],\nelse:\n scenarios = tuple(data.keys())\nfor scenario in scenarios:\n input = data[scenario]\n r = solve_problem(input)\n print(f'FINAL ANSWER: {r}')\nprint('')\nprint('**** PART 2 ******')\n\n\ndef get_parents(key, parents):\n \"\"\"Get parents for a particular key through parents dict\"\"\"\n r = [key]\n while True:\n this_one = r[-1]\n if this_one == 'COM':\n return r\n r.append(parents[this_one])\n\n\ndef part2(input):\n parents = {}\n for i, line in enumerate(input.split('\\n')):\n about, object = line.split(')')\n parents[object] = about\n santa = get_parents('SAN', parents)\n me = get_parents('YOU', parents)\n for i, planet in enumerate(me):\n if planet in santa:\n print(f'met at {planet}')\n print('')\n print(santa[:santa.index(planet) + 1])\n print(len(santa[:santa.index(planet) + 1]))\n print(santa.index(planet))\n print('')\n print(me[:i + 1])\n print(len(me[:i + 1]))\n print(i)\n return i + santa.index(planet) - 1\n\n\n<mask token>\nfor scenario in scenarios:\n input = data[scenario]\n r = part2(input)\n print(f'Part 2 answer {r}')\n",
"step-3": "<mask token>\nwith open(__file__.replace('.py', '.txt')) as f:\n problem = f.read()\ndata = {'problem': problem, 'example':\n \"\"\"COM)B\nB)C\nC)D\nD)E\nE)F\nB)G\nG)H\nD)I\nE)J\nJ)K\nK)L\"\"\"}\n\n\ndef solve_problem(input):\n parents = {}\n for i, line in enumerate(input.split('\\n')):\n about, object = line.split(')')\n parents[object] = about\n orbit_counts = {'COM': 0}\n for object in tuple(parents.keys()):\n stack = [object]\n while stack[-1] not in orbit_counts:\n stack.append(parents[stack[-1]])\n known = orbit_counts[stack.pop()]\n stack.reverse()\n for thing in stack:\n orbit_counts[thing] = orbit_counts[parents[thing]] + 1\n return sum(orbit_counts.values())\n\n\nif sys.argv[-1] in data.keys():\n scenarios = sys.argv[-1],\nelse:\n scenarios = tuple(data.keys())\nfor scenario in scenarios:\n input = data[scenario]\n r = solve_problem(input)\n print(f'FINAL ANSWER: {r}')\nprint('')\nprint('**** PART 2 ******')\n\n\ndef get_parents(key, parents):\n \"\"\"Get parents for a particular key through parents dict\"\"\"\n r = [key]\n while True:\n this_one = r[-1]\n if this_one == 'COM':\n return r\n r.append(parents[this_one])\n\n\ndef part2(input):\n parents = {}\n for i, line in enumerate(input.split('\\n')):\n about, object = line.split(')')\n parents[object] = about\n santa = get_parents('SAN', parents)\n me = get_parents('YOU', parents)\n for i, planet in enumerate(me):\n if planet in santa:\n print(f'met at {planet}')\n print('')\n print(santa[:santa.index(planet) + 1])\n print(len(santa[:santa.index(planet) + 1]))\n print(santa.index(planet))\n print('')\n print(me[:i + 1])\n print(len(me[:i + 1]))\n print(i)\n return i + santa.index(planet) - 1\n\n\ndata['example'] = \"\"\"COM)B\nB)C\nC)D\nD)E\nE)F\nB)G\nG)H\nD)I\nE)J\nJ)K\nK)L\nK)YOU\nI)SAN\"\"\"\nfor scenario in scenarios:\n input = data[scenario]\n r = part2(input)\n print(f'Part 2 answer {r}')\n",
"step-4": "import sys\nimport json\nwith open(__file__.replace('.py', '.txt')) as f:\n problem = f.read()\ndata = {'problem': problem, 'example':\n \"\"\"COM)B\nB)C\nC)D\nD)E\nE)F\nB)G\nG)H\nD)I\nE)J\nJ)K\nK)L\"\"\"}\n\n\ndef solve_problem(input):\n parents = {}\n for i, line in enumerate(input.split('\\n')):\n about, object = line.split(')')\n parents[object] = about\n orbit_counts = {'COM': 0}\n for object in tuple(parents.keys()):\n stack = [object]\n while stack[-1] not in orbit_counts:\n stack.append(parents[stack[-1]])\n known = orbit_counts[stack.pop()]\n stack.reverse()\n for thing in stack:\n orbit_counts[thing] = orbit_counts[parents[thing]] + 1\n return sum(orbit_counts.values())\n\n\nif sys.argv[-1] in data.keys():\n scenarios = sys.argv[-1],\nelse:\n scenarios = tuple(data.keys())\nfor scenario in scenarios:\n input = data[scenario]\n r = solve_problem(input)\n print(f'FINAL ANSWER: {r}')\nprint('')\nprint('**** PART 2 ******')\n\n\ndef get_parents(key, parents):\n \"\"\"Get parents for a particular key through parents dict\"\"\"\n r = [key]\n while True:\n this_one = r[-1]\n if this_one == 'COM':\n return r\n r.append(parents[this_one])\n\n\ndef part2(input):\n parents = {}\n for i, line in enumerate(input.split('\\n')):\n about, object = line.split(')')\n parents[object] = about\n santa = get_parents('SAN', parents)\n me = get_parents('YOU', parents)\n for i, planet in enumerate(me):\n if planet in santa:\n print(f'met at {planet}')\n print('')\n print(santa[:santa.index(planet) + 1])\n print(len(santa[:santa.index(planet) + 1]))\n print(santa.index(planet))\n print('')\n print(me[:i + 1])\n print(len(me[:i + 1]))\n print(i)\n return i + santa.index(planet) - 1\n\n\ndata['example'] = \"\"\"COM)B\nB)C\nC)D\nD)E\nE)F\nB)G\nG)H\nD)I\nE)J\nJ)K\nK)L\nK)YOU\nI)SAN\"\"\"\nfor scenario in scenarios:\n input = data[scenario]\n r = part2(input)\n print(f'Part 2 answer {r}')\n",
"step-5": "import sys\nimport json\n\n\nwith open(__file__.replace('.py', '.txt')) as f:\n problem = f.read()\n\n\ndata = {\n 'problem': problem,\n 'example': \"\"\"COM)B\nB)C\nC)D\nD)E\nE)F\nB)G\nG)H\nD)I\nE)J\nJ)K\nK)L\"\"\" # should give 42\n}\n\n\ndef solve_problem(input):\n parents = {}\n for i, line in enumerate(input.split('\\n')):\n about, object = line.split(')')\n parents[object] = about\n\n orbit_counts = {'COM': 0}\n\n for object in tuple(parents.keys()):\n stack = [object]\n while stack[-1] not in orbit_counts:\n stack.append(parents[stack[-1]])\n known = orbit_counts[stack.pop()]\n stack.reverse()\n for thing in stack:\n orbit_counts[thing] = orbit_counts[parents[thing]] + 1\n\n return sum(orbit_counts.values())\n\n\n# part 1\nif sys.argv[-1] in data.keys():\n scenarios = (sys.argv[-1],)\nelse:\n scenarios = tuple(data.keys())\n\n\nfor scenario in scenarios:\n input = data[scenario]\n r = solve_problem(input)\n print(f'FINAL ANSWER: {r}')\n\n\n# 932, too low\n\nprint('')\nprint('**** PART 2 ******')\n\n\ndef get_parents(key, parents):\n \"\"\"Get parents for a particular key through parents dict\"\"\"\n r = [key]\n while True:\n this_one = r[-1]\n if this_one == 'COM':\n return r\n r.append(parents[this_one])\n\n\ndef part2(input):\n parents = {}\n for i, line in enumerate(input.split('\\n')):\n about, object = line.split(')')\n parents[object] = about\n\n santa = get_parents('SAN', parents)\n me = get_parents('YOU', parents)\n\n for i, planet in enumerate(me):\n if planet in santa:\n print(f'met at {planet}')\n print('')\n print(santa[:santa.index(planet) + 1])\n print(len(santa[:santa.index(planet) + 1]))\n # minus one because we want traversials between elements in list\n print(santa.index(planet))\n print('')\n print(me[:i + 1])\n print(len(me[:i + 1]))\n # minus one because we want traversials between elements in list\n print(i)\n # minus another one because transfering to the planet is already counted\n # ...or something like that\n # minus one 
because problem said so\n return i + santa.index(planet) - 1\n\ndata['example'] = \"\"\"COM)B\nB)C\nC)D\nD)E\nE)F\nB)G\nG)H\nD)I\nE)J\nJ)K\nK)L\nK)YOU\nI)SAN\"\"\"\n\nfor scenario in scenarios:\n input = data[scenario]\n r = part2(input)\n print(f'Part 2 answer {r}')\n\n# 432, too high\n# 433, too high\n# 431, too high\n# 430, correct\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
from room import Room
from player import Player
from item import Item
# Build the world: five rooms keyed by a short id.
room = {
    'outside': Room("Outside Cave Entrance",
                    "North of you, the cave mount beckons"),
    'foyer': Room("Foyer", """Dim light filters in from the south. Dusty
passages run north and east."""),
    'overlook': Room("Grand Overlook", """A steep cliff appears before you, falling
into the darkness. Ahead to the north, a light flickers in
the distance, but there is no way across the chasm."""),
    'narrow': Room("Narrow Passage", """The narrow passage bends here from west
to north. The smell of gold permeates the air."""),
    'treasure': Room("Treasure Chamber", """You've found the long-lost treasure
chamber! Sadly, it has already been completely emptied by
earlier adventurers. The only exit is to the south."""),
}

# Wire up the exits; each entry is (from_room, direction_attribute, to_room).
for _src, _attr, _dst in (
        ('outside', 'n_to', 'foyer'),
        ('foyer', 's_to', 'outside'),
        ('foyer', 'n_to', 'overlook'),
        ('foyer', 'e_to', 'narrow'),
        ('overlook', 's_to', 'foyer'),
        ('narrow', 'w_to', 'foyer'),
        ('narrow', 'n_to', 'treasure'),
        ('treasure', 's_to', 'narrow'),
):
    setattr(room[_src], _attr, room[_dst])

# Catalogue of collectable items.
itemList = {
    'Brick': Item('Brick', 'Build settlement and roads'),
    'Wood': Item('Wood', 'Build settlement and roads'),
    'Sheep': Item('Sheep', 'Build settlement and get development cards'),
    'Grain': Item('Grain', 'Build settlement, cities and get development cards'),
    'Stone': Item('Stone', 'Build cities and get development cards'),
    'DCard': Item('Development Cards', 'Get special powers')
}

# Stock each room with its starting items (shared Item objects from itemList).
for _name, _contents in (
        ('outside', ('Brick', 'Wood')),
        ('foyer', ('Brick', 'Grain', 'Sheep')),
        ('overlook', ('Wood', 'Sheep', 'Grain')),
        ('narrow', ('Stone', 'Grain')),
        ('treasure', ('Brick', 'Grain', 'Wood')),
):
    room[_name].items = [itemList[_i] for _i in _contents]

#
# Main game loop
#
# Repeatedly: show the player's inventory, the current room and its items,
# then read a command. A single word is treated as a movement direction,
# "take <item>" / "drop <item>" manipulate items, and "q" quits.

_PROMPT = ('What would you like to do? \n\tEnter [n], [s], [e] or [w] to move across rooms '
           '\n\tEnter "take [item_name]" or "drop [item_name]" to add or remove items '
           '\n\tEnter [q] to quit the game\n')

playerName = input('Hello, What is your name?\n')
player = Player(playerName, room['outside'])

while True:
    # Inventory first...
    print('\nPlayer Items:')
    for item in player.items:
        print('\t', item)
    # ...then the current room and what can be picked up here.
    print('Room - ', player.current_room)
    print('Items in Room:')
    for item in player.current_room.items:
        print('\t', item)

    command = input(_PROMPT)
    if command == 'q':
        print('You chose to quit!')
        break

    tokens = command.split(' ')
    if len(tokens) == 1:
        # Single word: treat it as a movement direction (Player.move validates).
        player.move(command)
    elif len(tokens) == 2:
        # Two words: "<verb> <item>", e.g. "take Brick" (item names are case-sensitive).
        verb, itemName = tokens
        if itemName in itemList:
            player.action(verb, itemList[itemName])
        else:
            print('Invalid item choice')
|
normal
|
{
"blob_id": "beb536b6d8883daaa7e41da03145dd98aa223cbf",
"index": 5036,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwhile True:\n print('\\nPlayer Items:')\n for item in player.items:\n print('\\t', item)\n print('Room - ', player.current_room)\n print('Items in Room:')\n for item in player.current_room.items:\n print('\\t', item)\n userInput = input(\n \"\"\"What would you like to do? \n\tEnter [n], [s], [e] or [w] to move across rooms \n\tEnter \"take [item_name]\" or \"drop [item_name]\" to add or remove items \n\tEnter [q] to quit the game\n\"\"\"\n )\n userInputWords = userInput.split(' ')\n if userInput == 'q':\n print('You chose to quit!')\n break\n elif len(userInputWords) == 1:\n player.move(userInput)\n elif len(userInputWords) == 2:\n verb = userInputWords[0]\n itemName = userInputWords[1]\n if itemName in itemList:\n player.action(verb, itemList[itemName])\n else:\n print('Invalid item choice')\n",
"step-3": "<mask token>\nroom = {'outside': Room('Outside Cave Entrance',\n 'North of you, the cave mount beckons'), 'foyer': Room('Foyer',\n \"\"\"Dim light filters in from the south. Dusty\npassages run north and east.\"\"\"\n ), 'overlook': Room('Grand Overlook',\n \"\"\"A steep cliff appears before you, falling\ninto the darkness. Ahead to the north, a light flickers in\nthe distance, but there is no way across the chasm.\"\"\"\n ), 'narrow': Room('Narrow Passage',\n \"\"\"The narrow passage bends here from west\nto north. The smell of gold permeates the air.\"\"\"\n ), 'treasure': Room('Treasure Chamber',\n \"\"\"You've found the long-lost treasure\nchamber! Sadly, it has already been completely emptied by\nearlier adventurers. The only exit is to the south.\"\"\"\n )}\nroom['outside'].n_to = room['foyer']\nroom['foyer'].s_to = room['outside']\nroom['foyer'].n_to = room['overlook']\nroom['foyer'].e_to = room['narrow']\nroom['overlook'].s_to = room['foyer']\nroom['narrow'].w_to = room['foyer']\nroom['narrow'].n_to = room['treasure']\nroom['treasure'].s_to = room['narrow']\nitemList = {'Brick': Item('Brick', 'Build settlement and roads'), 'Wood':\n Item('Wood', 'Build settlement and roads'), 'Sheep': Item('Sheep',\n 'Build settlement and get development cards'), 'Grain': Item('Grain',\n 'Build settlement, cities and get development cards'), 'Stone': Item(\n 'Stone', 'Build cities and get development cards'), 'DCard': Item(\n 'Development Cards', 'Get special powers')}\nroom['outside'].items = [itemList['Brick'], itemList['Wood']]\nroom['foyer'].items = [itemList['Brick'], itemList['Grain'], itemList['Sheep']]\nroom['overlook'].items = [itemList['Wood'], itemList['Sheep'], itemList[\n 'Grain']]\nroom['narrow'].items = [itemList['Stone'], itemList['Grain']]\nroom['treasure'].items = [itemList['Brick'], itemList['Grain'], itemList[\n 'Wood']]\nplayerName = input('Hello, What is your name?\\n')\nplayer = Player(playerName, room['outside'])\nwhile True:\n 
print('\\nPlayer Items:')\n for item in player.items:\n print('\\t', item)\n print('Room - ', player.current_room)\n print('Items in Room:')\n for item in player.current_room.items:\n print('\\t', item)\n userInput = input(\n \"\"\"What would you like to do? \n\tEnter [n], [s], [e] or [w] to move across rooms \n\tEnter \"take [item_name]\" or \"drop [item_name]\" to add or remove items \n\tEnter [q] to quit the game\n\"\"\"\n )\n userInputWords = userInput.split(' ')\n if userInput == 'q':\n print('You chose to quit!')\n break\n elif len(userInputWords) == 1:\n player.move(userInput)\n elif len(userInputWords) == 2:\n verb = userInputWords[0]\n itemName = userInputWords[1]\n if itemName in itemList:\n player.action(verb, itemList[itemName])\n else:\n print('Invalid item choice')\n",
"step-4": "from room import Room\nfrom player import Player\nfrom item import Item\nroom = {'outside': Room('Outside Cave Entrance',\n 'North of you, the cave mount beckons'), 'foyer': Room('Foyer',\n \"\"\"Dim light filters in from the south. Dusty\npassages run north and east.\"\"\"\n ), 'overlook': Room('Grand Overlook',\n \"\"\"A steep cliff appears before you, falling\ninto the darkness. Ahead to the north, a light flickers in\nthe distance, but there is no way across the chasm.\"\"\"\n ), 'narrow': Room('Narrow Passage',\n \"\"\"The narrow passage bends here from west\nto north. The smell of gold permeates the air.\"\"\"\n ), 'treasure': Room('Treasure Chamber',\n \"\"\"You've found the long-lost treasure\nchamber! Sadly, it has already been completely emptied by\nearlier adventurers. The only exit is to the south.\"\"\"\n )}\nroom['outside'].n_to = room['foyer']\nroom['foyer'].s_to = room['outside']\nroom['foyer'].n_to = room['overlook']\nroom['foyer'].e_to = room['narrow']\nroom['overlook'].s_to = room['foyer']\nroom['narrow'].w_to = room['foyer']\nroom['narrow'].n_to = room['treasure']\nroom['treasure'].s_to = room['narrow']\nitemList = {'Brick': Item('Brick', 'Build settlement and roads'), 'Wood':\n Item('Wood', 'Build settlement and roads'), 'Sheep': Item('Sheep',\n 'Build settlement and get development cards'), 'Grain': Item('Grain',\n 'Build settlement, cities and get development cards'), 'Stone': Item(\n 'Stone', 'Build cities and get development cards'), 'DCard': Item(\n 'Development Cards', 'Get special powers')}\nroom['outside'].items = [itemList['Brick'], itemList['Wood']]\nroom['foyer'].items = [itemList['Brick'], itemList['Grain'], itemList['Sheep']]\nroom['overlook'].items = [itemList['Wood'], itemList['Sheep'], itemList[\n 'Grain']]\nroom['narrow'].items = [itemList['Stone'], itemList['Grain']]\nroom['treasure'].items = [itemList['Brick'], itemList['Grain'], itemList[\n 'Wood']]\nplayerName = input('Hello, What is your name?\\n')\nplayer = 
Player(playerName, room['outside'])\nwhile True:\n print('\\nPlayer Items:')\n for item in player.items:\n print('\\t', item)\n print('Room - ', player.current_room)\n print('Items in Room:')\n for item in player.current_room.items:\n print('\\t', item)\n userInput = input(\n \"\"\"What would you like to do? \n\tEnter [n], [s], [e] or [w] to move across rooms \n\tEnter \"take [item_name]\" or \"drop [item_name]\" to add or remove items \n\tEnter [q] to quit the game\n\"\"\"\n )\n userInputWords = userInput.split(' ')\n if userInput == 'q':\n print('You chose to quit!')\n break\n elif len(userInputWords) == 1:\n player.move(userInput)\n elif len(userInputWords) == 2:\n verb = userInputWords[0]\n itemName = userInputWords[1]\n if itemName in itemList:\n player.action(verb, itemList[itemName])\n else:\n print('Invalid item choice')\n",
"step-5": "from room import Room\nfrom player import Player\nfrom item import Item\n# Declare all the rooms\n\nroom = {\n 'outside': Room(\"Outside Cave Entrance\",\n \"North of you, the cave mount beckons\"),\n\n 'foyer': Room(\"Foyer\", \"\"\"Dim light filters in from the south. Dusty\npassages run north and east.\"\"\"),\n\n 'overlook': Room(\"Grand Overlook\", \"\"\"A steep cliff appears before you, falling\ninto the darkness. Ahead to the north, a light flickers in\nthe distance, but there is no way across the chasm.\"\"\"),\n\n 'narrow': Room(\"Narrow Passage\", \"\"\"The narrow passage bends here from west\nto north. The smell of gold permeates the air.\"\"\"),\n\n 'treasure': Room(\"Treasure Chamber\", \"\"\"You've found the long-lost treasure\nchamber! Sadly, it has already been completely emptied by\nearlier adventurers. The only exit is to the south.\"\"\"),\n}\n\n\n# Link rooms together\n\nroom['outside'].n_to = room['foyer']\nroom['foyer'].s_to = room['outside']\nroom['foyer'].n_to = room['overlook']\nroom['foyer'].e_to = room['narrow']\nroom['overlook'].s_to = room['foyer']\nroom['narrow'].w_to = room['foyer']\nroom['narrow'].n_to = room['treasure']\nroom['treasure'].s_to = room['narrow']\n\n# list of items\nitemList = {\n 'Brick': Item('Brick', 'Build settlement and roads'),\n 'Wood': Item('Wood', 'Build settlement and roads'),\n 'Sheep': Item('Sheep', 'Build settlement and get development cards'),\n 'Grain': Item('Grain', 'Build settlement, cities and get development cards'),\n 'Stone': Item('Stone', 'Build cities and get development cards'),\n 'DCard': Item('Development Cards', 'Get special powers')\n}\n\n# items assignment to rooms\nroom['outside'].items = [itemList['Brick'], itemList['Wood']]\nroom['foyer'].items = [itemList['Brick'], itemList['Grain'], itemList['Sheep']]\nroom['overlook'].items = [itemList['Wood'], itemList['Sheep'], itemList['Grain']]\nroom['narrow'].items = [itemList['Stone'], itemList['Grain']]\nroom['treasure'].items = 
[itemList['Brick'], itemList['Grain'], itemList['Wood']]\n#\n# Main\n#\n\n# Make a new player object that is currently in the 'outside' room.\n\n# Write a loop that:\n#\n# * Prints the current room name\n# * Prints the current description (the textwrap module might be useful here).\n# * Waits for user input and decides what to do.\n#\n# If the user enters a cardinal direction, attempt to move to the room there.\n# Print an error message if the movement isn't allowed.\n#\n# If the user enters \"q\", quit the game.\n\n# Get Player Name\nplayerName = input('Hello, What is your name?\\n')\n\n# Initialize player with given name\nplayer = Player(playerName, room['outside'])\n\nwhile True:\n # print player item inventory\n print('\\nPlayer Items:')\n for item in player.items:\n print('\\t', item)\n\n # print current room and items available in the room\n print('Room - ', player.current_room)\n print('Items in Room:')\n for item in player.current_room.items:\n print('\\t', item)\n\n\n # Get the User Input\n userInput = input('What would you like to do? \\n\\tEnter [n], [s], [e] or [w] to move across rooms \\n\\tEnter \\\"take [item_name]\\\" or \\\"drop [item_name]\\\" to add or remove items \\n\\tEnter [q] to quit the game\\n')\n\n userInputWords = userInput.split(' ')\n\n if userInput == 'q':\n print('You chose to quit!')\n break\n elif len(userInputWords) == 1:\n player.move(userInput)\n elif len(userInputWords) == 2:\n verb = userInputWords[0]\n itemName = userInputWords[1]\n if itemName in itemList:\n player.action(verb, itemList[itemName])\n else:\n print('Invalid item choice')\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# -*- coding: utf-8 -*-
# @File :fi_handlers.py
# @Author:ZengYu
# @Date :2019/5/16
# @software:PyCharm
import tornado.web
import tornado.websocket
from PIL import Image
import base64
from model.flower_identify import flower_identify
class FlowersInfo():
    """Static lookup of flower descriptions shown to the user.

    The list is indexed by the class index returned by ``flower_identify()``
    (see ``IdentifyHandler.post``), so its order must match the classifier's
    label order: 0 rose (月季花), 1 hydrangea (绣球), 2 marigold (万寿菊),
    3 pansy (三色堇), 4 pomegranate blossom (石榴花).
    """
    # Chinese-language description for each predicted class (user-facing text;
    # do not reorder without retraining/relabelling the classifier).
    flowersInfo = ["月季花(学名:Rosa chinensis Jacq.): 被称为花中皇后,又称“月月红”,是常绿、半常绿低矮灌木,四季开花,一般为红色,或粉色、偶有白色和黄色,可作为观赏植物,也可作为药用植物,亦称月季。有三个自然变种,现代月季花型多样,有单瓣和重瓣,还有高心卷边等优美花型;其色彩艳丽、丰富,不仅有红、粉黄、白等单色,还有混色、银边等品种;多数品种有芳香。月季的品种繁多,世界上已有近万种,中国也有千种以上。",
                   "绣球(学名:Hydrangea macrophylla (Thunb.) Ser. ): 为虎耳草科绣球属植物。灌木,高1-4米;茎常于基部发出多数放射枝而形成一圆形灌丛;枝圆柱形。叶纸质或近革质,倒卵形或阔椭圆形。伞房状聚伞花序近球形,直径8-20厘米,具短的总花梗,花密集,粉红色、淡蓝色或白色;花瓣长圆形,长3-3.5毫米。蒴果未成熟,长陀螺状;种子未熟。花期6-8月。",
                   "万寿菊(Tagetes erecta L)为菊科万寿菊属一年生草本植物,茎直立,粗壮,具纵细条棱,分枝向上平展。叶羽状分裂;沿叶缘有少数腺体。头状花序单生;总苞杯状,顶端具齿尖;舌状花黄色或暗橙色;管状花花冠黄色。瘦果线形,基部缩小,黑色或褐色,被短微毛;冠毛有1-2个长芒和2-3个短而钝的鳞片。花期7-9月。",
                   "三色堇(学名:Viola tricolor L.)是堇菜科堇菜属的二年或多年生草本植物。基生叶叶片长卵形或披针形,具长柄,茎生叶叶片卵形、长圆形或长圆披针形,先端圆或钝,边缘具稀疏的圆齿或钝锯齿。三色堇是欧洲常见的野花物种,也常栽培于公园中,是冰岛、波兰的国花。花朵通常每花有紫、白、黄三色,故名三色堇。该物种较耐寒,喜凉爽,开花受光照影响较大。",
                   "石榴花,落叶灌木或小乔木石榴的花;为石榴属植物,石榴树干灰褐色,有片状剥落,嫩枝黄绿光滑,常呈四棱形,枝端多为刺状,无顶芽。石榴花单叶对生或簇生,矩圆形或倒卵形,新叶嫩绿或古铜色。花朵至数朵生于枝顶或叶腋,花萼钟形,肉质,先端6裂,表面光滑具腊质,橙红色,宿存。花瓣5~7枚红色或白色,单瓣或重瓣。"]
class FlowerIdentify(tornado.web.RequestHandler):
    """Serves the flower-identification photo-upload page."""

    def get(self):
        # GET: render the upload form. The form presumably posts to
        # IdentifyHandler -- confirm against the app's URL routing.
        self.render("flower_identify.html")
class IdentifyHandler(tornado.websocket.WebSocketHandler):
    """Accepts an uploaded photo, normalizes its orientation, runs the
    classifier and renders the result page.

    NOTE(review): a plain HTTP POST handler would normally extend
    tornado.web.RequestHandler rather than WebSocketHandler -- confirm this
    works with the app's routing before changing it.
    """

    # Fixed path the classifier reads from; every upload overwrites it.
    _TARGET = './static/images/target.jpg'

    # EXIF Orientation tag value -> counter-clockwise rotation (degrees)
    # required to bring the image upright; other values need no rotation.
    _ROTATIONS = {'3': 180, '6': 270, '8': 90}

    def post(self):
        # The page posts the photo as a base64 string plus its EXIF
        # orientation so the server can undo the camera rotation.
        dataUrl = self.get_body_argument("image")
        orientation = self.get_body_argument("orientation")
        content = base64.b64decode(dataUrl)

        # Persist the upload to target.jpg; `with` guarantees the handle is
        # closed even if the write raises (the original open/write/close
        # sequence leaked the handle on error).
        with open(self._TARGET, 'wb') as file:
            file.write(content)

        # Rotate the image upright according to the EXIF orientation.
        img = Image.open(self._TARGET)
        angle = self._ROTATIONS.get(orientation)
        if angle is not None:
            img = img.rotate(angle, expand=True)
        img.save(self._TARGET)

        # Run the classifier and map its class index to the description text.
        flowerIndex = flower_identify()
        flowerInfo = FlowersInfo.flowersInfo[flowerIndex]
        self.render("fi_result.html", data=flowerInfo)
|
normal
|
{
"blob_id": "1c3b1776f14a085bec90be11028c87dc47f00293",
"index": 1722,
"step-1": "<mask token>\n\n\nclass FlowerIdentify(tornado.web.RequestHandler):\n\n def get(self):\n self.render('flower_identify.html')\n\n\nclass IdentifyHandler(tornado.websocket.WebSocketHandler):\n\n def post(self):\n dataUrl = self.get_body_argument('image')\n Orientation = self.get_body_argument('orientation')\n content = base64.b64decode(dataUrl)\n \"\"\"保存到图片target.jpg\"\"\"\n file = open('./static/images/target.jpg', 'wb')\n file.write(content)\n file.close()\n \"\"\"图片旋转270(根据实际情况)\"\"\"\n img = Image.open('./static/images/target.jpg')\n if Orientation == '3':\n img = img.rotate(180, expand=True)\n elif Orientation == '6':\n img = img.rotate(270, expand=True)\n elif Orientation == '8':\n img = img.rotate(90, expand=True)\n img.save('./static/images/target.jpg')\n \"\"\"调用函数识别\"\"\"\n flowerIndex = flower_identify()\n flowerInfo = FlowersInfo.flowersInfo[flowerIndex]\n self.render('fi_result.html', data=flowerInfo)\n",
"step-2": "<mask token>\n\n\nclass FlowersInfo:\n <mask token>\n\n\nclass FlowerIdentify(tornado.web.RequestHandler):\n\n def get(self):\n self.render('flower_identify.html')\n\n\nclass IdentifyHandler(tornado.websocket.WebSocketHandler):\n\n def post(self):\n dataUrl = self.get_body_argument('image')\n Orientation = self.get_body_argument('orientation')\n content = base64.b64decode(dataUrl)\n \"\"\"保存到图片target.jpg\"\"\"\n file = open('./static/images/target.jpg', 'wb')\n file.write(content)\n file.close()\n \"\"\"图片旋转270(根据实际情况)\"\"\"\n img = Image.open('./static/images/target.jpg')\n if Orientation == '3':\n img = img.rotate(180, expand=True)\n elif Orientation == '6':\n img = img.rotate(270, expand=True)\n elif Orientation == '8':\n img = img.rotate(90, expand=True)\n img.save('./static/images/target.jpg')\n \"\"\"调用函数识别\"\"\"\n flowerIndex = flower_identify()\n flowerInfo = FlowersInfo.flowersInfo[flowerIndex]\n self.render('fi_result.html', data=flowerInfo)\n",
"step-3": "<mask token>\n\n\nclass FlowersInfo:\n flowersInfo = [\n '月季花(学名:Rosa chinensis Jacq.): 被称为花中皇后,又称“月月红”,是常绿、半常绿低矮灌木,四季开花,一般为红色,或粉色、偶有白色和黄色,可作为观赏植物,也可作为药用植物,亦称月季。有三个自然变种,现代月季花型多样,有单瓣和重瓣,还有高心卷边等优美花型;其色彩艳丽、丰富,不仅有红、粉黄、白等单色,还有混色、银边等品种;多数品种有芳香。月季的品种繁多,世界上已有近万种,中国也有千种以上。'\n ,\n '绣球(学名:Hydrangea macrophylla (Thunb.) Ser. ): 为虎耳草科绣球属植物。灌木,高1-4米;茎常于基部发出多数放射枝而形成一圆形灌丛;枝圆柱形。叶纸质或近革质,倒卵形或阔椭圆形。伞房状聚伞花序近球形,直径8-20厘米,具短的总花梗,花密集,粉红色、淡蓝色或白色;花瓣长圆形,长3-3.5毫米。蒴果未成熟,长陀螺状;种子未熟。花期6-8月。'\n ,\n '万寿菊(Tagetes erecta L)为菊科万寿菊属一年生草本植物,茎直立,粗壮,具纵细条棱,分枝向上平展。叶羽状分裂;沿叶缘有少数腺体。头状花序单生;总苞杯状,顶端具齿尖;舌状花黄色或暗橙色;管状花花冠黄色。瘦果线形,基部缩小,黑色或褐色,被短微毛;冠毛有1-2个长芒和2-3个短而钝的鳞片。花期7-9月。'\n ,\n '三色堇(学名:Viola tricolor L.)是堇菜科堇菜属的二年或多年生草本植物。基生叶叶片长卵形或披针形,具长柄,茎生叶叶片卵形、长圆形或长圆披针形,先端圆或钝,边缘具稀疏的圆齿或钝锯齿。三色堇是欧洲常见的野花物种,也常栽培于公园中,是冰岛、波兰的国花。花朵通常每花有紫、白、黄三色,故名三色堇。该物种较耐寒,喜凉爽,开花受光照影响较大。'\n ,\n '石榴花,落叶灌木或小乔木石榴的花;为石榴属植物,石榴树干灰褐色,有片状剥落,嫩枝黄绿光滑,常呈四棱形,枝端多为刺状,无顶芽。石榴花单叶对生或簇生,矩圆形或倒卵形,新叶嫩绿或古铜色。花朵至数朵生于枝顶或叶腋,花萼钟形,肉质,先端6裂,表面光滑具腊质,橙红色,宿存。花瓣5~7枚红色或白色,单瓣或重瓣。'\n ]\n\n\nclass FlowerIdentify(tornado.web.RequestHandler):\n\n def get(self):\n self.render('flower_identify.html')\n\n\nclass IdentifyHandler(tornado.websocket.WebSocketHandler):\n\n def post(self):\n dataUrl = self.get_body_argument('image')\n Orientation = self.get_body_argument('orientation')\n content = base64.b64decode(dataUrl)\n \"\"\"保存到图片target.jpg\"\"\"\n file = open('./static/images/target.jpg', 'wb')\n file.write(content)\n file.close()\n \"\"\"图片旋转270(根据实际情况)\"\"\"\n img = Image.open('./static/images/target.jpg')\n if Orientation == '3':\n img = img.rotate(180, expand=True)\n elif Orientation == '6':\n img = img.rotate(270, expand=True)\n elif Orientation == '8':\n img = img.rotate(90, expand=True)\n img.save('./static/images/target.jpg')\n \"\"\"调用函数识别\"\"\"\n flowerIndex = flower_identify()\n flowerInfo = FlowersInfo.flowersInfo[flowerIndex]\n self.render('fi_result.html', data=flowerInfo)\n",
"step-4": "import tornado.web\nimport tornado.websocket\nfrom PIL import Image\nimport base64\nfrom model.flower_identify import flower_identify\n\n\nclass FlowersInfo:\n flowersInfo = [\n '月季花(学名:Rosa chinensis Jacq.): 被称为花中皇后,又称“月月红”,是常绿、半常绿低矮灌木,四季开花,一般为红色,或粉色、偶有白色和黄色,可作为观赏植物,也可作为药用植物,亦称月季。有三个自然变种,现代月季花型多样,有单瓣和重瓣,还有高心卷边等优美花型;其色彩艳丽、丰富,不仅有红、粉黄、白等单色,还有混色、银边等品种;多数品种有芳香。月季的品种繁多,世界上已有近万种,中国也有千种以上。'\n ,\n '绣球(学名:Hydrangea macrophylla (Thunb.) Ser. ): 为虎耳草科绣球属植物。灌木,高1-4米;茎常于基部发出多数放射枝而形成一圆形灌丛;枝圆柱形。叶纸质或近革质,倒卵形或阔椭圆形。伞房状聚伞花序近球形,直径8-20厘米,具短的总花梗,花密集,粉红色、淡蓝色或白色;花瓣长圆形,长3-3.5毫米。蒴果未成熟,长陀螺状;种子未熟。花期6-8月。'\n ,\n '万寿菊(Tagetes erecta L)为菊科万寿菊属一年生草本植物,茎直立,粗壮,具纵细条棱,分枝向上平展。叶羽状分裂;沿叶缘有少数腺体。头状花序单生;总苞杯状,顶端具齿尖;舌状花黄色或暗橙色;管状花花冠黄色。瘦果线形,基部缩小,黑色或褐色,被短微毛;冠毛有1-2个长芒和2-3个短而钝的鳞片。花期7-9月。'\n ,\n '三色堇(学名:Viola tricolor L.)是堇菜科堇菜属的二年或多年生草本植物。基生叶叶片长卵形或披针形,具长柄,茎生叶叶片卵形、长圆形或长圆披针形,先端圆或钝,边缘具稀疏的圆齿或钝锯齿。三色堇是欧洲常见的野花物种,也常栽培于公园中,是冰岛、波兰的国花。花朵通常每花有紫、白、黄三色,故名三色堇。该物种较耐寒,喜凉爽,开花受光照影响较大。'\n ,\n '石榴花,落叶灌木或小乔木石榴的花;为石榴属植物,石榴树干灰褐色,有片状剥落,嫩枝黄绿光滑,常呈四棱形,枝端多为刺状,无顶芽。石榴花单叶对生或簇生,矩圆形或倒卵形,新叶嫩绿或古铜色。花朵至数朵生于枝顶或叶腋,花萼钟形,肉质,先端6裂,表面光滑具腊质,橙红色,宿存。花瓣5~7枚红色或白色,单瓣或重瓣。'\n ]\n\n\nclass FlowerIdentify(tornado.web.RequestHandler):\n\n def get(self):\n self.render('flower_identify.html')\n\n\nclass IdentifyHandler(tornado.websocket.WebSocketHandler):\n\n def post(self):\n dataUrl = self.get_body_argument('image')\n Orientation = self.get_body_argument('orientation')\n content = base64.b64decode(dataUrl)\n \"\"\"保存到图片target.jpg\"\"\"\n file = open('./static/images/target.jpg', 'wb')\n file.write(content)\n file.close()\n \"\"\"图片旋转270(根据实际情况)\"\"\"\n img = Image.open('./static/images/target.jpg')\n if Orientation == '3':\n img = img.rotate(180, expand=True)\n elif Orientation == '6':\n img = img.rotate(270, expand=True)\n elif Orientation == '8':\n img = img.rotate(90, expand=True)\n img.save('./static/images/target.jpg')\n \"\"\"调用函数识别\"\"\"\n flowerIndex = flower_identify()\n flowerInfo = FlowersInfo.flowersInfo[flowerIndex]\n 
self.render('fi_result.html', data=flowerInfo)\n",
"step-5": "# -*- coding: utf-8 -*-\r\n# @File :fi_handlers.py\r\n# @Author:ZengYu\r\n# @Date :2019/5/16\r\n# @software:PyCharm\r\n\r\nimport tornado.web\r\nimport tornado.websocket\r\nfrom PIL import Image\r\nimport base64\r\nfrom model.flower_identify import flower_identify\r\n\r\nclass FlowersInfo():\r\n flowersInfo = [\"月季花(学名:Rosa chinensis Jacq.): 被称为花中皇后,又称“月月红”,是常绿、半常绿低矮灌木,四季开花,一般为红色,或粉色、偶有白色和黄色,可作为观赏植物,也可作为药用植物,亦称月季。有三个自然变种,现代月季花型多样,有单瓣和重瓣,还有高心卷边等优美花型;其色彩艳丽、丰富,不仅有红、粉黄、白等单色,还有混色、银边等品种;多数品种有芳香。月季的品种繁多,世界上已有近万种,中国也有千种以上。\",\r\n \"绣球(学名:Hydrangea macrophylla (Thunb.) Ser. ): 为虎耳草科绣球属植物。灌木,高1-4米;茎常于基部发出多数放射枝而形成一圆形灌丛;枝圆柱形。叶纸质或近革质,倒卵形或阔椭圆形。伞房状聚伞花序近球形,直径8-20厘米,具短的总花梗,花密集,粉红色、淡蓝色或白色;花瓣长圆形,长3-3.5毫米。蒴果未成熟,长陀螺状;种子未熟。花期6-8月。\",\r\n \"万寿菊(Tagetes erecta L)为菊科万寿菊属一年生草本植物,茎直立,粗壮,具纵细条棱,分枝向上平展。叶羽状分裂;沿叶缘有少数腺体。头状花序单生;总苞杯状,顶端具齿尖;舌状花黄色或暗橙色;管状花花冠黄色。瘦果线形,基部缩小,黑色或褐色,被短微毛;冠毛有1-2个长芒和2-3个短而钝的鳞片。花期7-9月。\",\r\n \"三色堇(学名:Viola tricolor L.)是堇菜科堇菜属的二年或多年生草本植物。基生叶叶片长卵形或披针形,具长柄,茎生叶叶片卵形、长圆形或长圆披针形,先端圆或钝,边缘具稀疏的圆齿或钝锯齿。三色堇是欧洲常见的野花物种,也常栽培于公园中,是冰岛、波兰的国花。花朵通常每花有紫、白、黄三色,故名三色堇。该物种较耐寒,喜凉爽,开花受光照影响较大。\",\r\n \"石榴花,落叶灌木或小乔木石榴的花;为石榴属植物,石榴树干灰褐色,有片状剥落,嫩枝黄绿光滑,常呈四棱形,枝端多为刺状,无顶芽。石榴花单叶对生或簇生,矩圆形或倒卵形,新叶嫩绿或古铜色。花朵至数朵生于枝顶或叶腋,花萼钟形,肉质,先端6裂,表面光滑具腊质,橙红色,宿存。花瓣5~7枚红色或白色,单瓣或重瓣。\"]\r\n\r\nclass FlowerIdentify(tornado.web.RequestHandler):\r\n def get(self):\r\n self.render(\"flower_identify.html\")\r\n\r\nclass IdentifyHandler(tornado.websocket.WebSocketHandler):\r\n def post(self):\r\n # 从JSON字符串读取图片数据\r\n dataUrl = self.get_body_argument(\"image\")\r\n Orientation = self.get_body_argument(\"orientation\") # 得到图片方向以便旋转处理\r\n content = base64.b64decode(dataUrl)\r\n '''保存到图片target.jpg'''\r\n file = open('./static/images/target.jpg', 'wb')\r\n file.write(content)\r\n file.close()\r\n\r\n '''图片旋转270(根据实际情况)'''\r\n img = Image.open('./static/images/target.jpg')\r\n if Orientation == \"3\":\r\n img = img.rotate(180, expand=True)\r\n elif Orientation == \"6\":\r\n img = img.rotate(270, expand=True)\r\n elif Orientation == \"8\":\r\n 
img = img.rotate(90, expand=True)\r\n img.save('./static/images/target.jpg')\r\n\r\n '''调用函数识别'''\r\n flowerIndex = flower_identify() # 调用识别函数\r\n flowerInfo = FlowersInfo.flowersInfo[flowerIndex] # 得到结果,并从FlowersInfo里找到该花的资料\r\n self.render(\"fi_result.html\", data=flowerInfo)\r\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
<|reserved_special_token_0|>
class ImagemProfessor(models.Model):
professor = models.ForeignKey(Professor, on_delete=models.CASCADE)
foto = models.ImageField(upload_to='fotos/%d/%m/%Y/', blank=True)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Professor(models.Model):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class ImagemProfessor(models.Model):
professor = models.ForeignKey(Professor, on_delete=models.CASCADE)
foto = models.ImageField(upload_to='fotos/%d/%m/%Y/', blank=True)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Professor(models.Model):
nome = models.CharField(max_length=100)
apelido = models.CharField(max_length=30)
descricao = models.TextField(max_length=1000)
def __str__(self):
return self.nome
class ImagemProfessor(models.Model):
professor = models.ForeignKey(Professor, on_delete=models.CASCADE)
foto = models.ImageField(upload_to='fotos/%d/%m/%Y/', blank=True)
<|reserved_special_token_1|>
from django.db import models
class Professor(models.Model):
nome = models.CharField(max_length=100)
apelido = models.CharField(max_length=30)
descricao = models.TextField(max_length=1000)
def __str__(self):
return self.nome
class ImagemProfessor(models.Model):
professor = models.ForeignKey(Professor, on_delete=models.CASCADE)
foto = models.ImageField(upload_to='fotos/%d/%m/%Y/', blank=True)
|
flexible
|
{
"blob_id": "acb879cb72e5b3ac897a271dc680e4ca763d2122",
"index": 7541,
"step-1": "<mask token>\n\n\nclass ImagemProfessor(models.Model):\n professor = models.ForeignKey(Professor, on_delete=models.CASCADE)\n foto = models.ImageField(upload_to='fotos/%d/%m/%Y/', blank=True)\n",
"step-2": "<mask token>\n\n\nclass Professor(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass ImagemProfessor(models.Model):\n professor = models.ForeignKey(Professor, on_delete=models.CASCADE)\n foto = models.ImageField(upload_to='fotos/%d/%m/%Y/', blank=True)\n",
"step-3": "<mask token>\n\n\nclass Professor(models.Model):\n nome = models.CharField(max_length=100)\n apelido = models.CharField(max_length=30)\n descricao = models.TextField(max_length=1000)\n\n def __str__(self):\n return self.nome\n\n\nclass ImagemProfessor(models.Model):\n professor = models.ForeignKey(Professor, on_delete=models.CASCADE)\n foto = models.ImageField(upload_to='fotos/%d/%m/%Y/', blank=True)\n",
"step-4": "from django.db import models\n\n\nclass Professor(models.Model):\n nome = models.CharField(max_length=100)\n apelido = models.CharField(max_length=30)\n descricao = models.TextField(max_length=1000)\n\n def __str__(self):\n return self.nome\n\n\nclass ImagemProfessor(models.Model):\n professor = models.ForeignKey(Professor, on_delete=models.CASCADE)\n foto = models.ImageField(upload_to='fotos/%d/%m/%Y/', blank=True)\n",
"step-5": null,
"step-ids": [
2,
3,
5,
6
]
}
|
[
2,
3,
5,
6
] |
import pandas as pd
import numpy as np
from datetime import timedelta
import scipy.optimize as optim
from scipy import stats
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
from gen_utils.gen_io import read_run_params,log_msg
#############################################
# --- Run configuration and input data --------------------------------------
# read_run_params() supplies the pipeline paths for the current run; the
# per-sample variant calls come from the pangolin output CSV.
params = read_run_params()
run = params["current_run"]
out_home = params["container"]+"output/"
out_dir = out_home+run+"/"
df = pd.read_csv(out_dir+"4_mcov_strain_variant_map_covid_pangolin_db_input_"+run+".csv")
# Keep only high-quality sequences.
df = df[df.quality=="HQ"]
#########################
# Analysis parameters: variant(s) of interest (voi) and the observation window.
tag="B.1.617.Family"
voi=["B.1.617.2","AY.2","AY.3"]
start_date = "4-15-2021"
end_date = "7-20-2021"
days_since="4/15/2021"
days= 180
# Alternative scenario kept for reference (P.1 over a longer window):
# voi="P.1"
# start_date = "1-1-2021"
# end_date = "6-20-2021"
# days_since="1/1/2021"
# days= 360
#################################
### Deduplicate to one sample per patient (MRN), keeping the earliest
### collection date. Patients with any voi sample are counted as variant.
keep_mrns_variant = np.unique(df[df.variant.isin(voi)]["MRN"])
df_mrns = df[df.MRN.isin(keep_mrns_variant)]
df_mrns = df_mrns[df_mrns.variant.isin(voi)] ### important: drop this patient's non-voi samples (original comment said "b117", but voi here is the B.1.617 family)
df_mrns.sort_values("COLLECTION_DT",inplace=True)
df_mrns.drop_duplicates("MRN",keep="first",inplace=True)
# Same dedup for patients' non-voi samples.
keep_mrns_not_variant = np.unique(df[~df.variant.isin(voi)]["MRN"])
df_mrns_not_variant = df[df.MRN.isin(keep_mrns_not_variant)]
df_mrns_not_variant = df_mrns_not_variant[~df_mrns_not_variant.variant.isin(voi)]
df_mrns_not_variant.sort_values("COLLECTION_DT",inplace=True)
df_mrns_not_variant.drop_duplicates("MRN",keep="first",inplace=True)
# A patient present in both sets keeps the voi row (voi rows are appended
# first, and drop_duplicates keeps the first occurrence).
# NOTE(review): DataFrame.append was removed in pandas 2.x; switch to
# pd.concat when upgrading.
df_2 = df_mrns.append(df_mrns_not_variant)
df_2.drop_duplicates("MRN",keep="first",inplace=True)
df = df_2
df=df[['MCoVNumber','COLLECTION_DT','variant']]
#####################################
# Restrict to the analysis window (collection dates compared as datetime.date).
df.COLLECTION_DT = pd.to_datetime(df.COLLECTION_DT)
df.COLLECTION_DT = df.COLLECTION_DT.dt.date
df = df[ ( (df.COLLECTION_DT>=pd.to_datetime(start_date)) &
         (df.COLLECTION_DT<pd.to_datetime(end_date))
        )
       ]
df.sort_values("COLLECTION_DT",inplace=True)
df.variant.fillna(0,inplace=True)
#########################
# Binarize: 1 = this patient's (deduplicated) sample is a variant of interest.
df.variant = [1 if x in voi else 0 for x in df.variant]
# Per-day voi counts (sum of the 0/1 flags) and per-day total sample counts.
df_variant = df.groupby("COLLECTION_DT")["variant"].agg("sum").reset_index()
df_count = df.groupby("COLLECTION_DT")["variant"].agg("count").reset_index()
# One row per calendar day in the observed span; date_step is the day index
# used as the x-axis of the logistic fit below.
dates = pd.date_range(df.COLLECTION_DT.min(), (df.COLLECTION_DT.max() + timedelta(days=1) )-timedelta(days=1),freq='d')
df_data = pd.DataFrame(dates)
df_data.columns=["dates"]
df_data["date_step"]= [x for x in range(1,df_data.shape[0]+1,1)]
# NOTE(review): the next two assignments align by positional index. `dates`
# covers every calendar day, while df_count/df_variant only contain days that
# have at least one sample -- if any day in the range has zero samples the
# rows shift and the tail becomes NaN. Confirm every day has data, or merge
# on the date instead.
df_data["total"] = df_count.variant
df_data["variant"] = df_variant.variant
df_data["variant_csum"] = np.cumsum(df_variant.variant.values)
df_data["variant_percent"]=[ (x/y)*100 for x,y in zip(df_data.variant,df_data.total)]
# Persist the daily table for downstream reporting.
df_data.to_excel("final_Data_"+tag+"_log_growth_6_28_2021.xlsx",index=False)
def my_logistic(x, a, b, c):
    """Three-parameter logistic curve ``c / (1 + a * exp(-b * x))``.

    ``a`` sets the horizontal offset, ``b`` the growth rate and ``c`` the
    upper asymptote (carrying capacity). Works elementwise when ``x`` is a
    numpy array, so it can be passed directly to ``scipy.optimize.curve_fit``.
    """
    denominator = 1 + a * np.exp(-b * x)
    return c / denominator
# --- Fit the logistic model -------------------------------------------------
# x = day index within the window, y = daily percent of patients with the voi.
x = np.array(df_data.date_step)
# y = np.array(df_data.variant_csum)  # alternative: fit cumulative counts
y = np.array(df_data.variant_percent)
########## optimize
# NOTE(review): the starting point p0 is drawn at random and no seed is set,
# so the fitted (a,b,c) can vary slightly between runs -- seed the RNG if
# reproducibility is required.
po = np.random.exponential(size=3)
# Bounds: a in [0,1000], growth rate b in [0,2] per day, asymptote c in [0,100]%.
bounds = (0,[1000.,2.0,100.])
(a,b,c),cov = optim.curve_fit(my_logistic,x,y,bounds=bounds,p0=po)
# Earlier bound/initialization experiments, kept for reference:
# for i in range(1,20,1):
#     try:
#         # po = np.array([250.,0.10,99.])
#         po= np.random.exponential(size=3)
#         bounds = ([0.,0.1,0.],[1000.,float(i),100.])
#         (a,b,c),cov = optim.curve_fit(my_logistic,x,y,bounds=bounds,p0=po)
#         print(c)
#     except:
#         print("error for " + str(i))
# po = np.array([250.,0.10,99.])
# bounds = ([0.,0.1,99.],[1000.,1.0,100.])
# (a,b,c),cov = optim.curve_fit(my_logistic,x,y,bounds=bounds,p0=po)
# Diagnostic plot: observed points plus the fitted curve, extrapolated to day
# 170. NOTE(review): the 170 is hard-coded while the prediction plot below
# uses `days` -- confirm whether they should match.
plt.scatter(x,y)
plt.plot(x,my_logistic(x,a,b,c))
xprime = np.array([x for x in range(1,170,1)])
yprime = my_logistic(xprime,a,b,c)
plt.plot(xprime,yprime)
plt.savefig("log_fit_best_fit"+tag+".png")
plt.close()
##############################################################################
# 95% confidence interval for the fitted growth rate, using Student-t
# quantiles on the parameter standard errors ("method 2" -- preferred).
# NOTE(review): scipy.stats.distributions is a legacy alias; prefer
# `from scipy.stats import t` in newer scipy.
from scipy.stats.distributions import t
pars, pcov = (a,b,c),cov
alpha = 0.05 # 95% confidence interval = 100*(1-alpha)
n = len(y) # number of data points
p = len(pars) # number of parameters
dof = max(0, n - p) # number of degrees of freedom
# student-t value for the dof and confidence level
tval = t.ppf(1.0-alpha/2., dof)
# Lower/upper bounds on the growth rate b (parameter index 1), filled in by
# the loop below.
val_dw = 0
val_up = 0
# Print the CI for every parameter; zip stops after the 3 parameters even
# though range(n) is longer. NOTE(review): the loop variable `p` shadows the
# parameter count defined above -- harmless here since `p` is not reused
# afterwards, but worth renaming.
for i, p,var in zip(range(n), pars, np.diag(pcov)):
    sigma = var**0.5 # standard error of parameter i
    if i==1:
        val_dw = p - sigma*tval
        val_up = p + sigma*tval
    print ('p{0}: {1} [{2} {3}]'.format(i, p,
                                        p - sigma*tval,
                                        p + sigma*tval))
# Prediction plot: observed data, fitted curve, and a band obtained by
# re-evaluating the curve with the lower/upper growth-rate bounds (a and c
# held at their point estimates).
plt.plot(x,y,'bo',markersize=5,label='Observed')
xprime = np.array([x for x in range(1,days,1)])
yprime = my_logistic(xprime,a,b,c)
plt.plot(xprime,yprime,label='Predicted')
xpred = np.array([x for x in range(1,days,1)])
ypred_dw = my_logistic(xpred,pars[0],val_dw,pars[2])
ypred_up = my_logistic(xpred,pars[0],val_up,pars[2])
plt.fill_between(xpred, ypred_up,ypred_dw,color = 'k', alpha = 0.1,label='95% CI')
plt.title("Logistic growth model ["+tag+"]",fontsize=18)
plt.xlabel("Days since "+days_since,fontsize=15)
plt.ylabel("Percent of patients ",fontsize=15)
plt.legend()
plt.savefig("log_pred_best_fit"+tag+".png")
plt.close()
# Doubling time via the rule of 70: dt = 70 / (growth rate in % per day),
# printed for the point estimate and both confidence bounds.
gr=b;dt = 70/(gr*100);print(dt)
gr=val_up;dt = 70/(gr*100);print(dt)
gr=val_dw;dt = 70/(gr*100);print(dt)
|
normal
|
{
"blob_id": "dcef5f34a62939d992a109e991552e612bf5bad5",
"index": 4619,
"step-1": "<mask token>\n\n\ndef my_logistic(x, a, b, c):\n return c / (1 + a * np.exp(-b * x))\n\n\n<mask token>\n",
"step-2": "<mask token>\nmatplotlib.use('Agg')\n<mask token>\ndf_mrns.sort_values('COLLECTION_DT', inplace=True)\ndf_mrns.drop_duplicates('MRN', keep='first', inplace=True)\n<mask token>\ndf_mrns_not_variant.sort_values('COLLECTION_DT', inplace=True)\ndf_mrns_not_variant.drop_duplicates('MRN', keep='first', inplace=True)\n<mask token>\ndf_2.drop_duplicates('MRN', keep='first', inplace=True)\n<mask token>\ndf.sort_values('COLLECTION_DT', inplace=True)\ndf.variant.fillna(0, inplace=True)\n<mask token>\ndf_data.to_excel('final_Data_' + tag + '_log_growth_6_28_2021.xlsx', index=\n False)\n\n\ndef my_logistic(x, a, b, c):\n return c / (1 + a * np.exp(-b * x))\n\n\n<mask token>\nplt.scatter(x, y)\nplt.plot(x, my_logistic(x, a, b, c))\n<mask token>\nplt.plot(xprime, yprime)\nplt.savefig('log_fit_best_fit' + tag + '.png')\nplt.close()\n<mask token>\nfor i, p, var in zip(range(n), pars, np.diag(pcov)):\n sigma = var ** 0.5\n if i == 1:\n val_dw = p - sigma * tval\n val_up = p + sigma * tval\n print('p{0}: {1} [{2} {3}]'.format(i, p, p - sigma * tval, p + sigma *\n tval))\nplt.plot(x, y, 'bo', markersize=5, label='Observed')\n<mask token>\nplt.plot(xprime, yprime, label='Predicted')\n<mask token>\nplt.fill_between(xpred, ypred_up, ypred_dw, color='k', alpha=0.1, label=\n '95% CI')\nplt.title('Logistic growth model [' + tag + ']', fontsize=18)\nplt.xlabel('Days since ' + days_since, fontsize=15)\nplt.ylabel('Percent of patients ', fontsize=15)\nplt.legend()\nplt.savefig('log_pred_best_fit' + tag + '.png')\nplt.close()\n<mask token>\nprint(dt)\n<mask token>\nprint(dt)\n<mask token>\nprint(dt)\n",
"step-3": "<mask token>\nmatplotlib.use('Agg')\n<mask token>\nparams = read_run_params()\nrun = params['current_run']\nout_home = params['container'] + 'output/'\nout_dir = out_home + run + '/'\ndf = pd.read_csv(out_dir +\n '4_mcov_strain_variant_map_covid_pangolin_db_input_' + run + '.csv')\ndf = df[df.quality == 'HQ']\ntag = 'B.1.617.Family'\nvoi = ['B.1.617.2', 'AY.2', 'AY.3']\nstart_date = '4-15-2021'\nend_date = '7-20-2021'\ndays_since = '4/15/2021'\ndays = 180\nkeep_mrns_variant = np.unique(df[df.variant.isin(voi)]['MRN'])\ndf_mrns = df[df.MRN.isin(keep_mrns_variant)]\ndf_mrns = df_mrns[df_mrns.variant.isin(voi)]\ndf_mrns.sort_values('COLLECTION_DT', inplace=True)\ndf_mrns.drop_duplicates('MRN', keep='first', inplace=True)\nkeep_mrns_not_variant = np.unique(df[~df.variant.isin(voi)]['MRN'])\ndf_mrns_not_variant = df[df.MRN.isin(keep_mrns_not_variant)]\ndf_mrns_not_variant = df_mrns_not_variant[~df_mrns_not_variant.variant.isin\n (voi)]\ndf_mrns_not_variant.sort_values('COLLECTION_DT', inplace=True)\ndf_mrns_not_variant.drop_duplicates('MRN', keep='first', inplace=True)\ndf_2 = df_mrns.append(df_mrns_not_variant)\ndf_2.drop_duplicates('MRN', keep='first', inplace=True)\ndf = df_2\ndf = df[['MCoVNumber', 'COLLECTION_DT', 'variant']]\ndf.COLLECTION_DT = pd.to_datetime(df.COLLECTION_DT)\ndf.COLLECTION_DT = df.COLLECTION_DT.dt.date\ndf = df[(df.COLLECTION_DT >= pd.to_datetime(start_date)) & (df.\n COLLECTION_DT < pd.to_datetime(end_date))]\ndf.sort_values('COLLECTION_DT', inplace=True)\ndf.variant.fillna(0, inplace=True)\ndf.variant = [(1 if x in voi else 0) for x in df.variant]\ndf_variant = df.groupby('COLLECTION_DT')['variant'].agg('sum').reset_index()\ndf_count = df.groupby('COLLECTION_DT')['variant'].agg('count').reset_index()\ndates = pd.date_range(df.COLLECTION_DT.min(), df.COLLECTION_DT.max() +\n timedelta(days=1) - timedelta(days=1), freq='d')\ndf_data = pd.DataFrame(dates)\ndf_data.columns = ['dates']\ndf_data['date_step'] = [x for x in range(1, 
df_data.shape[0] + 1, 1)]\ndf_data['total'] = df_count.variant\ndf_data['variant'] = df_variant.variant\ndf_data['variant_csum'] = np.cumsum(df_variant.variant.values)\ndf_data['variant_percent'] = [(x / y * 100) for x, y in zip(df_data.variant,\n df_data.total)]\ndf_data.to_excel('final_Data_' + tag + '_log_growth_6_28_2021.xlsx', index=\n False)\n\n\ndef my_logistic(x, a, b, c):\n return c / (1 + a * np.exp(-b * x))\n\n\nx = np.array(df_data.date_step)\ny = np.array(df_data.variant_percent)\npo = np.random.exponential(size=3)\nbounds = 0, [1000.0, 2.0, 100.0]\n(a, b, c), cov = optim.curve_fit(my_logistic, x, y, bounds=bounds, p0=po)\nplt.scatter(x, y)\nplt.plot(x, my_logistic(x, a, b, c))\nxprime = np.array([x for x in range(1, 170, 1)])\nyprime = my_logistic(xprime, a, b, c)\nplt.plot(xprime, yprime)\nplt.savefig('log_fit_best_fit' + tag + '.png')\nplt.close()\n<mask token>\npars, pcov = (a, b, c), cov\nalpha = 0.05\nn = len(y)\np = len(pars)\ndof = max(0, n - p)\ntval = t.ppf(1.0 - alpha / 2.0, dof)\nval_dw = 0\nval_up = 0\nfor i, p, var in zip(range(n), pars, np.diag(pcov)):\n sigma = var ** 0.5\n if i == 1:\n val_dw = p - sigma * tval\n val_up = p + sigma * tval\n print('p{0}: {1} [{2} {3}]'.format(i, p, p - sigma * tval, p + sigma *\n tval))\nplt.plot(x, y, 'bo', markersize=5, label='Observed')\nxprime = np.array([x for x in range(1, days, 1)])\nyprime = my_logistic(xprime, a, b, c)\nplt.plot(xprime, yprime, label='Predicted')\nxpred = np.array([x for x in range(1, days, 1)])\nypred_dw = my_logistic(xpred, pars[0], val_dw, pars[2])\nypred_up = my_logistic(xpred, pars[0], val_up, pars[2])\nplt.fill_between(xpred, ypred_up, ypred_dw, color='k', alpha=0.1, label=\n '95% CI')\nplt.title('Logistic growth model [' + tag + ']', fontsize=18)\nplt.xlabel('Days since ' + days_since, fontsize=15)\nplt.ylabel('Percent of patients ', fontsize=15)\nplt.legend()\nplt.savefig('log_pred_best_fit' + tag + '.png')\nplt.close()\ngr = b\ndt = 70 / (gr * 100)\nprint(dt)\ngr = 
val_up\ndt = 70 / (gr * 100)\nprint(dt)\ngr = val_dw\ndt = 70 / (gr * 100)\nprint(dt)\n",
"step-4": "import pandas as pd\nimport numpy as np\nfrom datetime import timedelta\nimport scipy.optimize as optim\nfrom scipy import stats\nimport matplotlib\nmatplotlib.use('Agg')\nimport matplotlib.pyplot as plt\nfrom gen_utils.gen_io import read_run_params, log_msg\nparams = read_run_params()\nrun = params['current_run']\nout_home = params['container'] + 'output/'\nout_dir = out_home + run + '/'\ndf = pd.read_csv(out_dir +\n '4_mcov_strain_variant_map_covid_pangolin_db_input_' + run + '.csv')\ndf = df[df.quality == 'HQ']\ntag = 'B.1.617.Family'\nvoi = ['B.1.617.2', 'AY.2', 'AY.3']\nstart_date = '4-15-2021'\nend_date = '7-20-2021'\ndays_since = '4/15/2021'\ndays = 180\nkeep_mrns_variant = np.unique(df[df.variant.isin(voi)]['MRN'])\ndf_mrns = df[df.MRN.isin(keep_mrns_variant)]\ndf_mrns = df_mrns[df_mrns.variant.isin(voi)]\ndf_mrns.sort_values('COLLECTION_DT', inplace=True)\ndf_mrns.drop_duplicates('MRN', keep='first', inplace=True)\nkeep_mrns_not_variant = np.unique(df[~df.variant.isin(voi)]['MRN'])\ndf_mrns_not_variant = df[df.MRN.isin(keep_mrns_not_variant)]\ndf_mrns_not_variant = df_mrns_not_variant[~df_mrns_not_variant.variant.isin\n (voi)]\ndf_mrns_not_variant.sort_values('COLLECTION_DT', inplace=True)\ndf_mrns_not_variant.drop_duplicates('MRN', keep='first', inplace=True)\ndf_2 = df_mrns.append(df_mrns_not_variant)\ndf_2.drop_duplicates('MRN', keep='first', inplace=True)\ndf = df_2\ndf = df[['MCoVNumber', 'COLLECTION_DT', 'variant']]\ndf.COLLECTION_DT = pd.to_datetime(df.COLLECTION_DT)\ndf.COLLECTION_DT = df.COLLECTION_DT.dt.date\ndf = df[(df.COLLECTION_DT >= pd.to_datetime(start_date)) & (df.\n COLLECTION_DT < pd.to_datetime(end_date))]\ndf.sort_values('COLLECTION_DT', inplace=True)\ndf.variant.fillna(0, inplace=True)\ndf.variant = [(1 if x in voi else 0) for x in df.variant]\ndf_variant = df.groupby('COLLECTION_DT')['variant'].agg('sum').reset_index()\ndf_count = df.groupby('COLLECTION_DT')['variant'].agg('count').reset_index()\ndates = 
pd.date_range(df.COLLECTION_DT.min(), df.COLLECTION_DT.max() +\n timedelta(days=1) - timedelta(days=1), freq='d')\ndf_data = pd.DataFrame(dates)\ndf_data.columns = ['dates']\ndf_data['date_step'] = [x for x in range(1, df_data.shape[0] + 1, 1)]\ndf_data['total'] = df_count.variant\ndf_data['variant'] = df_variant.variant\ndf_data['variant_csum'] = np.cumsum(df_variant.variant.values)\ndf_data['variant_percent'] = [(x / y * 100) for x, y in zip(df_data.variant,\n df_data.total)]\ndf_data.to_excel('final_Data_' + tag + '_log_growth_6_28_2021.xlsx', index=\n False)\n\n\ndef my_logistic(x, a, b, c):\n return c / (1 + a * np.exp(-b * x))\n\n\nx = np.array(df_data.date_step)\ny = np.array(df_data.variant_percent)\npo = np.random.exponential(size=3)\nbounds = 0, [1000.0, 2.0, 100.0]\n(a, b, c), cov = optim.curve_fit(my_logistic, x, y, bounds=bounds, p0=po)\nplt.scatter(x, y)\nplt.plot(x, my_logistic(x, a, b, c))\nxprime = np.array([x for x in range(1, 170, 1)])\nyprime = my_logistic(xprime, a, b, c)\nplt.plot(xprime, yprime)\nplt.savefig('log_fit_best_fit' + tag + '.png')\nplt.close()\nfrom scipy.stats.distributions import t\npars, pcov = (a, b, c), cov\nalpha = 0.05\nn = len(y)\np = len(pars)\ndof = max(0, n - p)\ntval = t.ppf(1.0 - alpha / 2.0, dof)\nval_dw = 0\nval_up = 0\nfor i, p, var in zip(range(n), pars, np.diag(pcov)):\n sigma = var ** 0.5\n if i == 1:\n val_dw = p - sigma * tval\n val_up = p + sigma * tval\n print('p{0}: {1} [{2} {3}]'.format(i, p, p - sigma * tval, p + sigma *\n tval))\nplt.plot(x, y, 'bo', markersize=5, label='Observed')\nxprime = np.array([x for x in range(1, days, 1)])\nyprime = my_logistic(xprime, a, b, c)\nplt.plot(xprime, yprime, label='Predicted')\nxpred = np.array([x for x in range(1, days, 1)])\nypred_dw = my_logistic(xpred, pars[0], val_dw, pars[2])\nypred_up = my_logistic(xpred, pars[0], val_up, pars[2])\nplt.fill_between(xpred, ypred_up, ypred_dw, color='k', alpha=0.1, label=\n '95% CI')\nplt.title('Logistic growth model [' + tag + 
']', fontsize=18)\nplt.xlabel('Days since ' + days_since, fontsize=15)\nplt.ylabel('Percent of patients ', fontsize=15)\nplt.legend()\nplt.savefig('log_pred_best_fit' + tag + '.png')\nplt.close()\ngr = b\ndt = 70 / (gr * 100)\nprint(dt)\ngr = val_up\ndt = 70 / (gr * 100)\nprint(dt)\ngr = val_dw\ndt = 70 / (gr * 100)\nprint(dt)\n",
"step-5": "import pandas as pd\nimport numpy as np\nfrom datetime import timedelta\nimport scipy.optimize as optim\nfrom scipy import stats\nimport matplotlib\nmatplotlib.use('Agg')\nimport matplotlib.pyplot as plt\nfrom gen_utils.gen_io import read_run_params,log_msg\n\n\n\n#############################################\n\nparams = read_run_params()\nrun = params[\"current_run\"]\nout_home = params[\"container\"]+\"output/\" \nout_dir = out_home+run+\"/\"\n\ndf = pd.read_csv(out_dir+\"4_mcov_strain_variant_map_covid_pangolin_db_input_\"+run+\".csv\")\ndf = df[df.quality==\"HQ\"]\n\n\n \n#########################\ntag=\"B.1.617.Family\"\nvoi=[\"B.1.617.2\",\"AY.2\",\"AY.3\"]\nstart_date = \"4-15-2021\"\nend_date = \"7-20-2021\"\ndays_since=\"4/15/2021\"\ndays= 180\n\n# voi=\"P.1\"\n# start_date = \"1-1-2021\"\n# end_date = \"6-20-2021\"\n# days_since=\"1/1/2021\"\n# days= 360\n#################################\n\n\n###take unique patients with variant\nkeep_mrns_variant = np.unique(df[df.variant.isin(voi)][\"MRN\"])\ndf_mrns = df[df.MRN.isin(keep_mrns_variant)]\ndf_mrns = df_mrns[df_mrns.variant.isin(voi)] ###important step--remove non b117 variant \ndf_mrns.sort_values(\"COLLECTION_DT\",inplace=True)\ndf_mrns.drop_duplicates(\"MRN\",keep=\"first\",inplace=True)\n\n\nkeep_mrns_not_variant = np.unique(df[~df.variant.isin(voi)][\"MRN\"])\ndf_mrns_not_variant = df[df.MRN.isin(keep_mrns_not_variant)]\ndf_mrns_not_variant = df_mrns_not_variant[~df_mrns_not_variant.variant.isin(voi)]\ndf_mrns_not_variant.sort_values(\"COLLECTION_DT\",inplace=True)\ndf_mrns_not_variant.drop_duplicates(\"MRN\",keep=\"first\",inplace=True)\n\ndf_2 = df_mrns.append(df_mrns_not_variant)\ndf_2.drop_duplicates(\"MRN\",keep=\"first\",inplace=True)\n\ndf = df_2\n\n\ndf=df[['MCoVNumber','COLLECTION_DT','variant']]\n\n#####################################\n\ndf.COLLECTION_DT = pd.to_datetime(df.COLLECTION_DT)\ndf.COLLECTION_DT = df.COLLECTION_DT.dt.date\n\n\ndf = df[ ( 
(df.COLLECTION_DT>=pd.to_datetime(start_date)) &\n (df.COLLECTION_DT<pd.to_datetime(end_date)) \n )\n ]\ndf.sort_values(\"COLLECTION_DT\",inplace=True)\n\ndf.variant.fillna(0,inplace=True)\n#########################\n\ndf.variant = [1 if x in voi else 0 for x in df.variant]\n\n\ndf_variant = df.groupby(\"COLLECTION_DT\")[\"variant\"].agg(\"sum\").reset_index()\ndf_count = df.groupby(\"COLLECTION_DT\")[\"variant\"].agg(\"count\").reset_index()\n\ndates = pd.date_range(df.COLLECTION_DT.min(), (df.COLLECTION_DT.max() + timedelta(days=1) )-timedelta(days=1),freq='d')\ndf_data = pd.DataFrame(dates)\ndf_data.columns=[\"dates\"]\ndf_data[\"date_step\"]= [x for x in range(1,df_data.shape[0]+1,1)]\ndf_data[\"total\"] = df_count.variant\ndf_data[\"variant\"] = df_variant.variant\ndf_data[\"variant_csum\"] = np.cumsum(df_variant.variant.values)\ndf_data[\"variant_percent\"]=[ (x/y)*100 for x,y in zip(df_data.variant,df_data.total)]\ndf_data.to_excel(\"final_Data_\"+tag+\"_log_growth_6_28_2021.xlsx\",index=False)\n\ndef my_logistic(x,a,b,c):\n return c/(1 + a * np.exp(-b*x))\n\nx = np.array(df_data.date_step)\n# y = np.array(df_data.variant_csum)\ny = np.array(df_data.variant_percent)\n\n##########optimize\npo = np.random.exponential(size=3)\nbounds = (0,[1000.,2.0,100.])\n(a,b,c),cov = optim.curve_fit(my_logistic,x,y,bounds=bounds,p0=po)\n\n# for i in range(1,20,1):\n# try:\n# # po = np.array([250.,0.10,99.])\n# po= np.random.exponential(size=3)\n# bounds = ([0.,0.1,0.],[1000.,float(i),100.])\n# (a,b,c),cov = optim.curve_fit(my_logistic,x,y,bounds=bounds,p0=po)\n# print(c)\n# except:\n# print(\"error for \" + str(i))\n\n# po = np.array([250.,0.10,99.])\n# bounds = ([0.,0.1,99.],[1000.,1.0,100.])\n# (a,b,c),cov = optim.curve_fit(my_logistic,x,y,bounds=bounds,p0=po)\n\nplt.scatter(x,y)\nplt.plot(x,my_logistic(x,a,b,c))\nxprime = np.array([x for x in range(1,170,1)])\nyprime = 
my_logistic(xprime,a,b,c)\nplt.plot(xprime,yprime)\nplt.savefig(\"log_fit_best_fit\"+tag+\".png\")\nplt.close()\n\n\n############################## method 2 using t distribution on error --> perfer this one \n\nfrom scipy.stats.distributions import t\n\npars, pcov = (a,b,c),cov\n\nalpha = 0.05 # 95% confidence interval = 100*(1-alpha)\n\nn = len(y) # number of data points\np = len(pars) # number of parameters\n\ndof = max(0, n - p) # number of degrees of freedom\n\n# student-t value for the dof and confidence level\ntval = t.ppf(1.0-alpha/2., dof) \n\nval_dw = 0\nval_up = 0\nfor i, p,var in zip(range(n), pars, np.diag(pcov)):\n sigma = var**0.5\n \n if i==1:\n val_dw = p - sigma*tval\n val_up = p + sigma*tval\n\n print ('p{0}: {1} [{2} {3}]'.format(i, p,\n p - sigma*tval,\n p + sigma*tval))\n\n\n\nplt.plot(x,y,'bo',markersize=5,label='Observed')\nxprime = np.array([x for x in range(1,days,1)])\nyprime = my_logistic(xprime,a,b,c)\nplt.plot(xprime,yprime,label='Predicted')\n\nxpred = np.array([x for x in range(1,days,1)])\nypred_dw = my_logistic(xpred,pars[0],val_dw,pars[2])\nypred_up = my_logistic(xpred,pars[0],val_up,pars[2])\n\nplt.fill_between(xpred, ypred_up,ypred_dw,color = 'k', alpha = 0.1,label='95% CI')\n\nplt.title(\"Logistic growth model [\"+tag+\"]\",fontsize=18)\nplt.xlabel(\"Days since \"+days_since,fontsize=15)\nplt.ylabel(\"Percent of patients \",fontsize=15)\n\nplt.legend()\nplt.savefig(\"log_pred_best_fit\"+tag+\".png\")\nplt.close()\n\n\ngr=b;dt = 70/(gr*100);print(dt)\ngr=val_up;dt = 70/(gr*100);print(dt)\ngr=val_dw;dt = 70/(gr*100);print(dt)\n\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def generate_launch_description():
return LaunchDescription([Node(package='beckhoff_ros', executable=
'beckhoff_ros_node', name='beckhoff_ros_node', parameters=[params],
output='screen')])
<|reserved_special_token_1|>
<|reserved_special_token_0|>
params = os.path.join('INSERT_PATH/src/beckhoff_ros', 'config', 'params.yaml')
def generate_launch_description():
return LaunchDescription([Node(package='beckhoff_ros', executable=
'beckhoff_ros_node', name='beckhoff_ros_node', parameters=[params],
output='screen')])
<|reserved_special_token_1|>
from launch import LaunchDescription
from launch_ros.actions import Node
import os
params = os.path.join('INSERT_PATH/src/beckhoff_ros', 'config', 'params.yaml')
def generate_launch_description():
return LaunchDescription([Node(package='beckhoff_ros', executable=
'beckhoff_ros_node', name='beckhoff_ros_node', parameters=[params],
output='screen')])
|
flexible
|
{
"blob_id": "ae4f8eb71939ff212d05d12f65edeaecf66f2205",
"index": 4874,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef generate_launch_description():\n return LaunchDescription([Node(package='beckhoff_ros', executable=\n 'beckhoff_ros_node', name='beckhoff_ros_node', parameters=[params],\n output='screen')])\n",
"step-3": "<mask token>\nparams = os.path.join('INSERT_PATH/src/beckhoff_ros', 'config', 'params.yaml')\n\n\ndef generate_launch_description():\n return LaunchDescription([Node(package='beckhoff_ros', executable=\n 'beckhoff_ros_node', name='beckhoff_ros_node', parameters=[params],\n output='screen')])\n",
"step-4": "from launch import LaunchDescription\nfrom launch_ros.actions import Node\nimport os\nparams = os.path.join('INSERT_PATH/src/beckhoff_ros', 'config', 'params.yaml')\n\n\ndef generate_launch_description():\n return LaunchDescription([Node(package='beckhoff_ros', executable=\n 'beckhoff_ros_node', name='beckhoff_ros_node', parameters=[params],\n output='screen')])\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
"""
Have the function CharlietheDog(strArr) read the array of strings stored in strArr which
will be a 4x4 matrix of the characters 'C', 'H', 'F', 'O', where C represents Charlie the dog,
H represents its home, F represents dog food, and O represents an empty space in the grid.
Your goal is to figure out the least amount of moves required to get Charlie to grab each piece of food in the grid by moving
up, down, left, or right, and then make it home right after.
Charlie cannot move onto the home before all pieces of food have been collected.
For example: if strArr is ["FOOF", "OCOO", "OOOH", "FOOO"], then this looks like the following grid:
F O O F
O C O O
O O O H
F O O O
For the input above, the least amount of steps where the dog can reach each piece of food,
and then return home is 11 steps, so your program should return the number 11.
The grid will always contain between 1 and 8 pieces of food.
Use the Parameter Testing feature in the box below to test your code with different arguments.
"""
from itertools import permutations
def CharlietheDog(strArr):
    """Return the fewest moves for the dog to collect every piece of food
    and then reach home, considering every possible food-visit order."""
    # Locate the dog, the home cell and every piece of food in the grid.
    food_cells = []
    home_cell = None
    dog_cell = None
    for row, line in enumerate(strArr):
        for col, cell in enumerate(line):
            if cell == 'F':
                food_cells.append((row, col))
            elif cell == 'H':
                home_cell = (row, col)
            elif cell == 'C':
                dog_cell = (row, col)

    def path_length(stops):
        # Sum of Manhattan distances along dog -> stops[0] -> ... -> stops[-1].
        total = 0
        here = dog_cell
        for target in stops:
            total += abs(target[0] - here[0]) + abs(target[1] - here[1])
            here = target
        return total

    # The grid has no obstacles, so the cost of a visiting order is just the
    # Manhattan length of its route; minimise over all orders, ending at home.
    best = min(path_length(order + (home_cell,))
               for order in permutations(food_cells))
    return int(best)
# keep this function call here
# Coderbyte-style harness; raw_input implies Python 2.
# NOTE(review): raw_input() returns a single string, not a list of rows --
# the grading runner presumably supplies input in a form this accepts; confirm.
print (CharlietheDog(raw_input()))
|
normal
|
{
"blob_id": "731110b02c8a09dc84042a99c14eef990ae33cd2",
"index": 5913,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef CharlietheDog(strArr):\n\n def walk(food_home, dog, matriz, steps=0):\n food_home_dx = food_home[0][0] - dog[0]\n food_home_dy = food_home[0][1] - dog[1]\n walk_x = food_home_dx / (abs(food_home_dx) if food_home_dx != 0 else 1)\n walk_y = food_home_dy / (abs(food_home_dy) if food_home_dy != 0 else 1)\n steps += abs(walk_x) + abs(walk_y)\n dog = dog[0] + walk_x, dog[1] + walk_y\n if food_home[0] == dog:\n food_home = food_home[1:]\n food_home_size = len(food_home)\n if food_home_size <= 0:\n return steps\n return walk(food_home, dog, matriz, steps)\n food = []\n home = None\n dog = None\n for i in range(len(strArr)):\n for j in range(len(strArr[i])):\n if strArr[i][j] == 'F':\n food.append((i, j))\n if strArr[i][j] == 'H':\n home = i, j\n if strArr[i][j] == 'C':\n dog = i, j\n foods = permutations(food)\n min_steps = None\n for food in foods:\n food_home = food + (home,)\n steps = walk(food_home, dog, strArr)\n if min_steps == None or steps < min_steps:\n min_steps = steps\n return int(min_steps)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef CharlietheDog(strArr):\n\n def walk(food_home, dog, matriz, steps=0):\n food_home_dx = food_home[0][0] - dog[0]\n food_home_dy = food_home[0][1] - dog[1]\n walk_x = food_home_dx / (abs(food_home_dx) if food_home_dx != 0 else 1)\n walk_y = food_home_dy / (abs(food_home_dy) if food_home_dy != 0 else 1)\n steps += abs(walk_x) + abs(walk_y)\n dog = dog[0] + walk_x, dog[1] + walk_y\n if food_home[0] == dog:\n food_home = food_home[1:]\n food_home_size = len(food_home)\n if food_home_size <= 0:\n return steps\n return walk(food_home, dog, matriz, steps)\n food = []\n home = None\n dog = None\n for i in range(len(strArr)):\n for j in range(len(strArr[i])):\n if strArr[i][j] == 'F':\n food.append((i, j))\n if strArr[i][j] == 'H':\n home = i, j\n if strArr[i][j] == 'C':\n dog = i, j\n foods = permutations(food)\n min_steps = None\n for food in foods:\n food_home = food + (home,)\n steps = walk(food_home, dog, strArr)\n if min_steps == None or steps < min_steps:\n min_steps = steps\n return int(min_steps)\n\n\nprint(CharlietheDog(raw_input()))\n",
"step-4": "<mask token>\nfrom itertools import permutations\n\n\ndef CharlietheDog(strArr):\n\n def walk(food_home, dog, matriz, steps=0):\n food_home_dx = food_home[0][0] - dog[0]\n food_home_dy = food_home[0][1] - dog[1]\n walk_x = food_home_dx / (abs(food_home_dx) if food_home_dx != 0 else 1)\n walk_y = food_home_dy / (abs(food_home_dy) if food_home_dy != 0 else 1)\n steps += abs(walk_x) + abs(walk_y)\n dog = dog[0] + walk_x, dog[1] + walk_y\n if food_home[0] == dog:\n food_home = food_home[1:]\n food_home_size = len(food_home)\n if food_home_size <= 0:\n return steps\n return walk(food_home, dog, matriz, steps)\n food = []\n home = None\n dog = None\n for i in range(len(strArr)):\n for j in range(len(strArr[i])):\n if strArr[i][j] == 'F':\n food.append((i, j))\n if strArr[i][j] == 'H':\n home = i, j\n if strArr[i][j] == 'C':\n dog = i, j\n foods = permutations(food)\n min_steps = None\n for food in foods:\n food_home = food + (home,)\n steps = walk(food_home, dog, strArr)\n if min_steps == None or steps < min_steps:\n min_steps = steps\n return int(min_steps)\n\n\nprint(CharlietheDog(raw_input()))\n",
"step-5": "\"\"\"\nHave the function CharlietheDog(strArr) read the array of strings stored in strArr which \nwill be a 4x4 matrix of the characters 'C', 'H', 'F', 'O', where C represents Charlie the dog,\n H represents its home, F represents dog food, and O represents and empty space in the grid. \n Your goal is to figure out the least amount of moves required to get Charlie to grab each piece of food in the grid by moving\n up, down, left, or right, and then make it home right after. \n Charlie cannot move onto the home before all pieces of food have been collected. \n For example: if strArr is [\"FOOF\", \"OCOO\", \"OOOH\", \"FOOO\"], then this looks like the following grid: \n \n F O O F\n O C O O\n O O O H\n F O O O \n\nFor the input above, the least amount of steps where the dog can reach each piece of food, \nand then return home is 11 steps, so your program should return the number 11. \nThe grid will always contain between 1 and 8 pieces of food. \n\nUse the Parameter Testing feature in the box below to test your code with different arguments.\n\"\"\"\n\nfrom itertools import permutations \n\ndef CharlietheDog(strArr):\n def walk(food_home, dog, matriz, steps=0):\n\n food_home_dx = food_home[0][0] - dog[0]\n food_home_dy = food_home[0][1] - dog[1]\n\n walk_x = food_home_dx/(abs(food_home_dx) if food_home_dx != 0 else 1)\n walk_y = food_home_dy/(abs(food_home_dy) if food_home_dy != 0 else 1)\n\n steps += abs(walk_x) + abs(walk_y)\n\n dog = (dog[0] + walk_x, dog[1] + walk_y)\n\n if food_home[0] == dog:\n food_home = food_home[1:]\n\n food_home_size = len(food_home)\n if food_home_size <= 0:\n return steps\n \n return walk(food_home, dog, matriz, steps)\n\n food = []\n home = None\n dog = None\n\n for i in range(len(strArr)):\n for j in range(len(strArr[i])):\n if strArr[i][j] == 'F':\n food.append((i, j))\n if strArr[i][j] == 'H':\n home = (i, j)\n if strArr[i][j] == 'C':\n dog = (i, j)\n\n foods = permutations(food)\n\n min_steps = None\n for food in 
foods:\n food_home = food + (home, )\n steps = walk(food_home, dog, strArr)\n if min_steps == None or steps < min_steps:\n min_steps = steps\n\n return int(min_steps)\n\n\n# keep this function call here \nprint (CharlietheDog(raw_input())) \n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from django.contrib.auth import get_user_model
from django.core.urlresolvers import reverse_lazy
from django.shortcuts import get_object_or_404
from rest_framework import status, viewsets
from rest_framework.exceptions import PermissionDenied
from rest_framework.permissions import IsAuthenticatedOrReadOnly
from rest_framework.response import Response
from musette import models, realtime, utils
from musette.api import serializers
from musette.api.permissions import ForumPermissions, IsReadOnly
# ViewSets for user
class UserViewSet(viewsets.ReadOnlyModelViewSet):
    """Read-only API for user accounts, looked up by username."""
    # Resolve the active user model once, at class-definition time.
    User = get_user_model()
    queryset = User.objects.all()
    serializer_class = serializers.UserSerializer
    lookup_field = 'username'
# ViewSets for categiry
class CategoryViewSet(viewsets.ReadOnlyModelViewSet):
    """Read-only API for forum categories."""
    queryset = models.Category.objects.all()
    serializer_class = serializers.CategorySerializer
# ViewSets for forum
class ForumViewSet(viewsets.ReadOnlyModelViewSet):
    """Read-only API for forums."""
    queryset = models.Forum.objects.all()
    serializer_class = serializers.ForumSerializer
# ViewSets for topic
class TopicViewSet(viewsets.ModelViewSet):
    """CRUD API for forum topics.

    Reads are open; writes require authentication plus the forum-level
    checks in ForumPermissions, and troll-flagged users are forced to
    read-only access.
    """
    queryset = models.Topic.objects.all()
    serializer_class = serializers.TopicSerializer
    permission_classes = (
        IsAuthenticatedOrReadOnly, ForumPermissions,
    )
    def get_permissions(self):
        # If is troll then only is read only
        if self.request.user.is_authenticated():
            if self.request.user.user.is_troll:
                self.permission_classes = [IsReadOnly, ]
        return super(TopicViewSet, self).get_permissions()
    def perform_create(self, serializer):
        """Create a topic and fan out email/realtime notifications.

        Only the user named in the payload (or a superuser) may create,
        and only in forums the user is allowed to post in.

        NOTE(review): DRF's CreateModelMixin ignores perform_create's
        return value and validates the serializer before calling it, so
        the Response objects returned here (and the is_valid() else
        branch) look like dead code -- confirm against the DRF version
        in use before relying on them.
        """
        request = self.request
        is_my_user = int(request.data['user']) == request.user.id
        # If is my user or is superuser can create
        if is_my_user or request.user.is_superuser:
            forum_id = request.data['forum']
            forum = get_object_or_404(models.Forum, pk=forum_id)
            category = forum.category.name
            # If has permissions
            if utils.user_can_create_topic(category, forum, request.user):
                # Save the record topic
                if serializer.is_valid():
                    # If the forum is moderate send email
                    serializer = utils.check_moderate_topic_email(
                        request, forum, serializer
                    )
                    # Save record
                    topic = serializer.save()
                else:
                    return Response(
                        serializer.errors,
                        status=status.HTTP_400_BAD_REQUEST
                    )
                # Parameters for realtime
                photo = utils.get_photo_profile(request.user.id)
                username = request.user.username
                forum_name = forum.name
                # Get moderators forum and send notification
                list_us = utils.get_moderators_and_send_notification_topic(
                    request, forum, topic
                )
                # Data necessary for realtime
                data = realtime.data_base_realtime(
                    topic, photo, forum_name, username
                )
                # Tag the payload as a topic event for realtime consumers.
                data['is_topic'] = True
                data['is_comment'] = False
                # Send new notification realtime
                realtime.new_notification(data, list_us)
                return Response(
                    serializer.data, status=status.HTTP_201_CREATED
                )
            else:
                raise PermissionDenied({
                    "message": "You don't have permission to access"
                })
        else:
            # Payload user id does not match the requester, and the
            # requester is not a superuser.
            raise PermissionDenied({
                "message": "Not your user"
            })
# ViewSets for register
class RegisterViewSet(viewsets.ModelViewSet):
    """Forum registrations: a user may register in a given forum only once."""
    queryset = models.Register.objects.all()
    serializer_class = serializers.RegisterSerializer
    permission_classes = (IsAuthenticatedOrReadOnly, ForumPermissions,)

    def get_permissions(self):
        # Troll-flagged accounts are demoted to read-only access.
        current_user = self.request.user
        if current_user.is_authenticated() and current_user.user.is_troll:
            self.permission_classes = [IsReadOnly]
        return super(RegisterViewSet, self).get_permissions()

    def create(self, request, **kwargs):
        """Register the requesting user in a forum, rejecting duplicates."""
        acting_on_self = int(request.data['user']) == request.user.id
        if not (acting_on_self or request.user.is_superuser):
            raise PermissionDenied({
                "message": "Not your user"
            })
        already_registered = models.Register.objects.filter(
            forum_id=request.data['forum'], user=request.user
        )
        if already_registered.count() > 0:
            raise PermissionDenied({
                "message": "You are already Registered"
            })
        return super(RegisterViewSet, self).create(request, **kwargs)
# ViewSets for comment
class CommentViewSet(viewsets.ModelViewSet):
    """CRUD API for topic comments.

    Writes require authentication plus ForumPermissions; troll-flagged
    users are forced to read-only access.
    """
    queryset = models.Comment.objects.all()
    serializer_class = serializers.CommentSerializer
    permission_classes = (IsAuthenticatedOrReadOnly, ForumPermissions,)
    def get_permissions(self):
        # If is troll then only is read only
        if self.request.user.is_authenticated():
            if self.request.user.user.is_troll:
                self.permission_classes = [IsReadOnly, ]
        return super(CommentViewSet, self).get_permissions()
    def perform_create(self, serializer):
        """Create a comment, then notify followers by email and realtime.

        NOTE(review): DRF's CreateModelMixin ignores perform_create's
        return value and validates the serializer before calling it, so
        the Response objects returned here look like dead code -- confirm
        against the DRF version in use.
        """
        request = self.request
        is_my_user = int(request.data['user']) == request.user.id
        # If is my user or is superuser can create
        if is_my_user or request.user.is_superuser:
            # Save the record comment
            if serializer.is_valid():
                comment = serializer.save()
            else:
                return Response(
                    serializer.errors,
                    status=status.HTTP_400_BAD_REQUEST
                )
            topic_id = request.data['topic']
            topic = get_object_or_404(models.Topic, pk=topic_id)
            # Parameters for notification comments
            photo = utils.get_photo_profile(request.user.id)
            username = request.user.username
            forum = topic.forum.name
            # Send notifications comment
            params = utils.get_users_and_send_notification_comment(
                request, topic, comment
            )
            list_us = params['list_us']
            list_email = params['list_email']
            # Get url for email
            url = reverse_lazy('topic', kwargs={
                'category': topic.forum.category, 'forum': forum,
                'slug': topic.slug, 'idtopic': str(topic.idtopic)
            })
            # Send e mail
            utils.send_mail_comment(str(url), list_email)
            # Data necessary for realtime
            data = realtime.data_base_realtime(topic, photo, forum, username)
            # Tag the payload as a comment event for realtime consumers.
            data['is_topic'] = False
            data['is_comment'] = True
            # Send new notification realtime
            realtime.new_notification(data, list_us)
            # Send new comment in realtime
            comment_description = request.data['description']
            realtime.new_comment(data, comment_description)
            return Response(
                serializer.data, status=status.HTTP_201_CREATED
            )
        else:
            raise PermissionDenied({
                "message": "Not your user"
            })
# ViewSets for profile
class ProfileViewSet(viewsets.ReadOnlyModelViewSet):
    """Read-only API exposing user profiles (list and detail only)."""
    queryset = models.Profile.objects.all()
    serializer_class = serializers.ProfileSerializer
|
normal
|
{
"blob_id": "4d059d1ca407ef60f1fbf9d8bead1cf45c90c28a",
"index": 8227,
"step-1": "<mask token>\n\n\nclass RegisterViewSet(viewsets.ModelViewSet):\n queryset = models.Register.objects.all()\n serializer_class = serializers.RegisterSerializer\n permission_classes = IsAuthenticatedOrReadOnly, ForumPermissions\n\n def get_permissions(self):\n if self.request.user.is_authenticated():\n if self.request.user.user.is_troll:\n self.permission_classes = [IsReadOnly]\n return super(RegisterViewSet, self).get_permissions()\n\n def create(self, request, **kwargs):\n is_my_user = int(request.data['user']) == request.user.id\n if is_my_user or request.user.is_superuser:\n forum_id = request.data['forum']\n exists_register = models.Register.objects.filter(forum_id=\n forum_id, user=request.user)\n if exists_register.count() == 0:\n return super(RegisterViewSet, self).create(request, **kwargs)\n else:\n raise PermissionDenied({'message':\n 'You are already Registered'})\n else:\n raise PermissionDenied({'message': 'Not your user'})\n\n\nclass CommentViewSet(viewsets.ModelViewSet):\n queryset = models.Comment.objects.all()\n serializer_class = serializers.CommentSerializer\n permission_classes = IsAuthenticatedOrReadOnly, ForumPermissions\n\n def get_permissions(self):\n if self.request.user.is_authenticated():\n if self.request.user.user.is_troll:\n self.permission_classes = [IsReadOnly]\n return super(CommentViewSet, self).get_permissions()\n\n def perform_create(self, serializer):\n request = self.request\n is_my_user = int(request.data['user']) == request.user.id\n if is_my_user or request.user.is_superuser:\n if serializer.is_valid():\n comment = serializer.save()\n else:\n return Response(serializer.errors, status=status.\n HTTP_400_BAD_REQUEST)\n topic_id = request.data['topic']\n topic = get_object_or_404(models.Topic, pk=topic_id)\n photo = utils.get_photo_profile(request.user.id)\n username = request.user.username\n forum = topic.forum.name\n params = utils.get_users_and_send_notification_comment(request,\n topic, comment)\n list_us = 
params['list_us']\n list_email = params['list_email']\n url = reverse_lazy('topic', kwargs={'category': topic.forum.\n category, 'forum': forum, 'slug': topic.slug, 'idtopic':\n str(topic.idtopic)})\n utils.send_mail_comment(str(url), list_email)\n data = realtime.data_base_realtime(topic, photo, forum, username)\n data['is_topic'] = False\n data['is_comment'] = True\n realtime.new_notification(data, list_us)\n comment_description = request.data['description']\n realtime.new_comment(data, comment_description)\n return Response(serializer.data, status=status.HTTP_201_CREATED)\n else:\n raise PermissionDenied({'message': 'Not your user'})\n\n\nclass ProfileViewSet(viewsets.ReadOnlyModelViewSet):\n queryset = models.Profile.objects.all()\n serializer_class = serializers.ProfileSerializer\n",
"step-2": "<mask token>\n\n\nclass CategoryViewSet(viewsets.ReadOnlyModelViewSet):\n <mask token>\n <mask token>\n\n\nclass ForumViewSet(viewsets.ReadOnlyModelViewSet):\n queryset = models.Forum.objects.all()\n serializer_class = serializers.ForumSerializer\n\n\nclass TopicViewSet(viewsets.ModelViewSet):\n queryset = models.Topic.objects.all()\n serializer_class = serializers.TopicSerializer\n permission_classes = IsAuthenticatedOrReadOnly, ForumPermissions\n\n def get_permissions(self):\n if self.request.user.is_authenticated():\n if self.request.user.user.is_troll:\n self.permission_classes = [IsReadOnly]\n return super(TopicViewSet, self).get_permissions()\n\n def perform_create(self, serializer):\n request = self.request\n is_my_user = int(request.data['user']) == request.user.id\n if is_my_user or request.user.is_superuser:\n forum_id = request.data['forum']\n forum = get_object_or_404(models.Forum, pk=forum_id)\n category = forum.category.name\n if utils.user_can_create_topic(category, forum, request.user):\n if serializer.is_valid():\n serializer = utils.check_moderate_topic_email(request,\n forum, serializer)\n topic = serializer.save()\n else:\n return Response(serializer.errors, status=status.\n HTTP_400_BAD_REQUEST)\n photo = utils.get_photo_profile(request.user.id)\n username = request.user.username\n forum_name = forum.name\n list_us = utils.get_moderators_and_send_notification_topic(\n request, forum, topic)\n data = realtime.data_base_realtime(topic, photo, forum_name,\n username)\n data['is_topic'] = True\n data['is_comment'] = False\n realtime.new_notification(data, list_us)\n return Response(serializer.data, status=status.HTTP_201_CREATED\n )\n else:\n raise PermissionDenied({'message':\n \"You don't have permission to access\"})\n else:\n raise PermissionDenied({'message': 'Not your user'})\n\n\nclass RegisterViewSet(viewsets.ModelViewSet):\n queryset = models.Register.objects.all()\n serializer_class = serializers.RegisterSerializer\n 
permission_classes = IsAuthenticatedOrReadOnly, ForumPermissions\n\n def get_permissions(self):\n if self.request.user.is_authenticated():\n if self.request.user.user.is_troll:\n self.permission_classes = [IsReadOnly]\n return super(RegisterViewSet, self).get_permissions()\n\n def create(self, request, **kwargs):\n is_my_user = int(request.data['user']) == request.user.id\n if is_my_user or request.user.is_superuser:\n forum_id = request.data['forum']\n exists_register = models.Register.objects.filter(forum_id=\n forum_id, user=request.user)\n if exists_register.count() == 0:\n return super(RegisterViewSet, self).create(request, **kwargs)\n else:\n raise PermissionDenied({'message':\n 'You are already Registered'})\n else:\n raise PermissionDenied({'message': 'Not your user'})\n\n\nclass CommentViewSet(viewsets.ModelViewSet):\n queryset = models.Comment.objects.all()\n serializer_class = serializers.CommentSerializer\n permission_classes = IsAuthenticatedOrReadOnly, ForumPermissions\n\n def get_permissions(self):\n if self.request.user.is_authenticated():\n if self.request.user.user.is_troll:\n self.permission_classes = [IsReadOnly]\n return super(CommentViewSet, self).get_permissions()\n\n def perform_create(self, serializer):\n request = self.request\n is_my_user = int(request.data['user']) == request.user.id\n if is_my_user or request.user.is_superuser:\n if serializer.is_valid():\n comment = serializer.save()\n else:\n return Response(serializer.errors, status=status.\n HTTP_400_BAD_REQUEST)\n topic_id = request.data['topic']\n topic = get_object_or_404(models.Topic, pk=topic_id)\n photo = utils.get_photo_profile(request.user.id)\n username = request.user.username\n forum = topic.forum.name\n params = utils.get_users_and_send_notification_comment(request,\n topic, comment)\n list_us = params['list_us']\n list_email = params['list_email']\n url = reverse_lazy('topic', kwargs={'category': topic.forum.\n category, 'forum': forum, 'slug': topic.slug, 'idtopic':\n 
str(topic.idtopic)})\n utils.send_mail_comment(str(url), list_email)\n data = realtime.data_base_realtime(topic, photo, forum, username)\n data['is_topic'] = False\n data['is_comment'] = True\n realtime.new_notification(data, list_us)\n comment_description = request.data['description']\n realtime.new_comment(data, comment_description)\n return Response(serializer.data, status=status.HTTP_201_CREATED)\n else:\n raise PermissionDenied({'message': 'Not your user'})\n\n\nclass ProfileViewSet(viewsets.ReadOnlyModelViewSet):\n queryset = models.Profile.objects.all()\n serializer_class = serializers.ProfileSerializer\n",
"step-3": "<mask token>\n\n\nclass CategoryViewSet(viewsets.ReadOnlyModelViewSet):\n queryset = models.Category.objects.all()\n serializer_class = serializers.CategorySerializer\n\n\nclass ForumViewSet(viewsets.ReadOnlyModelViewSet):\n queryset = models.Forum.objects.all()\n serializer_class = serializers.ForumSerializer\n\n\nclass TopicViewSet(viewsets.ModelViewSet):\n queryset = models.Topic.objects.all()\n serializer_class = serializers.TopicSerializer\n permission_classes = IsAuthenticatedOrReadOnly, ForumPermissions\n\n def get_permissions(self):\n if self.request.user.is_authenticated():\n if self.request.user.user.is_troll:\n self.permission_classes = [IsReadOnly]\n return super(TopicViewSet, self).get_permissions()\n\n def perform_create(self, serializer):\n request = self.request\n is_my_user = int(request.data['user']) == request.user.id\n if is_my_user or request.user.is_superuser:\n forum_id = request.data['forum']\n forum = get_object_or_404(models.Forum, pk=forum_id)\n category = forum.category.name\n if utils.user_can_create_topic(category, forum, request.user):\n if serializer.is_valid():\n serializer = utils.check_moderate_topic_email(request,\n forum, serializer)\n topic = serializer.save()\n else:\n return Response(serializer.errors, status=status.\n HTTP_400_BAD_REQUEST)\n photo = utils.get_photo_profile(request.user.id)\n username = request.user.username\n forum_name = forum.name\n list_us = utils.get_moderators_and_send_notification_topic(\n request, forum, topic)\n data = realtime.data_base_realtime(topic, photo, forum_name,\n username)\n data['is_topic'] = True\n data['is_comment'] = False\n realtime.new_notification(data, list_us)\n return Response(serializer.data, status=status.HTTP_201_CREATED\n )\n else:\n raise PermissionDenied({'message':\n \"You don't have permission to access\"})\n else:\n raise PermissionDenied({'message': 'Not your user'})\n\n\nclass RegisterViewSet(viewsets.ModelViewSet):\n queryset = 
models.Register.objects.all()\n serializer_class = serializers.RegisterSerializer\n permission_classes = IsAuthenticatedOrReadOnly, ForumPermissions\n\n def get_permissions(self):\n if self.request.user.is_authenticated():\n if self.request.user.user.is_troll:\n self.permission_classes = [IsReadOnly]\n return super(RegisterViewSet, self).get_permissions()\n\n def create(self, request, **kwargs):\n is_my_user = int(request.data['user']) == request.user.id\n if is_my_user or request.user.is_superuser:\n forum_id = request.data['forum']\n exists_register = models.Register.objects.filter(forum_id=\n forum_id, user=request.user)\n if exists_register.count() == 0:\n return super(RegisterViewSet, self).create(request, **kwargs)\n else:\n raise PermissionDenied({'message':\n 'You are already Registered'})\n else:\n raise PermissionDenied({'message': 'Not your user'})\n\n\nclass CommentViewSet(viewsets.ModelViewSet):\n queryset = models.Comment.objects.all()\n serializer_class = serializers.CommentSerializer\n permission_classes = IsAuthenticatedOrReadOnly, ForumPermissions\n\n def get_permissions(self):\n if self.request.user.is_authenticated():\n if self.request.user.user.is_troll:\n self.permission_classes = [IsReadOnly]\n return super(CommentViewSet, self).get_permissions()\n\n def perform_create(self, serializer):\n request = self.request\n is_my_user = int(request.data['user']) == request.user.id\n if is_my_user or request.user.is_superuser:\n if serializer.is_valid():\n comment = serializer.save()\n else:\n return Response(serializer.errors, status=status.\n HTTP_400_BAD_REQUEST)\n topic_id = request.data['topic']\n topic = get_object_or_404(models.Topic, pk=topic_id)\n photo = utils.get_photo_profile(request.user.id)\n username = request.user.username\n forum = topic.forum.name\n params = utils.get_users_and_send_notification_comment(request,\n topic, comment)\n list_us = params['list_us']\n list_email = params['list_email']\n url = reverse_lazy('topic', 
kwargs={'category': topic.forum.\n category, 'forum': forum, 'slug': topic.slug, 'idtopic':\n str(topic.idtopic)})\n utils.send_mail_comment(str(url), list_email)\n data = realtime.data_base_realtime(topic, photo, forum, username)\n data['is_topic'] = False\n data['is_comment'] = True\n realtime.new_notification(data, list_us)\n comment_description = request.data['description']\n realtime.new_comment(data, comment_description)\n return Response(serializer.data, status=status.HTTP_201_CREATED)\n else:\n raise PermissionDenied({'message': 'Not your user'})\n\n\nclass ProfileViewSet(viewsets.ReadOnlyModelViewSet):\n queryset = models.Profile.objects.all()\n serializer_class = serializers.ProfileSerializer\n",
"step-4": "<mask token>\n\n\nclass UserViewSet(viewsets.ReadOnlyModelViewSet):\n User = get_user_model()\n queryset = User.objects.all()\n serializer_class = serializers.UserSerializer\n lookup_field = 'username'\n\n\nclass CategoryViewSet(viewsets.ReadOnlyModelViewSet):\n queryset = models.Category.objects.all()\n serializer_class = serializers.CategorySerializer\n\n\nclass ForumViewSet(viewsets.ReadOnlyModelViewSet):\n queryset = models.Forum.objects.all()\n serializer_class = serializers.ForumSerializer\n\n\nclass TopicViewSet(viewsets.ModelViewSet):\n queryset = models.Topic.objects.all()\n serializer_class = serializers.TopicSerializer\n permission_classes = IsAuthenticatedOrReadOnly, ForumPermissions\n\n def get_permissions(self):\n if self.request.user.is_authenticated():\n if self.request.user.user.is_troll:\n self.permission_classes = [IsReadOnly]\n return super(TopicViewSet, self).get_permissions()\n\n def perform_create(self, serializer):\n request = self.request\n is_my_user = int(request.data['user']) == request.user.id\n if is_my_user or request.user.is_superuser:\n forum_id = request.data['forum']\n forum = get_object_or_404(models.Forum, pk=forum_id)\n category = forum.category.name\n if utils.user_can_create_topic(category, forum, request.user):\n if serializer.is_valid():\n serializer = utils.check_moderate_topic_email(request,\n forum, serializer)\n topic = serializer.save()\n else:\n return Response(serializer.errors, status=status.\n HTTP_400_BAD_REQUEST)\n photo = utils.get_photo_profile(request.user.id)\n username = request.user.username\n forum_name = forum.name\n list_us = utils.get_moderators_and_send_notification_topic(\n request, forum, topic)\n data = realtime.data_base_realtime(topic, photo, forum_name,\n username)\n data['is_topic'] = True\n data['is_comment'] = False\n realtime.new_notification(data, list_us)\n return Response(serializer.data, status=status.HTTP_201_CREATED\n )\n else:\n raise PermissionDenied({'message':\n \"You 
don't have permission to access\"})\n else:\n raise PermissionDenied({'message': 'Not your user'})\n\n\nclass RegisterViewSet(viewsets.ModelViewSet):\n queryset = models.Register.objects.all()\n serializer_class = serializers.RegisterSerializer\n permission_classes = IsAuthenticatedOrReadOnly, ForumPermissions\n\n def get_permissions(self):\n if self.request.user.is_authenticated():\n if self.request.user.user.is_troll:\n self.permission_classes = [IsReadOnly]\n return super(RegisterViewSet, self).get_permissions()\n\n def create(self, request, **kwargs):\n is_my_user = int(request.data['user']) == request.user.id\n if is_my_user or request.user.is_superuser:\n forum_id = request.data['forum']\n exists_register = models.Register.objects.filter(forum_id=\n forum_id, user=request.user)\n if exists_register.count() == 0:\n return super(RegisterViewSet, self).create(request, **kwargs)\n else:\n raise PermissionDenied({'message':\n 'You are already Registered'})\n else:\n raise PermissionDenied({'message': 'Not your user'})\n\n\nclass CommentViewSet(viewsets.ModelViewSet):\n queryset = models.Comment.objects.all()\n serializer_class = serializers.CommentSerializer\n permission_classes = IsAuthenticatedOrReadOnly, ForumPermissions\n\n def get_permissions(self):\n if self.request.user.is_authenticated():\n if self.request.user.user.is_troll:\n self.permission_classes = [IsReadOnly]\n return super(CommentViewSet, self).get_permissions()\n\n def perform_create(self, serializer):\n request = self.request\n is_my_user = int(request.data['user']) == request.user.id\n if is_my_user or request.user.is_superuser:\n if serializer.is_valid():\n comment = serializer.save()\n else:\n return Response(serializer.errors, status=status.\n HTTP_400_BAD_REQUEST)\n topic_id = request.data['topic']\n topic = get_object_or_404(models.Topic, pk=topic_id)\n photo = utils.get_photo_profile(request.user.id)\n username = request.user.username\n forum = topic.forum.name\n params = 
utils.get_users_and_send_notification_comment(request,\n topic, comment)\n list_us = params['list_us']\n list_email = params['list_email']\n url = reverse_lazy('topic', kwargs={'category': topic.forum.\n category, 'forum': forum, 'slug': topic.slug, 'idtopic':\n str(topic.idtopic)})\n utils.send_mail_comment(str(url), list_email)\n data = realtime.data_base_realtime(topic, photo, forum, username)\n data['is_topic'] = False\n data['is_comment'] = True\n realtime.new_notification(data, list_us)\n comment_description = request.data['description']\n realtime.new_comment(data, comment_description)\n return Response(serializer.data, status=status.HTTP_201_CREATED)\n else:\n raise PermissionDenied({'message': 'Not your user'})\n\n\nclass ProfileViewSet(viewsets.ReadOnlyModelViewSet):\n queryset = models.Profile.objects.all()\n serializer_class = serializers.ProfileSerializer\n",
"step-5": "from django.contrib.auth import get_user_model\nfrom django.core.urlresolvers import reverse_lazy\nfrom django.shortcuts import get_object_or_404\n\nfrom rest_framework import status, viewsets\nfrom rest_framework.exceptions import PermissionDenied\nfrom rest_framework.permissions import IsAuthenticatedOrReadOnly\nfrom rest_framework.response import Response\n\nfrom musette import models, realtime, utils\nfrom musette.api import serializers\nfrom musette.api.permissions import ForumPermissions, IsReadOnly\n\n\n# ViewSets for user\nclass UserViewSet(viewsets.ReadOnlyModelViewSet):\n User = get_user_model()\n queryset = User.objects.all()\n serializer_class = serializers.UserSerializer\n lookup_field = 'username'\n\n\n# ViewSets for categiry\nclass CategoryViewSet(viewsets.ReadOnlyModelViewSet):\n queryset = models.Category.objects.all()\n serializer_class = serializers.CategorySerializer\n\n\n# ViewSets for forum\nclass ForumViewSet(viewsets.ReadOnlyModelViewSet):\n queryset = models.Forum.objects.all()\n serializer_class = serializers.ForumSerializer\n\n\n# ViewSets for topic\nclass TopicViewSet(viewsets.ModelViewSet):\n queryset = models.Topic.objects.all()\n serializer_class = serializers.TopicSerializer\n permission_classes = (\n IsAuthenticatedOrReadOnly, ForumPermissions,\n )\n\n def get_permissions(self):\n # If is troll then only is read only\n if self.request.user.is_authenticated():\n if self.request.user.user.is_troll:\n self.permission_classes = [IsReadOnly, ]\n return super(TopicViewSet, self).get_permissions()\n\n def perform_create(self, serializer):\n request = self.request\n is_my_user = int(request.data['user']) == request.user.id\n\n # If is my user or is superuser can create\n if is_my_user or request.user.is_superuser:\n forum_id = request.data['forum']\n forum = get_object_or_404(models.Forum, pk=forum_id)\n category = forum.category.name\n # If has permissions\n if utils.user_can_create_topic(category, forum, request.user):\n # Save 
the record topic\n if serializer.is_valid():\n # If the forum is moderate send email\n serializer = utils.check_moderate_topic_email(\n request, forum, serializer\n )\n # Save record\n topic = serializer.save()\n else:\n return Response(\n serializer.errors,\n status=status.HTTP_400_BAD_REQUEST\n )\n\n # Parameters for realtime\n photo = utils.get_photo_profile(request.user.id)\n username = request.user.username\n forum_name = forum.name\n\n # Get moderators forum and send notification\n list_us = utils.get_moderators_and_send_notification_topic(\n request, forum, topic\n )\n\n # Data necessary for realtime\n data = realtime.data_base_realtime(\n topic, photo, forum_name, username\n )\n data['is_topic'] = True\n data['is_comment'] = False\n\n # Send new notification realtime\n realtime.new_notification(data, list_us)\n\n return Response(\n serializer.data, status=status.HTTP_201_CREATED\n )\n else:\n raise PermissionDenied({\n \"message\": \"You don't have permission to access\"\n })\n else:\n raise PermissionDenied({\n \"message\": \"Not your user\"\n })\n\n\n# ViewSets for register\nclass RegisterViewSet(viewsets.ModelViewSet):\n queryset = models.Register.objects.all()\n serializer_class = serializers.RegisterSerializer\n permission_classes = (IsAuthenticatedOrReadOnly, ForumPermissions,)\n\n def get_permissions(self):\n # If is troll then only is read only\n if self.request.user.is_authenticated():\n if self.request.user.user.is_troll:\n self.permission_classes = [IsReadOnly, ]\n return super(RegisterViewSet, self).get_permissions()\n\n def create(self, request, **kwargs):\n is_my_user = int(request.data['user']) == request.user.id\n # If is my user or is superuser can create\n if is_my_user or request.user.is_superuser:\n forum_id = request.data['forum']\n exists_register = models.Register.objects.filter(\n forum_id=forum_id, user=request.user\n )\n\n # If the register not exists\n if exists_register.count() == 0:\n return super(RegisterViewSet, 
self).create(request, **kwargs)\n else:\n raise PermissionDenied({\n \"message\": \"You are already Registered\"\n })\n else:\n raise PermissionDenied({\n \"message\": \"Not your user\"\n })\n\n\n# ViewSets for comment\nclass CommentViewSet(viewsets.ModelViewSet):\n queryset = models.Comment.objects.all()\n serializer_class = serializers.CommentSerializer\n permission_classes = (IsAuthenticatedOrReadOnly, ForumPermissions,)\n\n def get_permissions(self):\n # If is troll then only is read only\n if self.request.user.is_authenticated():\n if self.request.user.user.is_troll:\n self.permission_classes = [IsReadOnly, ]\n return super(CommentViewSet, self).get_permissions()\n\n def perform_create(self, serializer):\n request = self.request\n is_my_user = int(request.data['user']) == request.user.id\n # If is my user or is superuser can create\n if is_my_user or request.user.is_superuser:\n # Save the record comment\n if serializer.is_valid():\n comment = serializer.save()\n else:\n return Response(\n serializer.errors,\n status=status.HTTP_400_BAD_REQUEST\n )\n\n topic_id = request.data['topic']\n topic = get_object_or_404(models.Topic, pk=topic_id)\n\n # Parameters for notification comments\n photo = utils.get_photo_profile(request.user.id)\n username = request.user.username\n forum = topic.forum.name\n\n # Send notifications comment\n params = utils.get_users_and_send_notification_comment(\n request, topic, comment\n )\n list_us = params['list_us']\n list_email = params['list_email']\n\n # Get url for email\n url = reverse_lazy('topic', kwargs={\n 'category': topic.forum.category, 'forum': forum,\n 'slug': topic.slug, 'idtopic': str(topic.idtopic)\n })\n\n # Send e mail\n utils.send_mail_comment(str(url), list_email)\n\n # Data necessary for realtime\n data = realtime.data_base_realtime(topic, photo, forum, username)\n data['is_topic'] = False\n data['is_comment'] = True\n\n # Send new notification realtime\n realtime.new_notification(data, list_us)\n\n # Send new 
comment in realtime\n comment_description = request.data['description']\n realtime.new_comment(data, comment_description)\n\n return Response(\n serializer.data, status=status.HTTP_201_CREATED\n )\n else:\n raise PermissionDenied({\n \"message\": \"Not your user\"\n })\n\n\n# ViewSets for profile\nclass ProfileViewSet(viewsets.ReadOnlyModelViewSet):\n queryset = models.Profile.objects.all()\n serializer_class = serializers.ProfileSerializer\n",
"step-ids": [
10,
17,
18,
20,
22
]
}
|
[
10,
17,
18,
20,
22
] |
# Generated by Django 2.2.17 on 2020-12-05 07:43
from django.db import migrations, models
class Migration(migrations.Migration):
    """Make both identification images optional (allow blank/null)."""
    dependencies = [
        ('service', '0001_initial'),
    ]
    operations = [
        # Allow the ID-card photo to be omitted.
        migrations.AlterField(
            model_name='identification',
            name='id_card_img',
            field=models.ImageField(blank=True, null=True, upload_to='images/img_card/'),
        ),
        # Allow the selfie photo to be omitted.
        migrations.AlterField(
            model_name='identification',
            name='selfie_img',
            field=models.ImageField(blank=True, null=True, upload_to='images/img_selfie/'),
        ),
    ]
|
normal
|
{
"blob_id": "b6a0a49e05fbc0ac7673d6c9e8ca4d263c8bb5cd",
"index": 7132,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('service', '0001_initial')]\n operations = [migrations.AlterField(model_name='identification', name=\n 'id_card_img', field=models.ImageField(blank=True, null=True,\n upload_to='images/img_card/')), migrations.AlterField(model_name=\n 'identification', name='selfie_img', field=models.ImageField(blank=\n True, null=True, upload_to='images/img_selfie/'))]\n",
"step-4": "from django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('service', '0001_initial')]\n operations = [migrations.AlterField(model_name='identification', name=\n 'id_card_img', field=models.ImageField(blank=True, null=True,\n upload_to='images/img_card/')), migrations.AlterField(model_name=\n 'identification', name='selfie_img', field=models.ImageField(blank=\n True, null=True, upload_to='images/img_selfie/'))]\n",
"step-5": "# Generated by Django 2.2.17 on 2020-12-05 07:43\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('service', '0001_initial'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='identification',\n name='id_card_img',\n field=models.ImageField(blank=True, null=True, upload_to='images/img_card/'),\n ),\n migrations.AlterField(\n model_name='identification',\n name='selfie_img',\n field=models.ImageField(blank=True, null=True, upload_to='images/img_selfie/'),\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from cryptography.exceptions import UnsupportedAlgorithm
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.serialization import load_ssh_public_key
from ingredients_http.schematics.types import ArrowType, KubeName
from schematics import Model
from schematics.exceptions import ValidationError
from schematics.types import UUIDType, IntType, StringType
from deli.kubernetes.resources.v1alpha1.keypair.keypair import Keypair
class ParamsKeypair(Model):
    """Path parameters identifying a single keypair by its name."""
    keypair_name = KubeName(required=True)
class ParamsListKeypair(Model):
    """Pagination parameters for listing keypairs."""
    # Page size, capped at 100.
    limit = IntType(default=100, max_value=100, min_value=1)
    # Pagination cursor — presumably the id of the last keypair from the
    # previous page; confirm against the list handler.
    marker = UUIDType()
class RequestCreateKeypair(Model):
    """Request body for creating a keypair: a name plus an SSH public key."""
    name = KubeName(required=True, min_length=3)
    public_key = StringType(required=True)

    def validate_public_key(self, data, value):
        """Reject values that do not parse as a supported OpenSSH public key."""
        encoded = value.encode()
        try:
            # Parsing is the whole check; the loaded key object is discarded.
            load_ssh_public_key(encoded, default_backend())
        except UnsupportedAlgorithm:
            raise ValidationError("public_key serialization type is not supported")
        except ValueError:
            raise ValidationError("public_key could not be decoded or is not in the proper format")
        return value
class ResponseKeypair(Model):
    """API representation of a keypair record."""
    name = KubeName(required=True, min_length=3)
    public_key = StringType(required=True)
    created_at = ArrowType(required=True)
    updated_at = ArrowType(required=True)

    @classmethod
    def from_database(cls, keypair: Keypair):
        """Build a response model by copying the exposed fields off *keypair*."""
        model = cls()
        # Mirror each exposed attribute straight from the database object.
        for attr in ("name", "public_key", "created_at", "updated_at"):
            setattr(model, attr, getattr(keypair, attr))
        return model
|
normal
|
{
"blob_id": "a521220ac287a840b5c69e2d0f33daa588132083",
"index": 4983,
"step-1": "<mask token>\n\n\nclass RequestCreateKeypair(Model):\n <mask token>\n <mask token>\n <mask token>\n\n\nclass ResponseKeypair(Model):\n name = KubeName(required=True, min_length=3)\n public_key = StringType(required=True)\n created_at = ArrowType(required=True)\n updated_at = ArrowType(required=True)\n\n @classmethod\n def from_database(cls, keypair: Keypair):\n model = cls()\n model.name = keypair.name\n model.public_key = keypair.public_key\n model.created_at = keypair.created_at\n model.updated_at = keypair.updated_at\n return model\n",
"step-2": "<mask token>\n\n\nclass RequestCreateKeypair(Model):\n name = KubeName(required=True, min_length=3)\n public_key = StringType(required=True)\n\n def validate_public_key(self, data, value):\n try:\n load_ssh_public_key(value.encode(), default_backend())\n except ValueError:\n raise ValidationError(\n 'public_key could not be decoded or is not in the proper format'\n )\n except UnsupportedAlgorithm:\n raise ValidationError(\n 'public_key serialization type is not supported')\n return value\n\n\nclass ResponseKeypair(Model):\n name = KubeName(required=True, min_length=3)\n public_key = StringType(required=True)\n created_at = ArrowType(required=True)\n updated_at = ArrowType(required=True)\n\n @classmethod\n def from_database(cls, keypair: Keypair):\n model = cls()\n model.name = keypair.name\n model.public_key = keypair.public_key\n model.created_at = keypair.created_at\n model.updated_at = keypair.updated_at\n return model\n",
"step-3": "<mask token>\n\n\nclass ParamsKeypair(Model):\n keypair_name = KubeName(required=True)\n\n\nclass ParamsListKeypair(Model):\n limit = IntType(default=100, max_value=100, min_value=1)\n marker = UUIDType()\n\n\nclass RequestCreateKeypair(Model):\n name = KubeName(required=True, min_length=3)\n public_key = StringType(required=True)\n\n def validate_public_key(self, data, value):\n try:\n load_ssh_public_key(value.encode(), default_backend())\n except ValueError:\n raise ValidationError(\n 'public_key could not be decoded or is not in the proper format'\n )\n except UnsupportedAlgorithm:\n raise ValidationError(\n 'public_key serialization type is not supported')\n return value\n\n\nclass ResponseKeypair(Model):\n name = KubeName(required=True, min_length=3)\n public_key = StringType(required=True)\n created_at = ArrowType(required=True)\n updated_at = ArrowType(required=True)\n\n @classmethod\n def from_database(cls, keypair: Keypair):\n model = cls()\n model.name = keypair.name\n model.public_key = keypair.public_key\n model.created_at = keypair.created_at\n model.updated_at = keypair.updated_at\n return model\n",
"step-4": "from cryptography.exceptions import UnsupportedAlgorithm\nfrom cryptography.hazmat.backends import default_backend\nfrom cryptography.hazmat.primitives.serialization import load_ssh_public_key\nfrom ingredients_http.schematics.types import ArrowType, KubeName\nfrom schematics import Model\nfrom schematics.exceptions import ValidationError\nfrom schematics.types import UUIDType, IntType, StringType\nfrom deli.kubernetes.resources.v1alpha1.keypair.keypair import Keypair\n\n\nclass ParamsKeypair(Model):\n keypair_name = KubeName(required=True)\n\n\nclass ParamsListKeypair(Model):\n limit = IntType(default=100, max_value=100, min_value=1)\n marker = UUIDType()\n\n\nclass RequestCreateKeypair(Model):\n name = KubeName(required=True, min_length=3)\n public_key = StringType(required=True)\n\n def validate_public_key(self, data, value):\n try:\n load_ssh_public_key(value.encode(), default_backend())\n except ValueError:\n raise ValidationError(\n 'public_key could not be decoded or is not in the proper format'\n )\n except UnsupportedAlgorithm:\n raise ValidationError(\n 'public_key serialization type is not supported')\n return value\n\n\nclass ResponseKeypair(Model):\n name = KubeName(required=True, min_length=3)\n public_key = StringType(required=True)\n created_at = ArrowType(required=True)\n updated_at = ArrowType(required=True)\n\n @classmethod\n def from_database(cls, keypair: Keypair):\n model = cls()\n model.name = keypair.name\n model.public_key = keypair.public_key\n model.created_at = keypair.created_at\n model.updated_at = keypair.updated_at\n return model\n",
"step-5": "from cryptography.exceptions import UnsupportedAlgorithm\nfrom cryptography.hazmat.backends import default_backend\nfrom cryptography.hazmat.primitives.serialization import load_ssh_public_key\nfrom ingredients_http.schematics.types import ArrowType, KubeName\nfrom schematics import Model\nfrom schematics.exceptions import ValidationError\nfrom schematics.types import UUIDType, IntType, StringType\n\nfrom deli.kubernetes.resources.v1alpha1.keypair.keypair import Keypair\n\n\nclass ParamsKeypair(Model):\n keypair_name = KubeName(required=True)\n\n\nclass ParamsListKeypair(Model):\n limit = IntType(default=100, max_value=100, min_value=1)\n marker = UUIDType()\n\n\nclass RequestCreateKeypair(Model):\n name = KubeName(required=True, min_length=3)\n public_key = StringType(required=True)\n\n def validate_public_key(self, data, value):\n try:\n load_ssh_public_key(value.encode(), default_backend())\n except ValueError:\n raise ValidationError(\"public_key could not be decoded or is not in the proper format\")\n except UnsupportedAlgorithm:\n raise ValidationError(\"public_key serialization type is not supported\")\n\n return value\n\n\nclass ResponseKeypair(Model):\n name = KubeName(required=True, min_length=3)\n public_key = StringType(required=True)\n created_at = ArrowType(required=True)\n updated_at = ArrowType(required=True)\n\n @classmethod\n def from_database(cls, keypair: Keypair):\n model = cls()\n model.name = keypair.name\n model.public_key = keypair.public_key\n model.created_at = keypair.created_at\n model.updated_at = keypair.updated_at\n\n return model\n",
"step-ids": [
4,
6,
10,
11,
12
]
}
|
[
4,
6,
10,
11,
12
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class Solution:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class Solution:
def combo(self, n):
if n == 0:
return ['0']
elif n == 1:
return ['0', '1']
else:
prev = self.combo(n - 1)
new = []
for combo in prev:
new.append('0' + combo)
prev.reverse()
for combo in prev:
new.append('1' + combo)
return new
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class Solution:
def combo(self, n):
if n == 0:
return ['0']
elif n == 1:
return ['0', '1']
else:
prev = self.combo(n - 1)
new = []
for combo in prev:
new.append('0' + combo)
prev.reverse()
for combo in prev:
new.append('1' + combo)
return new
def grayCode(self, n: int) ->List[int]:
combo = self.combo(n)
ret = []
for c in combo:
ret.append(int(c, 2))
return ret
<|reserved_special_token_1|>
class Solution:
# This would generate all permutations, but that's not what this question asks for
# def combo(self, cur, n, ret, arr):
# if cur == n:
# arr.append(ret)
# return
# self.combo(cur+1, n, ret + "1", arr)
# self.combo(cur+1, n, ret + "0", arr)
def combo(self, n):
if n == 0:
return ['0']
elif n == 1:
return ['0','1']
else:
prev = self.combo(n-1)
new = []
for combo in prev:
new.append('0' + combo)
prev.reverse()
for combo in prev:
new.append('1' + combo)
return new
def grayCode(self, n: int) -> List[int]:
combo = self.combo(n)
ret = []
for c in combo:
ret.append(int(c, 2))
return ret
|
flexible
|
{
"blob_id": "e9a929dfef327737b54723579d3c57884fe61057",
"index": 7061,
"step-1": "<mask token>\n",
"step-2": "class Solution:\n <mask token>\n <mask token>\n",
"step-3": "class Solution:\n\n def combo(self, n):\n if n == 0:\n return ['0']\n elif n == 1:\n return ['0', '1']\n else:\n prev = self.combo(n - 1)\n new = []\n for combo in prev:\n new.append('0' + combo)\n prev.reverse()\n for combo in prev:\n new.append('1' + combo)\n return new\n <mask token>\n",
"step-4": "class Solution:\n\n def combo(self, n):\n if n == 0:\n return ['0']\n elif n == 1:\n return ['0', '1']\n else:\n prev = self.combo(n - 1)\n new = []\n for combo in prev:\n new.append('0' + combo)\n prev.reverse()\n for combo in prev:\n new.append('1' + combo)\n return new\n\n def grayCode(self, n: int) ->List[int]:\n combo = self.combo(n)\n ret = []\n for c in combo:\n ret.append(int(c, 2))\n return ret\n",
"step-5": "class Solution:\n\t# This would generate all permutations, but that's not what this question asks for\n # def combo(self, cur, n, ret, arr):\n # if cur == n:\n # arr.append(ret)\n # return\n # self.combo(cur+1, n, ret + \"1\", arr)\n # self.combo(cur+1, n, ret + \"0\", arr) \n def combo(self, n):\n if n == 0:\n return ['0']\n elif n == 1:\n return ['0','1']\n else:\n prev = self.combo(n-1)\n new = []\n for combo in prev:\n new.append('0' + combo)\n prev.reverse()\n for combo in prev:\n new.append('1' + combo)\n return new\n def grayCode(self, n: int) -> List[int]:\n combo = self.combo(n)\n ret = []\n for c in combo:\n ret.append(int(c, 2))\n return ret\n ",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
def read_assembly_file(file: str) ->List:
if not os.path.isfile(file):
return [-1, -1, -1, -1, -1, -1]
with open(file, 'r') as f:
file_content_string = f.read()
if 'LKH_Contigs:\nLKH_Objective' in file_content_string:
lkh_gaps = -1
else:
lkh_gaps = len(file_content_string.split('LKH_Contigs:\n')[1].
split('\nLKH_Objective')[0].split('\n')) - 1
lkh_value = int(file_content_string.split('LKH_Objective_Value: ')[
1].split('\n')[0])
lkh_time = float(file_content_string.split('LKH_Time: ')[1].split(
'\n')[0])
if 'AP_Contigs:\nAP_Objective' in file_content_string:
ap_gaps = -1
else:
ap_gaps = len(file_content_string.split('AP_Contigs:\n')[1].
split('\nAP_Objective')[0].split('\n')) - 1
ap_value = int(file_content_string.split('AP_Objective_Value: ')[1]
.split('\n')[0])
ap_time = float(file_content_string.split('AP_Time: ')[1].split(
'\n')[0])
return [lkh_value, lkh_gaps, lkh_time, ap_value, ap_gaps, ap_time]
def read_fasta_stats_file(file: str) ->Dict:
with open(file, 'r') as f:
file_content_string = f.read()
actual_objective_value = int(file_content_string.split(
'Objective function value: ')[1].split('\n')[0])
actual_gaps = int(file_content_string.split('Actual gaps: ')[1].
split('\n')[0])
no_of_reads = int(file_content_string.split('Number of reads: ')[1]
.split('\n')[0])
return [no_of_reads, actual_objective_value, actual_gaps]
def write_assembly_stats(statsdict: Dict) ->None:
with open('/home/andreas/GDrive/workspace/sparsedata/assembly_stats.csv',
'w') as f:
f_csv = csv.writer(f, delimiter=',')
f_csv.writerow(['Genome', 'Coverage', 'AvgLength', 'Reads',
'ActualValue', 'ActualGaps', 'CalignLKHValue', 'CalignLKHGaps',
'CalignLKHTime', 'CalignAPValue', 'CalignAPGaps',
'CalignAPTime', 'CalignALKHValue', 'CalignALKHGaps',
'CalignALKHTime', 'CalignAAPValue', 'CalignAAPGaps',
'CalignAAPTime', 'CalignBLKHValue', 'CalignBLKHGaps',
'CalignBLKHTime', 'CalignBAPValue', 'CalignBAPGaps',
'CalignBAPTime'])
for ref_name in [ref1_name, ref2_name, ref3_name]:
for c in coverages:
for length in average_length_list:
val = stats_dict[ref_name, c, length]
row = [ref_name, c, length]
row += val['Actual']
row += val['Calign']
row += val['Calign25']
row += val['Calign50']
f_csv.writerow(row)
def write_assembly_stats_tex(statsdict: Dict) ->None:
with open('/home/andreas/GDrive/workspace/sparsedata/assembly_stats.tex',
'w') as f:
for ref_name in [ref1_name, ref2_name, ref3_name]:
if ref1_name == ref_name:
dashline_active = ''
else:
dashline_active = '\\hdashline\n'
f.write('{}\\bfseries {}\\\\\n'.format(dashline_active, ref_name))
for c in coverages:
f.write('$c = {}$\\\\\n'.format(c))
for length in average_length_list:
val = stats_dict[ref_name, c, length]
row = [length]
row += [val['Actual'][0]]
row += ['']
row += val['Actual'][1:]
row += ['']
row += [*val['Calign'][0:2], '{0:.2f}'.format(val[
'Calign'][2]), *val['Calign'][3:5], '{0:.2f}'.
format(val['Calign'][5])]
row += ['']
row += [*val['Calign25'][0:2], '{0:.2f}'.format(val[
'Calign25'][2]), *val['Calign25'][3:5], '{0:.2f}'.
format(val['Calign25'][5])]
row += ['']
row += [*val['Calign50'][0:2], '{0:.2f}'.format(val[
'Calign50'][2]), *val['Calign50'][3:5], '{0:.2f}'.
format(val['Calign50'][5])]
f.write(' & '.join([str(x) for x in row]) + '\\\\\n')
def write_assembly_stats2(statsdict: Dict) ->None:
with open('/home/andreas/GDrive/workspace/sparsedata/assembly_stats2.csv',
'w') as f:
f_csv = csv.writer(f, delimiter=',')
refs = [ref1_name, ref2_name]
f_csv.writerow(range(len(refs) * 9))
f_csv.writerow([stats_dict[ref_name, c, l]['Actual'][0] for
ref_name in refs for c in coverages for l in average_length_list])
f_csv.writerow([stats_dict[ref_name, c, l]['Actual'][1] for
ref_name in refs for c in coverages for l in average_length_list])
f_csv.writerow([stats_dict[ref_name, c, l]['Actual'][2] for
ref_name in refs for c in coverages for l in average_length_list])
for foo in ['Calign', 'Calign25', 'Calign50']:
for i in range(6):
if i in [2, 5]:
f_csv.writerow(['{0:.2f}'.format(stats_dict[ref_name, c,
l][foo][i]) for ref_name in refs for c in coverages for
l in average_length_list])
else:
f_csv.writerow([stats_dict[ref_name, c, l][foo][i] for
ref_name in refs for c in coverages for l in
average_length_list])
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def read_assembly_file(file: str) ->List:
if not os.path.isfile(file):
return [-1, -1, -1, -1, -1, -1]
with open(file, 'r') as f:
file_content_string = f.read()
if 'LKH_Contigs:\nLKH_Objective' in file_content_string:
lkh_gaps = -1
else:
lkh_gaps = len(file_content_string.split('LKH_Contigs:\n')[1].
split('\nLKH_Objective')[0].split('\n')) - 1
lkh_value = int(file_content_string.split('LKH_Objective_Value: ')[
1].split('\n')[0])
lkh_time = float(file_content_string.split('LKH_Time: ')[1].split(
'\n')[0])
if 'AP_Contigs:\nAP_Objective' in file_content_string:
ap_gaps = -1
else:
ap_gaps = len(file_content_string.split('AP_Contigs:\n')[1].
split('\nAP_Objective')[0].split('\n')) - 1
ap_value = int(file_content_string.split('AP_Objective_Value: ')[1]
.split('\n')[0])
ap_time = float(file_content_string.split('AP_Time: ')[1].split(
'\n')[0])
return [lkh_value, lkh_gaps, lkh_time, ap_value, ap_gaps, ap_time]
def read_fasta_stats_file(file: str) ->Dict:
with open(file, 'r') as f:
file_content_string = f.read()
actual_objective_value = int(file_content_string.split(
'Objective function value: ')[1].split('\n')[0])
actual_gaps = int(file_content_string.split('Actual gaps: ')[1].
split('\n')[0])
no_of_reads = int(file_content_string.split('Number of reads: ')[1]
.split('\n')[0])
return [no_of_reads, actual_objective_value, actual_gaps]
def write_assembly_stats(statsdict: Dict) ->None:
with open('/home/andreas/GDrive/workspace/sparsedata/assembly_stats.csv',
'w') as f:
f_csv = csv.writer(f, delimiter=',')
f_csv.writerow(['Genome', 'Coverage', 'AvgLength', 'Reads',
'ActualValue', 'ActualGaps', 'CalignLKHValue', 'CalignLKHGaps',
'CalignLKHTime', 'CalignAPValue', 'CalignAPGaps',
'CalignAPTime', 'CalignALKHValue', 'CalignALKHGaps',
'CalignALKHTime', 'CalignAAPValue', 'CalignAAPGaps',
'CalignAAPTime', 'CalignBLKHValue', 'CalignBLKHGaps',
'CalignBLKHTime', 'CalignBAPValue', 'CalignBAPGaps',
'CalignBAPTime'])
for ref_name in [ref1_name, ref2_name, ref3_name]:
for c in coverages:
for length in average_length_list:
val = stats_dict[ref_name, c, length]
row = [ref_name, c, length]
row += val['Actual']
row += val['Calign']
row += val['Calign25']
row += val['Calign50']
f_csv.writerow(row)
def write_assembly_stats_tex(statsdict: Dict) ->None:
with open('/home/andreas/GDrive/workspace/sparsedata/assembly_stats.tex',
'w') as f:
for ref_name in [ref1_name, ref2_name, ref3_name]:
if ref1_name == ref_name:
dashline_active = ''
else:
dashline_active = '\\hdashline\n'
f.write('{}\\bfseries {}\\\\\n'.format(dashline_active, ref_name))
for c in coverages:
f.write('$c = {}$\\\\\n'.format(c))
for length in average_length_list:
val = stats_dict[ref_name, c, length]
row = [length]
row += [val['Actual'][0]]
row += ['']
row += val['Actual'][1:]
row += ['']
row += [*val['Calign'][0:2], '{0:.2f}'.format(val[
'Calign'][2]), *val['Calign'][3:5], '{0:.2f}'.
format(val['Calign'][5])]
row += ['']
row += [*val['Calign25'][0:2], '{0:.2f}'.format(val[
'Calign25'][2]), *val['Calign25'][3:5], '{0:.2f}'.
format(val['Calign25'][5])]
row += ['']
row += [*val['Calign50'][0:2], '{0:.2f}'.format(val[
'Calign50'][2]), *val['Calign50'][3:5], '{0:.2f}'.
format(val['Calign50'][5])]
f.write(' & '.join([str(x) for x in row]) + '\\\\\n')
def write_assembly_stats2(statsdict: Dict) ->None:
with open('/home/andreas/GDrive/workspace/sparsedata/assembly_stats2.csv',
'w') as f:
f_csv = csv.writer(f, delimiter=',')
refs = [ref1_name, ref2_name]
f_csv.writerow(range(len(refs) * 9))
f_csv.writerow([stats_dict[ref_name, c, l]['Actual'][0] for
ref_name in refs for c in coverages for l in average_length_list])
f_csv.writerow([stats_dict[ref_name, c, l]['Actual'][1] for
ref_name in refs for c in coverages for l in average_length_list])
f_csv.writerow([stats_dict[ref_name, c, l]['Actual'][2] for
ref_name in refs for c in coverages for l in average_length_list])
for foo in ['Calign', 'Calign25', 'Calign50']:
for i in range(6):
if i in [2, 5]:
f_csv.writerow(['{0:.2f}'.format(stats_dict[ref_name, c,
l][foo][i]) for ref_name in refs for c in coverages for
l in average_length_list])
else:
f_csv.writerow([stats_dict[ref_name, c, l][foo][i] for
ref_name in refs for c in coverages for l in
average_length_list])
<|reserved_special_token_0|>
for ref_number in [1, 2, 3]:
for coverage in coverages:
for length in average_length_list:
ref_name = references[ref_number - 1]
dir = ('/home/andreas/GDrive/workspace/sparsedata/ref{}_c{}_l{}/'
.format(ref_number, coverage, length))
stats_dict[ref_name, coverage, length] = {'Actual':
read_fasta_stats_file(dir + 'fasta.stat'), 'Calign':
read_assembly_file(dir + 'calign.assembly'), 'Calign25':
read_assembly_file(dir + 'calign_0_{}.assembly'.format(
length // 4)), 'Calign50': read_assembly_file(dir +
'calign_0_{}.assembly'.format(length // 2))}
def write_whole_stats() ->None:
headers = ['CalignLKH', 'CalignAP', 'CalignALKH', 'CalignAAP',
'CalignBLKH', 'CalignBAP']
vals = {'CalignLKH': 0, 'CalignAP': 0, 'CalignALKH': 0, 'CalignAAP': 0,
'CalignBLKH': 0, 'CalignBAP': 0}
gaps = {'CalignLKH': 0, 'CalignAP': 0, 'CalignALKH': 0, 'CalignAAP': 0,
'CalignBLKH': 0, 'CalignBAP': 0}
both = {'CalignLKH': 0, 'CalignAP': 0, 'CalignALKH': 0, 'CalignAAP': 0,
'CalignBLKH': 0, 'CalignBAP': 0}
atspvsapval = {'CalignLKH': 0, 'CalignAP': 0, 'CalignALKH': 0,
'CalignAAP': 0, 'CalignBLKH': 0, 'CalignBAP': 0}
atspvsap = {'CalignLKH': 0, 'CalignAP': 0, 'CalignALKH': 0, 'CalignAAP':
0, 'CalignBLKH': 0, 'CalignBAP': 0}
with open(DIR + 'assembly_stats.csv', 'r') as f:
f_csv = csv.DictReader(f, delimiter=',')
for row in f_csv:
for elem in headers:
if row['ActualValue'] == row[elem + 'Value']:
vals[elem] += 1
if row['ActualGaps'] == row[elem + 'Gaps']:
gaps[elem] += 1
if row['ActualValue'] == row[elem + 'Value'] and row[
'ActualGaps'] == row[elem + 'Gaps']:
both[elem] += 1
if row['CalignLKHValue'] == row['CalignAPValue']:
atspvsapval['CalignLKH'] += 1
atspvsapval['CalignAP'] += 1
if row['CalignALKHValue'] == row['CalignAAPValue']:
atspvsapval['CalignALKH'] += 1
atspvsapval['CalignAAP'] += 1
if row['CalignBLKHValue'] == row['CalignBAPValue']:
atspvsapval['CalignBLKH'] += 1
atspvsapval['CalignBAP'] += 1
if row['CalignLKHValue'] == row['CalignAPValue'] and row[
'CalignLKHGaps'] == row['CalignAPGaps']:
atspvsap['CalignLKH'] += 1
atspvsap['CalignAP'] += 1
if row['CalignALKHValue'] == row['CalignAAPValue'] and row[
'CalignALKHGaps'] == row['CalignAAPGaps']:
atspvsap['CalignALKH'] += 1
atspvsap['CalignAAP'] += 1
if row['CalignBLKHValue'] == row['CalignBAPValue'] and row[
'CalignBLKHGaps'] == row['CalignBAPGaps']:
atspvsap['CalignBLKH'] += 1
atspvsap['CalignBAP'] += 1
with open(DIR + 'complete_stats.csv', 'w') as g:
g_csv = csv.DictWriter(g, delimiter='&', fieldnames=headers)
g_csv.writeheader()
g_csv.writerow(vals)
g_csv.writerow(gaps)
g_csv.writerow(both)
g_csv.writerow(atspvsapval)
g_csv.writerow(atspvsap)
write_assembly_stats(stats_dict)
write_assembly_stats2(stats_dict)
write_assembly_stats_tex(stats_dict)
write_whole_stats()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
HEADER = ['File', 'LKHContigs', 'LKHValue', 'LKHTime', 'APContigs',
'APValue', 'APTime', 'ActualObjectiveValue']
Assembly_Stats = namedtuple('Assembly_Stats', HEADER)
dir = (
'/home/andreas/GDrive/workspace/sparsedata/ref1shuffled_c5_l700/calign.assembly'
)
def read_assembly_file(file: str) ->List:
if not os.path.isfile(file):
return [-1, -1, -1, -1, -1, -1]
with open(file, 'r') as f:
file_content_string = f.read()
if 'LKH_Contigs:\nLKH_Objective' in file_content_string:
lkh_gaps = -1
else:
lkh_gaps = len(file_content_string.split('LKH_Contigs:\n')[1].
split('\nLKH_Objective')[0].split('\n')) - 1
lkh_value = int(file_content_string.split('LKH_Objective_Value: ')[
1].split('\n')[0])
lkh_time = float(file_content_string.split('LKH_Time: ')[1].split(
'\n')[0])
if 'AP_Contigs:\nAP_Objective' in file_content_string:
ap_gaps = -1
else:
ap_gaps = len(file_content_string.split('AP_Contigs:\n')[1].
split('\nAP_Objective')[0].split('\n')) - 1
ap_value = int(file_content_string.split('AP_Objective_Value: ')[1]
.split('\n')[0])
ap_time = float(file_content_string.split('AP_Time: ')[1].split(
'\n')[0])
return [lkh_value, lkh_gaps, lkh_time, ap_value, ap_gaps, ap_time]
def read_fasta_stats_file(file: str) ->Dict:
with open(file, 'r') as f:
file_content_string = f.read()
actual_objective_value = int(file_content_string.split(
'Objective function value: ')[1].split('\n')[0])
actual_gaps = int(file_content_string.split('Actual gaps: ')[1].
split('\n')[0])
no_of_reads = int(file_content_string.split('Number of reads: ')[1]
.split('\n')[0])
return [no_of_reads, actual_objective_value, actual_gaps]
def write_assembly_stats(statsdict: Dict) ->None:
with open('/home/andreas/GDrive/workspace/sparsedata/assembly_stats.csv',
'w') as f:
f_csv = csv.writer(f, delimiter=',')
f_csv.writerow(['Genome', 'Coverage', 'AvgLength', 'Reads',
'ActualValue', 'ActualGaps', 'CalignLKHValue', 'CalignLKHGaps',
'CalignLKHTime', 'CalignAPValue', 'CalignAPGaps',
'CalignAPTime', 'CalignALKHValue', 'CalignALKHGaps',
'CalignALKHTime', 'CalignAAPValue', 'CalignAAPGaps',
'CalignAAPTime', 'CalignBLKHValue', 'CalignBLKHGaps',
'CalignBLKHTime', 'CalignBAPValue', 'CalignBAPGaps',
'CalignBAPTime'])
for ref_name in [ref1_name, ref2_name, ref3_name]:
for c in coverages:
for length in average_length_list:
val = stats_dict[ref_name, c, length]
row = [ref_name, c, length]
row += val['Actual']
row += val['Calign']
row += val['Calign25']
row += val['Calign50']
f_csv.writerow(row)
def write_assembly_stats_tex(statsdict: Dict) ->None:
with open('/home/andreas/GDrive/workspace/sparsedata/assembly_stats.tex',
'w') as f:
for ref_name in [ref1_name, ref2_name, ref3_name]:
if ref1_name == ref_name:
dashline_active = ''
else:
dashline_active = '\\hdashline\n'
f.write('{}\\bfseries {}\\\\\n'.format(dashline_active, ref_name))
for c in coverages:
f.write('$c = {}$\\\\\n'.format(c))
for length in average_length_list:
val = stats_dict[ref_name, c, length]
row = [length]
row += [val['Actual'][0]]
row += ['']
row += val['Actual'][1:]
row += ['']
row += [*val['Calign'][0:2], '{0:.2f}'.format(val[
'Calign'][2]), *val['Calign'][3:5], '{0:.2f}'.
format(val['Calign'][5])]
row += ['']
row += [*val['Calign25'][0:2], '{0:.2f}'.format(val[
'Calign25'][2]), *val['Calign25'][3:5], '{0:.2f}'.
format(val['Calign25'][5])]
row += ['']
row += [*val['Calign50'][0:2], '{0:.2f}'.format(val[
'Calign50'][2]), *val['Calign50'][3:5], '{0:.2f}'.
format(val['Calign50'][5])]
f.write(' & '.join([str(x) for x in row]) + '\\\\\n')
def write_assembly_stats2(statsdict: Dict) ->None:
with open('/home/andreas/GDrive/workspace/sparsedata/assembly_stats2.csv',
'w') as f:
f_csv = csv.writer(f, delimiter=',')
refs = [ref1_name, ref2_name]
f_csv.writerow(range(len(refs) * 9))
f_csv.writerow([stats_dict[ref_name, c, l]['Actual'][0] for
ref_name in refs for c in coverages for l in average_length_list])
f_csv.writerow([stats_dict[ref_name, c, l]['Actual'][1] for
ref_name in refs for c in coverages for l in average_length_list])
f_csv.writerow([stats_dict[ref_name, c, l]['Actual'][2] for
ref_name in refs for c in coverages for l in average_length_list])
for foo in ['Calign', 'Calign25', 'Calign50']:
for i in range(6):
if i in [2, 5]:
f_csv.writerow(['{0:.2f}'.format(stats_dict[ref_name, c,
l][foo][i]) for ref_name in refs for c in coverages for
l in average_length_list])
else:
f_csv.writerow([stats_dict[ref_name, c, l][foo][i] for
ref_name in refs for c in coverages for l in
average_length_list])
assembly_stats_list = []
stats_dict = {}
for ref_number in [1, 2, 3]:
for coverage in coverages:
for length in average_length_list:
ref_name = references[ref_number - 1]
dir = ('/home/andreas/GDrive/workspace/sparsedata/ref{}_c{}_l{}/'
.format(ref_number, coverage, length))
stats_dict[ref_name, coverage, length] = {'Actual':
read_fasta_stats_file(dir + 'fasta.stat'), 'Calign':
read_assembly_file(dir + 'calign.assembly'), 'Calign25':
read_assembly_file(dir + 'calign_0_{}.assembly'.format(
length // 4)), 'Calign50': read_assembly_file(dir +
'calign_0_{}.assembly'.format(length // 2))}
def write_whole_stats() ->None:
headers = ['CalignLKH', 'CalignAP', 'CalignALKH', 'CalignAAP',
'CalignBLKH', 'CalignBAP']
vals = {'CalignLKH': 0, 'CalignAP': 0, 'CalignALKH': 0, 'CalignAAP': 0,
'CalignBLKH': 0, 'CalignBAP': 0}
gaps = {'CalignLKH': 0, 'CalignAP': 0, 'CalignALKH': 0, 'CalignAAP': 0,
'CalignBLKH': 0, 'CalignBAP': 0}
both = {'CalignLKH': 0, 'CalignAP': 0, 'CalignALKH': 0, 'CalignAAP': 0,
'CalignBLKH': 0, 'CalignBAP': 0}
atspvsapval = {'CalignLKH': 0, 'CalignAP': 0, 'CalignALKH': 0,
'CalignAAP': 0, 'CalignBLKH': 0, 'CalignBAP': 0}
atspvsap = {'CalignLKH': 0, 'CalignAP': 0, 'CalignALKH': 0, 'CalignAAP':
0, 'CalignBLKH': 0, 'CalignBAP': 0}
with open(DIR + 'assembly_stats.csv', 'r') as f:
f_csv = csv.DictReader(f, delimiter=',')
for row in f_csv:
for elem in headers:
if row['ActualValue'] == row[elem + 'Value']:
vals[elem] += 1
if row['ActualGaps'] == row[elem + 'Gaps']:
gaps[elem] += 1
if row['ActualValue'] == row[elem + 'Value'] and row[
'ActualGaps'] == row[elem + 'Gaps']:
both[elem] += 1
if row['CalignLKHValue'] == row['CalignAPValue']:
atspvsapval['CalignLKH'] += 1
atspvsapval['CalignAP'] += 1
if row['CalignALKHValue'] == row['CalignAAPValue']:
atspvsapval['CalignALKH'] += 1
atspvsapval['CalignAAP'] += 1
if row['CalignBLKHValue'] == row['CalignBAPValue']:
atspvsapval['CalignBLKH'] += 1
atspvsapval['CalignBAP'] += 1
if row['CalignLKHValue'] == row['CalignAPValue'] and row[
'CalignLKHGaps'] == row['CalignAPGaps']:
atspvsap['CalignLKH'] += 1
atspvsap['CalignAP'] += 1
if row['CalignALKHValue'] == row['CalignAAPValue'] and row[
'CalignALKHGaps'] == row['CalignAAPGaps']:
atspvsap['CalignALKH'] += 1
atspvsap['CalignAAP'] += 1
if row['CalignBLKHValue'] == row['CalignBAPValue'] and row[
'CalignBLKHGaps'] == row['CalignBAPGaps']:
atspvsap['CalignBLKH'] += 1
atspvsap['CalignBAP'] += 1
with open(DIR + 'complete_stats.csv', 'w') as g:
g_csv = csv.DictWriter(g, delimiter='&', fieldnames=headers)
g_csv.writeheader()
g_csv.writerow(vals)
g_csv.writerow(gaps)
g_csv.writerow(both)
g_csv.writerow(atspvsapval)
g_csv.writerow(atspvsap)
write_assembly_stats(stats_dict)
write_assembly_stats2(stats_dict)
write_assembly_stats_tex(stats_dict)
write_whole_stats()
<|reserved_special_token_1|>
import csv
import os
from collections import namedtuple
from typing import List, Dict
from config import *
HEADER = ['File', 'LKHContigs', 'LKHValue', 'LKHTime', 'APContigs',
'APValue', 'APTime', 'ActualObjectiveValue']
Assembly_Stats = namedtuple('Assembly_Stats', HEADER)
dir = (
'/home/andreas/GDrive/workspace/sparsedata/ref1shuffled_c5_l700/calign.assembly'
)
def read_assembly_file(file: str) ->List:
if not os.path.isfile(file):
return [-1, -1, -1, -1, -1, -1]
with open(file, 'r') as f:
file_content_string = f.read()
if 'LKH_Contigs:\nLKH_Objective' in file_content_string:
lkh_gaps = -1
else:
lkh_gaps = len(file_content_string.split('LKH_Contigs:\n')[1].
split('\nLKH_Objective')[0].split('\n')) - 1
lkh_value = int(file_content_string.split('LKH_Objective_Value: ')[
1].split('\n')[0])
lkh_time = float(file_content_string.split('LKH_Time: ')[1].split(
'\n')[0])
if 'AP_Contigs:\nAP_Objective' in file_content_string:
ap_gaps = -1
else:
ap_gaps = len(file_content_string.split('AP_Contigs:\n')[1].
split('\nAP_Objective')[0].split('\n')) - 1
ap_value = int(file_content_string.split('AP_Objective_Value: ')[1]
.split('\n')[0])
ap_time = float(file_content_string.split('AP_Time: ')[1].split(
'\n')[0])
return [lkh_value, lkh_gaps, lkh_time, ap_value, ap_gaps, ap_time]
def read_fasta_stats_file(file: str) ->Dict:
with open(file, 'r') as f:
file_content_string = f.read()
actual_objective_value = int(file_content_string.split(
'Objective function value: ')[1].split('\n')[0])
actual_gaps = int(file_content_string.split('Actual gaps: ')[1].
split('\n')[0])
no_of_reads = int(file_content_string.split('Number of reads: ')[1]
.split('\n')[0])
return [no_of_reads, actual_objective_value, actual_gaps]
def write_assembly_stats(statsdict: Dict) ->None:
with open('/home/andreas/GDrive/workspace/sparsedata/assembly_stats.csv',
'w') as f:
f_csv = csv.writer(f, delimiter=',')
f_csv.writerow(['Genome', 'Coverage', 'AvgLength', 'Reads',
'ActualValue', 'ActualGaps', 'CalignLKHValue', 'CalignLKHGaps',
'CalignLKHTime', 'CalignAPValue', 'CalignAPGaps',
'CalignAPTime', 'CalignALKHValue', 'CalignALKHGaps',
'CalignALKHTime', 'CalignAAPValue', 'CalignAAPGaps',
'CalignAAPTime', 'CalignBLKHValue', 'CalignBLKHGaps',
'CalignBLKHTime', 'CalignBAPValue', 'CalignBAPGaps',
'CalignBAPTime'])
for ref_name in [ref1_name, ref2_name, ref3_name]:
for c in coverages:
for length in average_length_list:
val = stats_dict[ref_name, c, length]
row = [ref_name, c, length]
row += val['Actual']
row += val['Calign']
row += val['Calign25']
row += val['Calign50']
f_csv.writerow(row)
def write_assembly_stats_tex(statsdict: Dict) ->None:
with open('/home/andreas/GDrive/workspace/sparsedata/assembly_stats.tex',
'w') as f:
for ref_name in [ref1_name, ref2_name, ref3_name]:
if ref1_name == ref_name:
dashline_active = ''
else:
dashline_active = '\\hdashline\n'
f.write('{}\\bfseries {}\\\\\n'.format(dashline_active, ref_name))
for c in coverages:
f.write('$c = {}$\\\\\n'.format(c))
for length in average_length_list:
val = stats_dict[ref_name, c, length]
row = [length]
row += [val['Actual'][0]]
row += ['']
row += val['Actual'][1:]
row += ['']
row += [*val['Calign'][0:2], '{0:.2f}'.format(val[
'Calign'][2]), *val['Calign'][3:5], '{0:.2f}'.
format(val['Calign'][5])]
row += ['']
row += [*val['Calign25'][0:2], '{0:.2f}'.format(val[
'Calign25'][2]), *val['Calign25'][3:5], '{0:.2f}'.
format(val['Calign25'][5])]
row += ['']
row += [*val['Calign50'][0:2], '{0:.2f}'.format(val[
'Calign50'][2]), *val['Calign50'][3:5], '{0:.2f}'.
format(val['Calign50'][5])]
f.write(' & '.join([str(x) for x in row]) + '\\\\\n')
def write_assembly_stats2(statsdict: Dict) ->None:
    """Write assembly statistics in transposed layout (one metric per row).

    Row order: a numeric header, the three 'Actual' metrics, then the six
    metrics of each Calign variant; runtime entries (indices 2 and 5) are
    formatted with two decimals.

    Args:
        statsdict: mapping (ref_name, coverage, length) -> per-run stats.
    """
    refs = [ref1_name, ref2_name]

    def row_for(select):
        # One cell per (reference, coverage, length) combination.
        # BUG FIX: reads the statsdict argument instead of the
        # module-level global stats_dict that shadowed it.
        return [select(statsdict[ref_name, c, l]) for ref_name in refs for
            c in coverages for l in average_length_list]
    with open('/home/andreas/GDrive/workspace/sparsedata/assembly_stats2.csv',
        'w') as f:
        f_csv = csv.writer(f, delimiter=',')
        f_csv.writerow(range(len(refs) * 9))
        for i in range(3):
            f_csv.writerow(row_for(lambda s, i=i: s['Actual'][i]))
        for foo in ['Calign', 'Calign25', 'Calign50']:
            for i in range(6):
                if i in [2, 5]:
                    # Runtimes are floats -> two decimal places.
                    f_csv.writerow(row_for(lambda s, foo=foo, i=i:
                        '{0:.2f}'.format(s[foo][i])))
                else:
                    f_csv.writerow(row_for(lambda s, foo=foo, i=i: s[foo][i]))
# Collect per-run statistics for every (genome, coverage, read length)
# combination from the result files on disk.
assembly_stats_list = []  # currently never appended to
stats_dict = {}  # (ref_name, coverage, length) -> per-run stats; read by the write_* functions
for ref_number in [1, 2, 3]:
    for coverage in coverages:
        for length in average_length_list:
            ref_name = references[ref_number - 1]
            # NOTE(review): 'dir' shadows the builtin of the same name.
            dir = ('/home/andreas/GDrive/workspace/sparsedata/ref{}_c{}_l{}/'
                .format(ref_number, coverage, length))
            # 'Calign25'/'Calign50' use result files parameterized with
            # length // 4 and length // 2 — presumably overlap thresholds
            # of a quarter / half of the average read length (confirm).
            stats_dict[ref_name, coverage, length] = {'Actual':
                read_fasta_stats_file(dir + 'fasta.stat'), 'Calign':
                read_assembly_file(dir + 'calign.assembly'), 'Calign25':
                read_assembly_file(dir + 'calign_0_{}.assembly'.format(
                length // 4)), 'Calign50': read_assembly_file(dir +
                'calign_0_{}.assembly'.format(length // 2))}
def write_whole_stats() ->None:
    """Aggregate assembly_stats.csv into complete_stats.csv.

    Counts, per solver column, how often the heuristic objective value /
    gap count matches the actual one, and how often the LKH and AP
    solvers of each Calign variant agree with each other. The counters
    are written as '&'-delimited rows (ready to paste into LaTeX).

    Reads/writes files under the config-provided DIR path.
    """
    headers = ['CalignLKH', 'CalignAP', 'CalignALKH', 'CalignAAP',
        'CalignBLKH', 'CalignBAP']
    # One zero-initialized counter dict per statistic, instead of five
    # hand-written literals.
    vals = dict.fromkeys(headers, 0)  # objective value matches actual
    gaps = dict.fromkeys(headers, 0)  # gap count matches actual
    both = dict.fromkeys(headers, 0)  # value AND gaps match actual
    atspvsapval = dict.fromkeys(headers, 0)  # LKH value == AP value
    atspvsap = dict.fromkeys(headers, 0)  # LKH and AP fully agree
    # The LKH/AP solver pairs, one pair per Calign variant.
    solver_pairs = [('CalignLKH', 'CalignAP'), ('CalignALKH', 'CalignAAP'),
        ('CalignBLKH', 'CalignBAP')]
    with open(DIR + 'assembly_stats.csv', 'r') as f:
        f_csv = csv.DictReader(f, delimiter=',')
        for row in f_csv:
            for elem in headers:
                if row['ActualValue'] == row[elem + 'Value']:
                    vals[elem] += 1
                if row['ActualGaps'] == row[elem + 'Gaps']:
                    gaps[elem] += 1
                if row['ActualValue'] == row[elem + 'Value'] and row[
                    'ActualGaps'] == row[elem + 'Gaps']:
                    both[elem] += 1
            for lkh, ap in solver_pairs:
                if row[lkh + 'Value'] == row[ap + 'Value']:
                    atspvsapval[lkh] += 1
                    atspvsapval[ap] += 1
                if row[lkh + 'Value'] == row[ap + 'Value'] and row[lkh +
                    'Gaps'] == row[ap + 'Gaps']:
                    atspvsap[lkh] += 1
                    atspvsap[ap] += 1
    with open(DIR + 'complete_stats.csv', 'w') as g:
        g_csv = csv.DictWriter(g, delimiter='&', fieldnames=headers)
        g_csv.writeheader()
        g_csv.writerow(vals)
        g_csv.writerow(gaps)
        g_csv.writerow(both)
        g_csv.writerow(atspvsapval)
        g_csv.writerow(atspvsap)
# Emit all report files from the collected statistics.
write_assembly_stats(stats_dict)
write_assembly_stats2(stats_dict)
write_assembly_stats_tex(stats_dict)
# Must run after write_assembly_stats: it re-reads assembly_stats.csv.
write_whole_stats()
<|reserved_special_token_1|>
import csv
import os
from collections import namedtuple
from typing import List, Dict
from config import *
# Column names for the Assembly_Stats record; only referenced by the
# commented-out collection code further below.
HEADER = ['File', 'LKHContigs', 'LKHValue', 'LKHTime', 'APContigs', 'APValue', 'APTime', 'ActualObjectiveValue']
Assembly_Stats = namedtuple('Assembly_Stats', HEADER)
# NOTE(review): 'dir' shadows the builtin and is overwritten by the
# collection loop below; this initial value appears to be unused.
dir = '/home/andreas/GDrive/workspace/sparsedata/ref1shuffled_c5_l700/calign.assembly'
def read_assembly_file(file: str) -> List:
    """Parse a Calign '*.assembly' result file.

    Returns:
        [lkh_value, lkh_gaps, lkh_time, ap_value, ap_gaps, ap_time];
        all six entries are -1 when the file does not exist, and a gap
        count is -1 when the corresponding contig list is empty.
    """
    if not os.path.isfile(file):
        return [-1, -1, -1, -1, -1, -1]
    with open(file, 'r') as f:
        file_content_string = f.read()
    # The LKH and AP sections share the same layout; parse both with one
    # helper instead of two copies of the splitting logic.
    return (_parse_solver_section(file_content_string, 'LKH') +
            _parse_solver_section(file_content_string, 'AP'))


def _parse_solver_section(content: str, solver: str) -> List:
    """Extract [objective_value, gaps, runtime] for one solver section.

    `solver` is the section prefix inside the assembly file ('LKH' or 'AP').
    The gap count is the number of contig lines minus one, or -1 when the
    contig list is empty (the objective line directly follows the header).
    """
    contigs_marker = '{}_Contigs:\n'.format(solver)
    if contigs_marker + solver + '_Objective' in content:
        gaps = -1  # empty contig list
    else:
        contig_block = content.split(contigs_marker)[1].split(
            '\n{}_Objective'.format(solver))[0]
        gaps = len(contig_block.split('\n')) - 1
    value = int(content.split('{}_Objective_Value: '.format(solver))[1].split('\n')[0])
    runtime = float(content.split('{}_Time: '.format(solver))[1].split('\n')[0])
    return [value, gaps, runtime]
def read_fasta_stats_file(file: str) -> List:
    """Parse a 'fasta.stat' file produced for a simulated read set.

    Returns:
        [number_of_reads, actual_objective_value, actual_gaps].

    BUG FIX: the return annotation previously said Dict although the
    function has always returned a list.
    """
    with open(file, 'r') as f:
        file_content_string = f.read()
    actual_objective_value = int(file_content_string.split('Objective function value: ')[1].split('\n')[0])
    actual_gaps = int(file_content_string.split('Actual gaps: ')[1].split('\n')[0])
    no_of_reads = int(file_content_string.split('Number of reads: ')[1].split('\n')[0])
    return [no_of_reads, actual_objective_value, actual_gaps]
# def write_assembly_stats(assembly_stats_list: List[Assembly_Stats]) -> None:
# with open('/home/andreas/GDrive/workspace/sparsedata/assembly_stats.csv', 'w') as f:
# f_csv = csv.writer(f, delimiter=',')
# f_csv.writerow(
# ['File', 'LKHContigs', 'LKHValue', 'LKHTime', 'APContigs', 'APValue', 'APTime', 'ActualObjectiveValue'])
# for elem in assembly_stats_list:
# f_csv.writerow(elem)
def write_assembly_stats(statsdict: Dict) -> None:
    """Write one CSV row per (genome, coverage, average read length) run.

    Each row holds the actual assembly statistics followed by the results
    of the three Calign variants stored under the keys 'Calign',
    'Calign25' and 'Calign50'.

    Args:
        statsdict: mapping (ref_name, coverage, length) -> per-run stats.

    Note: iterates over config-provided globals (ref1_name..ref3_name,
    coverages, average_length_list) and writes to a hardcoded path.
    """
    with open('/home/andreas/GDrive/workspace/sparsedata/assembly_stats.csv', 'w') as f:
        f_csv = csv.writer(f, delimiter=',')
        f_csv.writerow(
            ['Genome', 'Coverage', 'AvgLength', 'Reads', 'ActualValue', 'ActualGaps',
             'CalignLKHValue', 'CalignLKHGaps', 'CalignLKHTime',
             'CalignAPValue', 'CalignAPGaps', 'CalignAPTime',
             'CalignALKHValue', 'CalignALKHGaps', 'CalignALKHTime',
             'CalignAAPValue', 'CalignAAPGaps', 'CalignAAPTime',
             'CalignBLKHValue', 'CalignBLKHGaps', 'CalignBLKHTime',
             'CalignBAPValue', 'CalignBAPGaps', 'CalignBAPTime',
             ])
        for ref_name in [ref1_name, ref2_name, ref3_name]:
            for c in coverages:
                for length in average_length_list:
                    # BUG FIX: use the statsdict argument instead of the
                    # module-level global stats_dict that shadowed it.
                    val = statsdict[(ref_name, c, length)]
                    row = [ref_name, c, length]
                    row += val['Actual']
                    row += val['Calign']
                    row += val['Calign25']
                    row += val['Calign50']
                    f_csv.writerow(row)
def write_assembly_stats_tex(statsdict: Dict) -> None:
    """Write the per-run statistics as LaTeX table rows.

    Genomes are separated by \\hdashline; runtimes (indices 2 and 5 of
    each Calign entry) are formatted with two decimals.

    Args:
        statsdict: mapping (ref_name, coverage, length) -> per-run stats.
    """
    with open('/home/andreas/GDrive/workspace/sparsedata/assembly_stats.tex', 'w') as f:
        for ref_name in [ref1_name, ref2_name, ref3_name]:
            # No dashed separator before the first genome block.
            dashline_active = '' if ref1_name == ref_name else '\\hdashline\n'
            f.write('{}\\bfseries {}\\\\\n'.format(dashline_active, ref_name))
            for c in coverages:
                f.write('$c = {}$\\\\\n'.format(c))
                for length in average_length_list:
                    # BUG FIX: use the statsdict argument instead of the
                    # module-level global stats_dict that shadowed it.
                    val = statsdict[(ref_name, c, length)]
                    row = [length, val['Actual'][0], '']
                    row += val['Actual'][1:]
                    # Same six-column layout for all three Calign variants,
                    # with the two runtime entries rendered as '%.2f'.
                    for key in ('Calign', 'Calign25', 'Calign50'):
                        row.append('')
                        row += [*val[key][0:2], '{0:.2f}'.format(val[key][2]),
                                *val[key][3:5], '{0:.2f}'.format(val[key][5])]
                    f.write(' & '.join(str(x) for x in row) + '\\\\\n')
def write_assembly_stats2(statsdict: Dict) -> None:
    """Write assembly statistics in transposed layout (one metric per row).

    Row order: a numeric header, the three 'Actual' metrics, then the six
    metrics of each Calign variant; runtime entries (indices 2 and 5) are
    formatted with two decimals.

    Args:
        statsdict: mapping (ref_name, coverage, length) -> per-run stats.
    """
    refs = [ref1_name, ref2_name]

    def row_for(select):
        # One cell per (reference, coverage, length) combination.
        # BUG FIX: reads the statsdict argument instead of the
        # module-level global stats_dict that shadowed it.
        return [select(statsdict[(ref_name, c, l)])
                for ref_name in refs for c in coverages
                for l in average_length_list]

    with open('/home/andreas/GDrive/workspace/sparsedata/assembly_stats2.csv', 'w') as f:
        f_csv = csv.writer(f, delimiter=',')
        f_csv.writerow(range(len(refs) * 9))
        for i in range(3):
            f_csv.writerow(row_for(lambda s, i=i: s['Actual'][i]))
        for foo in ['Calign', 'Calign25', 'Calign50']:
            for i in range(6):
                if i in [2, 5]:
                    # Runtimes are floats -> two decimal places.
                    f_csv.writerow(row_for(
                        lambda s, foo=foo, i=i: '{0:.2f}'.format(s[foo][i])))
                else:
                    f_csv.writerow(row_for(lambda s, foo=foo, i=i: s[foo][i]))
# Collect per-run statistics for every (genome, coverage, read length)
# combination from the result files on disk.
assembly_stats_list = []  # currently never appended to (collection code below is commented out)
stats_dict = {}  # (ref_name, coverage, length) -> per-run stats; read by the write_* functions
# for dir in sorted(glob.glob('/home/andreas/GDrive/workspace/sparsedata/ref[1,2,3]_c[5,20,40]*/')):
for ref_number in [1, 2, 3]:
    for coverage in coverages:
        for length in average_length_list:
            # file_sub_dir = dir.split('/')[-2]  # example ref1_c5_l100
            # ref_number = int(file_sub_dir.split('ref')[1].split('_')[0])
            ref_name = references[ref_number - 1]
            # coverage = int(file_sub_dir.split('_c')[1].split('_')[0])
            # length = int(file_sub_dir.split('_l')[1])
            # NOTE(review): 'dir' shadows the builtin of the same name.
            dir = '/home/andreas/GDrive/workspace/sparsedata/ref{}_c{}_l{}/'.format(ref_number, coverage, length)
            # 'Calign25'/'Calign50' use result files parameterized with
            # length // 4 and length // 2 — presumably overlap thresholds
            # of a quarter / half of the average read length (confirm).
            stats_dict[(ref_name, coverage, length)] = {'Actual': read_fasta_stats_file(dir + 'fasta.stat'),
                                                        'Calign': read_assembly_file(dir + 'calign.assembly'),
                                                        'Calign25': read_assembly_file(
                                                            dir + 'calign_0_{}.assembly'.format(length // 4)),
                                                        'Calign50': read_assembly_file(
                                                            dir + 'calign_0_{}.assembly'.format(length // 2))}
            # dir = '{}-{}-{}'.format(references[ref_number - 1], coverage, length)
            # assembly_stats_list.append(
            #     Assembly_Stats(dir, len(lkh_contigs), lkh_value, lkh_time, len(ap_contigs), ap_value, ap_time,
            #                    actual_Objective_value))
def write_whole_stats() -> None:
    """Aggregate assembly_stats.csv into complete_stats.csv.

    Counts, per solver column, how often the heuristic objective value /
    gap count matches the actual one, and how often the LKH and AP
    solvers of each Calign variant agree with each other. The counters
    are written as '&'-delimited rows (ready to paste into LaTeX).

    Reads/writes files under the config-provided DIR path.
    """
    headers = ['CalignLKH', 'CalignAP', 'CalignALKH', 'CalignAAP',
               'CalignBLKH', 'CalignBAP']
    # One zero-initialized counter dict per statistic, instead of five
    # hand-written literals.
    vals = dict.fromkeys(headers, 0)          # objective value matches actual
    gaps = dict.fromkeys(headers, 0)          # gap count matches actual
    both = dict.fromkeys(headers, 0)          # value AND gaps match actual
    atspvsapval = dict.fromkeys(headers, 0)   # LKH value == AP value
    atspvsap = dict.fromkeys(headers, 0)      # LKH and AP fully agree
    # The LKH/AP solver pairs, one pair per Calign variant.
    solver_pairs = [('CalignLKH', 'CalignAP'),
                    ('CalignALKH', 'CalignAAP'),
                    ('CalignBLKH', 'CalignBAP')]
    with open(DIR + 'assembly_stats.csv', 'r') as f:
        f_csv = csv.DictReader(f, delimiter=',')
        for row in f_csv:
            for elem in headers:
                if row['ActualValue'] == row[elem + 'Value']:
                    vals[elem] += 1
                if row['ActualGaps'] == row[elem + 'Gaps']:
                    gaps[elem] += 1
                if row['ActualValue'] == row[elem + 'Value'] and row['ActualGaps'] == row[elem + 'Gaps']:
                    both[elem] += 1
            for lkh, ap in solver_pairs:
                if row[lkh + 'Value'] == row[ap + 'Value']:
                    atspvsapval[lkh] += 1
                    atspvsapval[ap] += 1
                if row[lkh + 'Value'] == row[ap + 'Value'] and row[lkh + 'Gaps'] == row[ap + 'Gaps']:
                    atspvsap[lkh] += 1
                    atspvsap[ap] += 1
    with open(DIR + 'complete_stats.csv', 'w') as g:
        g_csv = csv.DictWriter(g, delimiter='&', fieldnames=headers)
        g_csv.writeheader()
        g_csv.writerow(vals)
        g_csv.writerow(gaps)
        g_csv.writerow(both)
        g_csv.writerow(atspvsapval)
        g_csv.writerow(atspvsap)
# Emit all report files from the collected statistics.
write_assembly_stats(stats_dict)
write_assembly_stats2(stats_dict)
write_assembly_stats_tex(stats_dict)
# Must run after write_assembly_stats: it re-reads assembly_stats.csv.
write_whole_stats()
|
flexible
|
{
"blob_id": "edd98e3996b0fce46d33dd33340018ab5b029637",
"index": 2333,
"step-1": "<mask token>\n\n\ndef read_assembly_file(file: str) ->List:\n if not os.path.isfile(file):\n return [-1, -1, -1, -1, -1, -1]\n with open(file, 'r') as f:\n file_content_string = f.read()\n if 'LKH_Contigs:\\nLKH_Objective' in file_content_string:\n lkh_gaps = -1\n else:\n lkh_gaps = len(file_content_string.split('LKH_Contigs:\\n')[1].\n split('\\nLKH_Objective')[0].split('\\n')) - 1\n lkh_value = int(file_content_string.split('LKH_Objective_Value: ')[\n 1].split('\\n')[0])\n lkh_time = float(file_content_string.split('LKH_Time: ')[1].split(\n '\\n')[0])\n if 'AP_Contigs:\\nAP_Objective' in file_content_string:\n ap_gaps = -1\n else:\n ap_gaps = len(file_content_string.split('AP_Contigs:\\n')[1].\n split('\\nAP_Objective')[0].split('\\n')) - 1\n ap_value = int(file_content_string.split('AP_Objective_Value: ')[1]\n .split('\\n')[0])\n ap_time = float(file_content_string.split('AP_Time: ')[1].split(\n '\\n')[0])\n return [lkh_value, lkh_gaps, lkh_time, ap_value, ap_gaps, ap_time]\n\n\ndef read_fasta_stats_file(file: str) ->Dict:\n with open(file, 'r') as f:\n file_content_string = f.read()\n actual_objective_value = int(file_content_string.split(\n 'Objective function value: ')[1].split('\\n')[0])\n actual_gaps = int(file_content_string.split('Actual gaps: ')[1].\n split('\\n')[0])\n no_of_reads = int(file_content_string.split('Number of reads: ')[1]\n .split('\\n')[0])\n return [no_of_reads, actual_objective_value, actual_gaps]\n\n\ndef write_assembly_stats(statsdict: Dict) ->None:\n with open('/home/andreas/GDrive/workspace/sparsedata/assembly_stats.csv',\n 'w') as f:\n f_csv = csv.writer(f, delimiter=',')\n f_csv.writerow(['Genome', 'Coverage', 'AvgLength', 'Reads',\n 'ActualValue', 'ActualGaps', 'CalignLKHValue', 'CalignLKHGaps',\n 'CalignLKHTime', 'CalignAPValue', 'CalignAPGaps',\n 'CalignAPTime', 'CalignALKHValue', 'CalignALKHGaps',\n 'CalignALKHTime', 'CalignAAPValue', 'CalignAAPGaps',\n 'CalignAAPTime', 'CalignBLKHValue', 'CalignBLKHGaps',\n 
'CalignBLKHTime', 'CalignBAPValue', 'CalignBAPGaps',\n 'CalignBAPTime'])\n for ref_name in [ref1_name, ref2_name, ref3_name]:\n for c in coverages:\n for length in average_length_list:\n val = stats_dict[ref_name, c, length]\n row = [ref_name, c, length]\n row += val['Actual']\n row += val['Calign']\n row += val['Calign25']\n row += val['Calign50']\n f_csv.writerow(row)\n\n\ndef write_assembly_stats_tex(statsdict: Dict) ->None:\n with open('/home/andreas/GDrive/workspace/sparsedata/assembly_stats.tex',\n 'w') as f:\n for ref_name in [ref1_name, ref2_name, ref3_name]:\n if ref1_name == ref_name:\n dashline_active = ''\n else:\n dashline_active = '\\\\hdashline\\n'\n f.write('{}\\\\bfseries {}\\\\\\\\\\n'.format(dashline_active, ref_name))\n for c in coverages:\n f.write('$c = {}$\\\\\\\\\\n'.format(c))\n for length in average_length_list:\n val = stats_dict[ref_name, c, length]\n row = [length]\n row += [val['Actual'][0]]\n row += ['']\n row += val['Actual'][1:]\n row += ['']\n row += [*val['Calign'][0:2], '{0:.2f}'.format(val[\n 'Calign'][2]), *val['Calign'][3:5], '{0:.2f}'.\n format(val['Calign'][5])]\n row += ['']\n row += [*val['Calign25'][0:2], '{0:.2f}'.format(val[\n 'Calign25'][2]), *val['Calign25'][3:5], '{0:.2f}'.\n format(val['Calign25'][5])]\n row += ['']\n row += [*val['Calign50'][0:2], '{0:.2f}'.format(val[\n 'Calign50'][2]), *val['Calign50'][3:5], '{0:.2f}'.\n format(val['Calign50'][5])]\n f.write(' & '.join([str(x) for x in row]) + '\\\\\\\\\\n')\n\n\ndef write_assembly_stats2(statsdict: Dict) ->None:\n with open('/home/andreas/GDrive/workspace/sparsedata/assembly_stats2.csv',\n 'w') as f:\n f_csv = csv.writer(f, delimiter=',')\n refs = [ref1_name, ref2_name]\n f_csv.writerow(range(len(refs) * 9))\n f_csv.writerow([stats_dict[ref_name, c, l]['Actual'][0] for\n ref_name in refs for c in coverages for l in average_length_list])\n f_csv.writerow([stats_dict[ref_name, c, l]['Actual'][1] for\n ref_name in refs for c in coverages for l in 
average_length_list])\n f_csv.writerow([stats_dict[ref_name, c, l]['Actual'][2] for\n ref_name in refs for c in coverages for l in average_length_list])\n for foo in ['Calign', 'Calign25', 'Calign50']:\n for i in range(6):\n if i in [2, 5]:\n f_csv.writerow(['{0:.2f}'.format(stats_dict[ref_name, c,\n l][foo][i]) for ref_name in refs for c in coverages for\n l in average_length_list])\n else:\n f_csv.writerow([stats_dict[ref_name, c, l][foo][i] for\n ref_name in refs for c in coverages for l in\n average_length_list])\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef read_assembly_file(file: str) ->List:\n if not os.path.isfile(file):\n return [-1, -1, -1, -1, -1, -1]\n with open(file, 'r') as f:\n file_content_string = f.read()\n if 'LKH_Contigs:\\nLKH_Objective' in file_content_string:\n lkh_gaps = -1\n else:\n lkh_gaps = len(file_content_string.split('LKH_Contigs:\\n')[1].\n split('\\nLKH_Objective')[0].split('\\n')) - 1\n lkh_value = int(file_content_string.split('LKH_Objective_Value: ')[\n 1].split('\\n')[0])\n lkh_time = float(file_content_string.split('LKH_Time: ')[1].split(\n '\\n')[0])\n if 'AP_Contigs:\\nAP_Objective' in file_content_string:\n ap_gaps = -1\n else:\n ap_gaps = len(file_content_string.split('AP_Contigs:\\n')[1].\n split('\\nAP_Objective')[0].split('\\n')) - 1\n ap_value = int(file_content_string.split('AP_Objective_Value: ')[1]\n .split('\\n')[0])\n ap_time = float(file_content_string.split('AP_Time: ')[1].split(\n '\\n')[0])\n return [lkh_value, lkh_gaps, lkh_time, ap_value, ap_gaps, ap_time]\n\n\ndef read_fasta_stats_file(file: str) ->Dict:\n with open(file, 'r') as f:\n file_content_string = f.read()\n actual_objective_value = int(file_content_string.split(\n 'Objective function value: ')[1].split('\\n')[0])\n actual_gaps = int(file_content_string.split('Actual gaps: ')[1].\n split('\\n')[0])\n no_of_reads = int(file_content_string.split('Number of reads: ')[1]\n .split('\\n')[0])\n return [no_of_reads, actual_objective_value, actual_gaps]\n\n\ndef write_assembly_stats(statsdict: Dict) ->None:\n with open('/home/andreas/GDrive/workspace/sparsedata/assembly_stats.csv',\n 'w') as f:\n f_csv = csv.writer(f, delimiter=',')\n f_csv.writerow(['Genome', 'Coverage', 'AvgLength', 'Reads',\n 'ActualValue', 'ActualGaps', 'CalignLKHValue', 'CalignLKHGaps',\n 'CalignLKHTime', 'CalignAPValue', 'CalignAPGaps',\n 'CalignAPTime', 'CalignALKHValue', 'CalignALKHGaps',\n 'CalignALKHTime', 'CalignAAPValue', 'CalignAAPGaps',\n 'CalignAAPTime', 'CalignBLKHValue', 'CalignBLKHGaps',\n 
'CalignBLKHTime', 'CalignBAPValue', 'CalignBAPGaps',\n 'CalignBAPTime'])\n for ref_name in [ref1_name, ref2_name, ref3_name]:\n for c in coverages:\n for length in average_length_list:\n val = stats_dict[ref_name, c, length]\n row = [ref_name, c, length]\n row += val['Actual']\n row += val['Calign']\n row += val['Calign25']\n row += val['Calign50']\n f_csv.writerow(row)\n\n\ndef write_assembly_stats_tex(statsdict: Dict) ->None:\n with open('/home/andreas/GDrive/workspace/sparsedata/assembly_stats.tex',\n 'w') as f:\n for ref_name in [ref1_name, ref2_name, ref3_name]:\n if ref1_name == ref_name:\n dashline_active = ''\n else:\n dashline_active = '\\\\hdashline\\n'\n f.write('{}\\\\bfseries {}\\\\\\\\\\n'.format(dashline_active, ref_name))\n for c in coverages:\n f.write('$c = {}$\\\\\\\\\\n'.format(c))\n for length in average_length_list:\n val = stats_dict[ref_name, c, length]\n row = [length]\n row += [val['Actual'][0]]\n row += ['']\n row += val['Actual'][1:]\n row += ['']\n row += [*val['Calign'][0:2], '{0:.2f}'.format(val[\n 'Calign'][2]), *val['Calign'][3:5], '{0:.2f}'.\n format(val['Calign'][5])]\n row += ['']\n row += [*val['Calign25'][0:2], '{0:.2f}'.format(val[\n 'Calign25'][2]), *val['Calign25'][3:5], '{0:.2f}'.\n format(val['Calign25'][5])]\n row += ['']\n row += [*val['Calign50'][0:2], '{0:.2f}'.format(val[\n 'Calign50'][2]), *val['Calign50'][3:5], '{0:.2f}'.\n format(val['Calign50'][5])]\n f.write(' & '.join([str(x) for x in row]) + '\\\\\\\\\\n')\n\n\ndef write_assembly_stats2(statsdict: Dict) ->None:\n with open('/home/andreas/GDrive/workspace/sparsedata/assembly_stats2.csv',\n 'w') as f:\n f_csv = csv.writer(f, delimiter=',')\n refs = [ref1_name, ref2_name]\n f_csv.writerow(range(len(refs) * 9))\n f_csv.writerow([stats_dict[ref_name, c, l]['Actual'][0] for\n ref_name in refs for c in coverages for l in average_length_list])\n f_csv.writerow([stats_dict[ref_name, c, l]['Actual'][1] for\n ref_name in refs for c in coverages for l in 
average_length_list])\n f_csv.writerow([stats_dict[ref_name, c, l]['Actual'][2] for\n ref_name in refs for c in coverages for l in average_length_list])\n for foo in ['Calign', 'Calign25', 'Calign50']:\n for i in range(6):\n if i in [2, 5]:\n f_csv.writerow(['{0:.2f}'.format(stats_dict[ref_name, c,\n l][foo][i]) for ref_name in refs for c in coverages for\n l in average_length_list])\n else:\n f_csv.writerow([stats_dict[ref_name, c, l][foo][i] for\n ref_name in refs for c in coverages for l in\n average_length_list])\n\n\n<mask token>\nfor ref_number in [1, 2, 3]:\n for coverage in coverages:\n for length in average_length_list:\n ref_name = references[ref_number - 1]\n dir = ('/home/andreas/GDrive/workspace/sparsedata/ref{}_c{}_l{}/'\n .format(ref_number, coverage, length))\n stats_dict[ref_name, coverage, length] = {'Actual':\n read_fasta_stats_file(dir + 'fasta.stat'), 'Calign':\n read_assembly_file(dir + 'calign.assembly'), 'Calign25':\n read_assembly_file(dir + 'calign_0_{}.assembly'.format(\n length // 4)), 'Calign50': read_assembly_file(dir +\n 'calign_0_{}.assembly'.format(length // 2))}\n\n\ndef write_whole_stats() ->None:\n headers = ['CalignLKH', 'CalignAP', 'CalignALKH', 'CalignAAP',\n 'CalignBLKH', 'CalignBAP']\n vals = {'CalignLKH': 0, 'CalignAP': 0, 'CalignALKH': 0, 'CalignAAP': 0,\n 'CalignBLKH': 0, 'CalignBAP': 0}\n gaps = {'CalignLKH': 0, 'CalignAP': 0, 'CalignALKH': 0, 'CalignAAP': 0,\n 'CalignBLKH': 0, 'CalignBAP': 0}\n both = {'CalignLKH': 0, 'CalignAP': 0, 'CalignALKH': 0, 'CalignAAP': 0,\n 'CalignBLKH': 0, 'CalignBAP': 0}\n atspvsapval = {'CalignLKH': 0, 'CalignAP': 0, 'CalignALKH': 0,\n 'CalignAAP': 0, 'CalignBLKH': 0, 'CalignBAP': 0}\n atspvsap = {'CalignLKH': 0, 'CalignAP': 0, 'CalignALKH': 0, 'CalignAAP':\n 0, 'CalignBLKH': 0, 'CalignBAP': 0}\n with open(DIR + 'assembly_stats.csv', 'r') as f:\n f_csv = csv.DictReader(f, delimiter=',')\n for row in f_csv:\n for elem in headers:\n if row['ActualValue'] == row[elem + 'Value']:\n vals[elem] 
+= 1\n if row['ActualGaps'] == row[elem + 'Gaps']:\n gaps[elem] += 1\n if row['ActualValue'] == row[elem + 'Value'] and row[\n 'ActualGaps'] == row[elem + 'Gaps']:\n both[elem] += 1\n if row['CalignLKHValue'] == row['CalignAPValue']:\n atspvsapval['CalignLKH'] += 1\n atspvsapval['CalignAP'] += 1\n if row['CalignALKHValue'] == row['CalignAAPValue']:\n atspvsapval['CalignALKH'] += 1\n atspvsapval['CalignAAP'] += 1\n if row['CalignBLKHValue'] == row['CalignBAPValue']:\n atspvsapval['CalignBLKH'] += 1\n atspvsapval['CalignBAP'] += 1\n if row['CalignLKHValue'] == row['CalignAPValue'] and row[\n 'CalignLKHGaps'] == row['CalignAPGaps']:\n atspvsap['CalignLKH'] += 1\n atspvsap['CalignAP'] += 1\n if row['CalignALKHValue'] == row['CalignAAPValue'] and row[\n 'CalignALKHGaps'] == row['CalignAAPGaps']:\n atspvsap['CalignALKH'] += 1\n atspvsap['CalignAAP'] += 1\n if row['CalignBLKHValue'] == row['CalignBAPValue'] and row[\n 'CalignBLKHGaps'] == row['CalignBAPGaps']:\n atspvsap['CalignBLKH'] += 1\n atspvsap['CalignBAP'] += 1\n with open(DIR + 'complete_stats.csv', 'w') as g:\n g_csv = csv.DictWriter(g, delimiter='&', fieldnames=headers)\n g_csv.writeheader()\n g_csv.writerow(vals)\n g_csv.writerow(gaps)\n g_csv.writerow(both)\n g_csv.writerow(atspvsapval)\n g_csv.writerow(atspvsap)\n\n\nwrite_assembly_stats(stats_dict)\nwrite_assembly_stats2(stats_dict)\nwrite_assembly_stats_tex(stats_dict)\nwrite_whole_stats()\n",
"step-3": "<mask token>\nHEADER = ['File', 'LKHContigs', 'LKHValue', 'LKHTime', 'APContigs',\n 'APValue', 'APTime', 'ActualObjectiveValue']\nAssembly_Stats = namedtuple('Assembly_Stats', HEADER)\ndir = (\n '/home/andreas/GDrive/workspace/sparsedata/ref1shuffled_c5_l700/calign.assembly'\n )\n\n\ndef read_assembly_file(file: str) ->List:\n if not os.path.isfile(file):\n return [-1, -1, -1, -1, -1, -1]\n with open(file, 'r') as f:\n file_content_string = f.read()\n if 'LKH_Contigs:\\nLKH_Objective' in file_content_string:\n lkh_gaps = -1\n else:\n lkh_gaps = len(file_content_string.split('LKH_Contigs:\\n')[1].\n split('\\nLKH_Objective')[0].split('\\n')) - 1\n lkh_value = int(file_content_string.split('LKH_Objective_Value: ')[\n 1].split('\\n')[0])\n lkh_time = float(file_content_string.split('LKH_Time: ')[1].split(\n '\\n')[0])\n if 'AP_Contigs:\\nAP_Objective' in file_content_string:\n ap_gaps = -1\n else:\n ap_gaps = len(file_content_string.split('AP_Contigs:\\n')[1].\n split('\\nAP_Objective')[0].split('\\n')) - 1\n ap_value = int(file_content_string.split('AP_Objective_Value: ')[1]\n .split('\\n')[0])\n ap_time = float(file_content_string.split('AP_Time: ')[1].split(\n '\\n')[0])\n return [lkh_value, lkh_gaps, lkh_time, ap_value, ap_gaps, ap_time]\n\n\ndef read_fasta_stats_file(file: str) ->Dict:\n with open(file, 'r') as f:\n file_content_string = f.read()\n actual_objective_value = int(file_content_string.split(\n 'Objective function value: ')[1].split('\\n')[0])\n actual_gaps = int(file_content_string.split('Actual gaps: ')[1].\n split('\\n')[0])\n no_of_reads = int(file_content_string.split('Number of reads: ')[1]\n .split('\\n')[0])\n return [no_of_reads, actual_objective_value, actual_gaps]\n\n\ndef write_assembly_stats(statsdict: Dict) ->None:\n with open('/home/andreas/GDrive/workspace/sparsedata/assembly_stats.csv',\n 'w') as f:\n f_csv = csv.writer(f, delimiter=',')\n f_csv.writerow(['Genome', 'Coverage', 'AvgLength', 'Reads',\n 'ActualValue', 
'ActualGaps', 'CalignLKHValue', 'CalignLKHGaps',\n 'CalignLKHTime', 'CalignAPValue', 'CalignAPGaps',\n 'CalignAPTime', 'CalignALKHValue', 'CalignALKHGaps',\n 'CalignALKHTime', 'CalignAAPValue', 'CalignAAPGaps',\n 'CalignAAPTime', 'CalignBLKHValue', 'CalignBLKHGaps',\n 'CalignBLKHTime', 'CalignBAPValue', 'CalignBAPGaps',\n 'CalignBAPTime'])\n for ref_name in [ref1_name, ref2_name, ref3_name]:\n for c in coverages:\n for length in average_length_list:\n val = stats_dict[ref_name, c, length]\n row = [ref_name, c, length]\n row += val['Actual']\n row += val['Calign']\n row += val['Calign25']\n row += val['Calign50']\n f_csv.writerow(row)\n\n\ndef write_assembly_stats_tex(statsdict: Dict) ->None:\n with open('/home/andreas/GDrive/workspace/sparsedata/assembly_stats.tex',\n 'w') as f:\n for ref_name in [ref1_name, ref2_name, ref3_name]:\n if ref1_name == ref_name:\n dashline_active = ''\n else:\n dashline_active = '\\\\hdashline\\n'\n f.write('{}\\\\bfseries {}\\\\\\\\\\n'.format(dashline_active, ref_name))\n for c in coverages:\n f.write('$c = {}$\\\\\\\\\\n'.format(c))\n for length in average_length_list:\n val = stats_dict[ref_name, c, length]\n row = [length]\n row += [val['Actual'][0]]\n row += ['']\n row += val['Actual'][1:]\n row += ['']\n row += [*val['Calign'][0:2], '{0:.2f}'.format(val[\n 'Calign'][2]), *val['Calign'][3:5], '{0:.2f}'.\n format(val['Calign'][5])]\n row += ['']\n row += [*val['Calign25'][0:2], '{0:.2f}'.format(val[\n 'Calign25'][2]), *val['Calign25'][3:5], '{0:.2f}'.\n format(val['Calign25'][5])]\n row += ['']\n row += [*val['Calign50'][0:2], '{0:.2f}'.format(val[\n 'Calign50'][2]), *val['Calign50'][3:5], '{0:.2f}'.\n format(val['Calign50'][5])]\n f.write(' & '.join([str(x) for x in row]) + '\\\\\\\\\\n')\n\n\ndef write_assembly_stats2(statsdict: Dict) ->None:\n with open('/home/andreas/GDrive/workspace/sparsedata/assembly_stats2.csv',\n 'w') as f:\n f_csv = csv.writer(f, delimiter=',')\n refs = [ref1_name, ref2_name]\n 
f_csv.writerow(range(len(refs) * 9))\n f_csv.writerow([stats_dict[ref_name, c, l]['Actual'][0] for\n ref_name in refs for c in coverages for l in average_length_list])\n f_csv.writerow([stats_dict[ref_name, c, l]['Actual'][1] for\n ref_name in refs for c in coverages for l in average_length_list])\n f_csv.writerow([stats_dict[ref_name, c, l]['Actual'][2] for\n ref_name in refs for c in coverages for l in average_length_list])\n for foo in ['Calign', 'Calign25', 'Calign50']:\n for i in range(6):\n if i in [2, 5]:\n f_csv.writerow(['{0:.2f}'.format(stats_dict[ref_name, c,\n l][foo][i]) for ref_name in refs for c in coverages for\n l in average_length_list])\n else:\n f_csv.writerow([stats_dict[ref_name, c, l][foo][i] for\n ref_name in refs for c in coverages for l in\n average_length_list])\n\n\nassembly_stats_list = []\nstats_dict = {}\nfor ref_number in [1, 2, 3]:\n for coverage in coverages:\n for length in average_length_list:\n ref_name = references[ref_number - 1]\n dir = ('/home/andreas/GDrive/workspace/sparsedata/ref{}_c{}_l{}/'\n .format(ref_number, coverage, length))\n stats_dict[ref_name, coverage, length] = {'Actual':\n read_fasta_stats_file(dir + 'fasta.stat'), 'Calign':\n read_assembly_file(dir + 'calign.assembly'), 'Calign25':\n read_assembly_file(dir + 'calign_0_{}.assembly'.format(\n length // 4)), 'Calign50': read_assembly_file(dir +\n 'calign_0_{}.assembly'.format(length // 2))}\n\n\ndef write_whole_stats() ->None:\n headers = ['CalignLKH', 'CalignAP', 'CalignALKH', 'CalignAAP',\n 'CalignBLKH', 'CalignBAP']\n vals = {'CalignLKH': 0, 'CalignAP': 0, 'CalignALKH': 0, 'CalignAAP': 0,\n 'CalignBLKH': 0, 'CalignBAP': 0}\n gaps = {'CalignLKH': 0, 'CalignAP': 0, 'CalignALKH': 0, 'CalignAAP': 0,\n 'CalignBLKH': 0, 'CalignBAP': 0}\n both = {'CalignLKH': 0, 'CalignAP': 0, 'CalignALKH': 0, 'CalignAAP': 0,\n 'CalignBLKH': 0, 'CalignBAP': 0}\n atspvsapval = {'CalignLKH': 0, 'CalignAP': 0, 'CalignALKH': 0,\n 'CalignAAP': 0, 'CalignBLKH': 0, 'CalignBAP': 0}\n 
atspvsap = {'CalignLKH': 0, 'CalignAP': 0, 'CalignALKH': 0, 'CalignAAP':\n 0, 'CalignBLKH': 0, 'CalignBAP': 0}\n with open(DIR + 'assembly_stats.csv', 'r') as f:\n f_csv = csv.DictReader(f, delimiter=',')\n for row in f_csv:\n for elem in headers:\n if row['ActualValue'] == row[elem + 'Value']:\n vals[elem] += 1\n if row['ActualGaps'] == row[elem + 'Gaps']:\n gaps[elem] += 1\n if row['ActualValue'] == row[elem + 'Value'] and row[\n 'ActualGaps'] == row[elem + 'Gaps']:\n both[elem] += 1\n if row['CalignLKHValue'] == row['CalignAPValue']:\n atspvsapval['CalignLKH'] += 1\n atspvsapval['CalignAP'] += 1\n if row['CalignALKHValue'] == row['CalignAAPValue']:\n atspvsapval['CalignALKH'] += 1\n atspvsapval['CalignAAP'] += 1\n if row['CalignBLKHValue'] == row['CalignBAPValue']:\n atspvsapval['CalignBLKH'] += 1\n atspvsapval['CalignBAP'] += 1\n if row['CalignLKHValue'] == row['CalignAPValue'] and row[\n 'CalignLKHGaps'] == row['CalignAPGaps']:\n atspvsap['CalignLKH'] += 1\n atspvsap['CalignAP'] += 1\n if row['CalignALKHValue'] == row['CalignAAPValue'] and row[\n 'CalignALKHGaps'] == row['CalignAAPGaps']:\n atspvsap['CalignALKH'] += 1\n atspvsap['CalignAAP'] += 1\n if row['CalignBLKHValue'] == row['CalignBAPValue'] and row[\n 'CalignBLKHGaps'] == row['CalignBAPGaps']:\n atspvsap['CalignBLKH'] += 1\n atspvsap['CalignBAP'] += 1\n with open(DIR + 'complete_stats.csv', 'w') as g:\n g_csv = csv.DictWriter(g, delimiter='&', fieldnames=headers)\n g_csv.writeheader()\n g_csv.writerow(vals)\n g_csv.writerow(gaps)\n g_csv.writerow(both)\n g_csv.writerow(atspvsapval)\n g_csv.writerow(atspvsap)\n\n\nwrite_assembly_stats(stats_dict)\nwrite_assembly_stats2(stats_dict)\nwrite_assembly_stats_tex(stats_dict)\nwrite_whole_stats()\n",
"step-4": "import csv\nimport os\nfrom collections import namedtuple\nfrom typing import List, Dict\nfrom config import *\nHEADER = ['File', 'LKHContigs', 'LKHValue', 'LKHTime', 'APContigs',\n 'APValue', 'APTime', 'ActualObjectiveValue']\nAssembly_Stats = namedtuple('Assembly_Stats', HEADER)\ndir = (\n '/home/andreas/GDrive/workspace/sparsedata/ref1shuffled_c5_l700/calign.assembly'\n )\n\n\ndef read_assembly_file(file: str) ->List:\n if not os.path.isfile(file):\n return [-1, -1, -1, -1, -1, -1]\n with open(file, 'r') as f:\n file_content_string = f.read()\n if 'LKH_Contigs:\\nLKH_Objective' in file_content_string:\n lkh_gaps = -1\n else:\n lkh_gaps = len(file_content_string.split('LKH_Contigs:\\n')[1].\n split('\\nLKH_Objective')[0].split('\\n')) - 1\n lkh_value = int(file_content_string.split('LKH_Objective_Value: ')[\n 1].split('\\n')[0])\n lkh_time = float(file_content_string.split('LKH_Time: ')[1].split(\n '\\n')[0])\n if 'AP_Contigs:\\nAP_Objective' in file_content_string:\n ap_gaps = -1\n else:\n ap_gaps = len(file_content_string.split('AP_Contigs:\\n')[1].\n split('\\nAP_Objective')[0].split('\\n')) - 1\n ap_value = int(file_content_string.split('AP_Objective_Value: ')[1]\n .split('\\n')[0])\n ap_time = float(file_content_string.split('AP_Time: ')[1].split(\n '\\n')[0])\n return [lkh_value, lkh_gaps, lkh_time, ap_value, ap_gaps, ap_time]\n\n\ndef read_fasta_stats_file(file: str) ->Dict:\n with open(file, 'r') as f:\n file_content_string = f.read()\n actual_objective_value = int(file_content_string.split(\n 'Objective function value: ')[1].split('\\n')[0])\n actual_gaps = int(file_content_string.split('Actual gaps: ')[1].\n split('\\n')[0])\n no_of_reads = int(file_content_string.split('Number of reads: ')[1]\n .split('\\n')[0])\n return [no_of_reads, actual_objective_value, actual_gaps]\n\n\ndef write_assembly_stats(statsdict: Dict) ->None:\n with open('/home/andreas/GDrive/workspace/sparsedata/assembly_stats.csv',\n 'w') as f:\n f_csv = csv.writer(f, 
delimiter=',')\n f_csv.writerow(['Genome', 'Coverage', 'AvgLength', 'Reads',\n 'ActualValue', 'ActualGaps', 'CalignLKHValue', 'CalignLKHGaps',\n 'CalignLKHTime', 'CalignAPValue', 'CalignAPGaps',\n 'CalignAPTime', 'CalignALKHValue', 'CalignALKHGaps',\n 'CalignALKHTime', 'CalignAAPValue', 'CalignAAPGaps',\n 'CalignAAPTime', 'CalignBLKHValue', 'CalignBLKHGaps',\n 'CalignBLKHTime', 'CalignBAPValue', 'CalignBAPGaps',\n 'CalignBAPTime'])\n for ref_name in [ref1_name, ref2_name, ref3_name]:\n for c in coverages:\n for length in average_length_list:\n val = stats_dict[ref_name, c, length]\n row = [ref_name, c, length]\n row += val['Actual']\n row += val['Calign']\n row += val['Calign25']\n row += val['Calign50']\n f_csv.writerow(row)\n\n\ndef write_assembly_stats_tex(statsdict: Dict) ->None:\n with open('/home/andreas/GDrive/workspace/sparsedata/assembly_stats.tex',\n 'w') as f:\n for ref_name in [ref1_name, ref2_name, ref3_name]:\n if ref1_name == ref_name:\n dashline_active = ''\n else:\n dashline_active = '\\\\hdashline\\n'\n f.write('{}\\\\bfseries {}\\\\\\\\\\n'.format(dashline_active, ref_name))\n for c in coverages:\n f.write('$c = {}$\\\\\\\\\\n'.format(c))\n for length in average_length_list:\n val = stats_dict[ref_name, c, length]\n row = [length]\n row += [val['Actual'][0]]\n row += ['']\n row += val['Actual'][1:]\n row += ['']\n row += [*val['Calign'][0:2], '{0:.2f}'.format(val[\n 'Calign'][2]), *val['Calign'][3:5], '{0:.2f}'.\n format(val['Calign'][5])]\n row += ['']\n row += [*val['Calign25'][0:2], '{0:.2f}'.format(val[\n 'Calign25'][2]), *val['Calign25'][3:5], '{0:.2f}'.\n format(val['Calign25'][5])]\n row += ['']\n row += [*val['Calign50'][0:2], '{0:.2f}'.format(val[\n 'Calign50'][2]), *val['Calign50'][3:5], '{0:.2f}'.\n format(val['Calign50'][5])]\n f.write(' & '.join([str(x) for x in row]) + '\\\\\\\\\\n')\n\n\ndef write_assembly_stats2(statsdict: Dict) ->None:\n with open('/home/andreas/GDrive/workspace/sparsedata/assembly_stats2.csv',\n 'w') as f:\n 
f_csv = csv.writer(f, delimiter=',')\n refs = [ref1_name, ref2_name]\n f_csv.writerow(range(len(refs) * 9))\n f_csv.writerow([stats_dict[ref_name, c, l]['Actual'][0] for\n ref_name in refs for c in coverages for l in average_length_list])\n f_csv.writerow([stats_dict[ref_name, c, l]['Actual'][1] for\n ref_name in refs for c in coverages for l in average_length_list])\n f_csv.writerow([stats_dict[ref_name, c, l]['Actual'][2] for\n ref_name in refs for c in coverages for l in average_length_list])\n for foo in ['Calign', 'Calign25', 'Calign50']:\n for i in range(6):\n if i in [2, 5]:\n f_csv.writerow(['{0:.2f}'.format(stats_dict[ref_name, c,\n l][foo][i]) for ref_name in refs for c in coverages for\n l in average_length_list])\n else:\n f_csv.writerow([stats_dict[ref_name, c, l][foo][i] for\n ref_name in refs for c in coverages for l in\n average_length_list])\n\n\nassembly_stats_list = []\nstats_dict = {}\nfor ref_number in [1, 2, 3]:\n for coverage in coverages:\n for length in average_length_list:\n ref_name = references[ref_number - 1]\n dir = ('/home/andreas/GDrive/workspace/sparsedata/ref{}_c{}_l{}/'\n .format(ref_number, coverage, length))\n stats_dict[ref_name, coverage, length] = {'Actual':\n read_fasta_stats_file(dir + 'fasta.stat'), 'Calign':\n read_assembly_file(dir + 'calign.assembly'), 'Calign25':\n read_assembly_file(dir + 'calign_0_{}.assembly'.format(\n length // 4)), 'Calign50': read_assembly_file(dir +\n 'calign_0_{}.assembly'.format(length // 2))}\n\n\ndef write_whole_stats() ->None:\n headers = ['CalignLKH', 'CalignAP', 'CalignALKH', 'CalignAAP',\n 'CalignBLKH', 'CalignBAP']\n vals = {'CalignLKH': 0, 'CalignAP': 0, 'CalignALKH': 0, 'CalignAAP': 0,\n 'CalignBLKH': 0, 'CalignBAP': 0}\n gaps = {'CalignLKH': 0, 'CalignAP': 0, 'CalignALKH': 0, 'CalignAAP': 0,\n 'CalignBLKH': 0, 'CalignBAP': 0}\n both = {'CalignLKH': 0, 'CalignAP': 0, 'CalignALKH': 0, 'CalignAAP': 0,\n 'CalignBLKH': 0, 'CalignBAP': 0}\n atspvsapval = {'CalignLKH': 0, 'CalignAP': 0, 
'CalignALKH': 0,\n 'CalignAAP': 0, 'CalignBLKH': 0, 'CalignBAP': 0}\n atspvsap = {'CalignLKH': 0, 'CalignAP': 0, 'CalignALKH': 0, 'CalignAAP':\n 0, 'CalignBLKH': 0, 'CalignBAP': 0}\n with open(DIR + 'assembly_stats.csv', 'r') as f:\n f_csv = csv.DictReader(f, delimiter=',')\n for row in f_csv:\n for elem in headers:\n if row['ActualValue'] == row[elem + 'Value']:\n vals[elem] += 1\n if row['ActualGaps'] == row[elem + 'Gaps']:\n gaps[elem] += 1\n if row['ActualValue'] == row[elem + 'Value'] and row[\n 'ActualGaps'] == row[elem + 'Gaps']:\n both[elem] += 1\n if row['CalignLKHValue'] == row['CalignAPValue']:\n atspvsapval['CalignLKH'] += 1\n atspvsapval['CalignAP'] += 1\n if row['CalignALKHValue'] == row['CalignAAPValue']:\n atspvsapval['CalignALKH'] += 1\n atspvsapval['CalignAAP'] += 1\n if row['CalignBLKHValue'] == row['CalignBAPValue']:\n atspvsapval['CalignBLKH'] += 1\n atspvsapval['CalignBAP'] += 1\n if row['CalignLKHValue'] == row['CalignAPValue'] and row[\n 'CalignLKHGaps'] == row['CalignAPGaps']:\n atspvsap['CalignLKH'] += 1\n atspvsap['CalignAP'] += 1\n if row['CalignALKHValue'] == row['CalignAAPValue'] and row[\n 'CalignALKHGaps'] == row['CalignAAPGaps']:\n atspvsap['CalignALKH'] += 1\n atspvsap['CalignAAP'] += 1\n if row['CalignBLKHValue'] == row['CalignBAPValue'] and row[\n 'CalignBLKHGaps'] == row['CalignBAPGaps']:\n atspvsap['CalignBLKH'] += 1\n atspvsap['CalignBAP'] += 1\n with open(DIR + 'complete_stats.csv', 'w') as g:\n g_csv = csv.DictWriter(g, delimiter='&', fieldnames=headers)\n g_csv.writeheader()\n g_csv.writerow(vals)\n g_csv.writerow(gaps)\n g_csv.writerow(both)\n g_csv.writerow(atspvsapval)\n g_csv.writerow(atspvsap)\n\n\nwrite_assembly_stats(stats_dict)\nwrite_assembly_stats2(stats_dict)\nwrite_assembly_stats_tex(stats_dict)\nwrite_whole_stats()\n",
"step-5": "import csv\nimport os\nfrom collections import namedtuple\nfrom typing import List, Dict\n\nfrom config import *\n\nHEADER = ['File', 'LKHContigs', 'LKHValue', 'LKHTime', 'APContigs', 'APValue', 'APTime', 'ActualObjectiveValue']\nAssembly_Stats = namedtuple('Assembly_Stats', HEADER)\n\ndir = '/home/andreas/GDrive/workspace/sparsedata/ref1shuffled_c5_l700/calign.assembly'\n\n\ndef read_assembly_file(file: str) -> List:\n if not os.path.isfile(file):\n return [-1, -1, -1, -1, -1, -1]\n with open(file, 'r') as f:\n file_content_string = f.read()\n if 'LKH_Contigs:\\nLKH_Objective' in file_content_string:\n lkh_gaps = -1\n else:\n lkh_gaps = len(file_content_string.split('LKH_Contigs:\\n')[1].split('\\nLKH_Objective')[0].split('\\n')) - 1\n lkh_value = int(file_content_string.split('LKH_Objective_Value: ')[1].split('\\n')[0])\n lkh_time = float(file_content_string.split('LKH_Time: ')[1].split('\\n')[0])\n if 'AP_Contigs:\\nAP_Objective' in file_content_string:\n ap_gaps = -1\n else:\n ap_gaps = len(file_content_string.split('AP_Contigs:\\n')[1].split('\\nAP_Objective')[0].split('\\n')) - 1\n ap_value = int(file_content_string.split('AP_Objective_Value: ')[1].split('\\n')[0])\n ap_time = float(file_content_string.split('AP_Time: ')[1].split('\\n')[0])\n\n return [lkh_value, lkh_gaps, lkh_time, ap_value, ap_gaps, ap_time]\n\n\ndef read_fasta_stats_file(file: str) -> Dict:\n with open(file, 'r') as f:\n file_content_string = f.read()\n actual_objective_value = int(file_content_string.split('Objective function value: ')[1].split('\\n')[0])\n actual_gaps = int(file_content_string.split('Actual gaps: ')[1].split('\\n')[0])\n no_of_reads = int(file_content_string.split('Number of reads: ')[1].split('\\n')[0])\n return [no_of_reads, actual_objective_value, actual_gaps]\n\n\n# def write_assembly_stats(assembly_stats_list: List[Assembly_Stats]) -> None:\n# with open('/home/andreas/GDrive/workspace/sparsedata/assembly_stats.csv', 'w') as f:\n# f_csv = csv.writer(f, 
delimiter=',')\n# f_csv.writerow(\n# ['File', 'LKHContigs', 'LKHValue', 'LKHTime', 'APContigs', 'APValue', 'APTime', 'ActualObjectiveValue'])\n# for elem in assembly_stats_list:\n# f_csv.writerow(elem)\n\ndef write_assembly_stats(statsdict: Dict) -> None:\n with open('/home/andreas/GDrive/workspace/sparsedata/assembly_stats.csv', 'w') as f:\n f_csv = csv.writer(f, delimiter=',')\n f_csv.writerow(\n ['Genome', 'Coverage', 'AvgLength', 'Reads', 'ActualValue', 'ActualGaps',\n 'CalignLKHValue', 'CalignLKHGaps', 'CalignLKHTime',\n 'CalignAPValue', 'CalignAPGaps', 'CalignAPTime',\n 'CalignALKHValue', 'CalignALKHGaps', 'CalignALKHTime',\n 'CalignAAPValue', 'CalignAAPGaps', 'CalignAAPTime',\n 'CalignBLKHValue', 'CalignBLKHGaps', 'CalignBLKHTime',\n 'CalignBAPValue', 'CalignBAPGaps', 'CalignBAPTime',\n ])\n for ref_name in [ref1_name, ref2_name, ref3_name]:\n for c in coverages:\n for length in average_length_list:\n val = stats_dict[(ref_name, c, length)]\n row = [ref_name, c, length]\n row += val['Actual']\n row += val['Calign']\n row += val['Calign25']\n row += val['Calign50']\n\n f_csv.writerow(row)\n\n\ndef write_assembly_stats_tex(statsdict: Dict) -> None:\n with open('/home/andreas/GDrive/workspace/sparsedata/assembly_stats.tex', 'w') as f:\n for ref_name in [ref1_name, ref2_name, ref3_name]:\n if ref1_name == ref_name:\n dashline_active = ''\n else:\n dashline_active = '\\\\hdashline\\n'\n f.write('{}\\\\bfseries {}\\\\\\\\\\n'.format(dashline_active, ref_name))\n for c in coverages:\n f.write('$c = {}$\\\\\\\\\\n'.format(c))\n for length in average_length_list:\n val = stats_dict[(ref_name, c, length)]\n row = [length]\n row += [val['Actual'][0]]\n row += ['']\n row += val['Actual'][1:]\n row += ['']\n row += [*val['Calign'][0:2], '{0:.2f}'.format(val['Calign'][2]), *val['Calign'][3:5],\n '{0:.2f}'.format(val['Calign'][5])]\n row += ['']\n row += [*val['Calign25'][0:2], '{0:.2f}'.format(val['Calign25'][2]), *val['Calign25'][3:5],\n 
'{0:.2f}'.format(val['Calign25'][5])]\n row += ['']\n row += [*val['Calign50'][0:2], '{0:.2f}'.format(val['Calign50'][2]), *val['Calign50'][3:5],\n '{0:.2f}'.format(val['Calign50'][5])]\n f.write(' & '.join([str(x) for x in row]) + '\\\\\\\\\\n')\n\n\ndef write_assembly_stats2(statsdict: Dict) -> None:\n with open('/home/andreas/GDrive/workspace/sparsedata/assembly_stats2.csv', 'w') as f:\n f_csv = csv.writer(f, delimiter=',')\n refs = [ref1_name, ref2_name]\n f_csv.writerow(range(len(refs) * 9))\n\n f_csv.writerow(\n [stats_dict[(ref_name, c, l)]['Actual'][0] for ref_name in refs for c in\n coverages for l in average_length_list])\n f_csv.writerow(\n [stats_dict[(ref_name, c, l)]['Actual'][1] for ref_name in refs for c in\n coverages for l\n in average_length_list])\n f_csv.writerow(\n [stats_dict[(ref_name, c, l)]['Actual'][2] for ref_name in refs for c in\n coverages for l\n in average_length_list])\n for foo in ['Calign', 'Calign25', 'Calign50']:\n for i in range(6):\n if i in [2, 5]:\n f_csv.writerow(\n ['{0:.2f}'.format(stats_dict[(ref_name, c, l)][foo][i]) for ref_name in refs for c in\n coverages\n for l in average_length_list])\n else:\n f_csv.writerow(\n [stats_dict[(ref_name, c, l)][foo][i] for ref_name in refs for c in\n coverages\n for l in average_length_list])\n\n\nassembly_stats_list = []\nstats_dict = {}\n# for dir in sorted(glob.glob('/home/andreas/GDrive/workspace/sparsedata/ref[1,2,3]_c[5,20,40]*/')):\nfor ref_number in [1, 2, 3]:\n for coverage in coverages:\n for length in average_length_list:\n # file_sub_dir = dir.split('/')[-2] # example ref1_c5_l100\n # ref_number = int(file_sub_dir.split('ref')[1].split('_')[0])\n ref_name = references[ref_number - 1]\n # coverage = int(file_sub_dir.split('_c')[1].split('_')[0])\n # length = int(file_sub_dir.split('_l')[1])\n dir = '/home/andreas/GDrive/workspace/sparsedata/ref{}_c{}_l{}/'.format(ref_number, coverage, length)\n stats_dict[(ref_name, coverage, length)] = {'Actual': 
read_fasta_stats_file(dir + 'fasta.stat'),\n 'Calign': read_assembly_file(dir + 'calign.assembly'),\n 'Calign25': read_assembly_file(\n dir + 'calign_0_{}.assembly'.format(length // 4)),\n 'Calign50': read_assembly_file(\n dir + 'calign_0_{}.assembly'.format(length // 2))}\n\n\n # dir = '{}-{}-{}'.format(references[ref_number - 1], coverage, length)\n # assembly_stats_list.append(\n # Assembly_Stats(dir, len(lkh_contigs), lkh_value, lkh_time, len(ap_contigs), ap_value, ap_time,\n # actual_Objective_value))\n\n\ndef write_whole_stats() -> None:\n headers = ['CalignLKH', 'CalignAP', 'CalignALKH', 'CalignAAP', 'CalignBLKH',\n 'CalignBAP']\n vals = {'CalignLKH': 0, 'CalignAP': 0, 'CalignALKH': 0, 'CalignAAP': 0, 'CalignBLKH': 0,\n 'CalignBAP': 0}\n gaps = {'CalignLKH': 0, 'CalignAP': 0, 'CalignALKH': 0, 'CalignAAP': 0, 'CalignBLKH': 0,\n 'CalignBAP': 0}\n both = {'CalignLKH': 0, 'CalignAP': 0, 'CalignALKH': 0, 'CalignAAP': 0, 'CalignBLKH': 0,\n 'CalignBAP': 0}\n atspvsapval = {'CalignLKH': 0, 'CalignAP': 0, 'CalignALKH': 0, 'CalignAAP': 0, 'CalignBLKH': 0,\n 'CalignBAP': 0}\n atspvsap = {'CalignLKH': 0, 'CalignAP': 0, 'CalignALKH': 0, 'CalignAAP': 0, 'CalignBLKH': 0,\n 'CalignBAP': 0}\n with open(DIR + 'assembly_stats.csv', 'r') as f:\n f_csv = csv.DictReader(f, delimiter=',')\n for row in f_csv:\n for elem in headers:\n if row['ActualValue'] == row[elem + 'Value']:\n vals[elem] += 1\n if row['ActualGaps'] == row[elem + 'Gaps']:\n gaps[elem] += 1\n if row['ActualValue'] == row[elem + 'Value'] and row['ActualGaps'] == row[elem + 'Gaps']:\n both[elem] += 1\n if row['CalignLKHValue'] == row['CalignAPValue']:\n atspvsapval['CalignLKH'] += 1\n atspvsapval['CalignAP'] += 1\n if row['CalignALKHValue'] == row['CalignAAPValue']:\n atspvsapval['CalignALKH'] += 1\n atspvsapval['CalignAAP'] += 1\n if row['CalignBLKHValue'] == row['CalignBAPValue']:\n atspvsapval['CalignBLKH'] += 1\n atspvsapval['CalignBAP'] += 1\n if row['CalignLKHValue'] == row['CalignAPValue'] and 
row['CalignLKHGaps'] == row['CalignAPGaps']:\n atspvsap['CalignLKH'] += 1\n atspvsap['CalignAP'] += 1\n if row['CalignALKHValue'] == row['CalignAAPValue'] and row['CalignALKHGaps'] == row['CalignAAPGaps']:\n atspvsap['CalignALKH'] += 1\n atspvsap['CalignAAP'] += 1\n if row['CalignBLKHValue'] == row['CalignBAPValue'] and row['CalignBLKHGaps'] == row['CalignBAPGaps']:\n atspvsap['CalignBLKH'] += 1\n atspvsap['CalignBAP'] += 1\n with open(DIR + 'complete_stats.csv', 'w') as g:\n g_csv = csv.DictWriter(g, delimiter='&', fieldnames=headers)\n g_csv.writeheader()\n g_csv.writerow(vals)\n g_csv.writerow(gaps)\n g_csv.writerow(both)\n g_csv.writerow(atspvsapval)\n g_csv.writerow(atspvsap)\n\n\nwrite_assembly_stats(stats_dict)\nwrite_assembly_stats2(stats_dict)\nwrite_assembly_stats_tex(stats_dict)\nwrite_whole_stats()\n",
"step-ids": [
5,
7,
8,
9,
10
]
}
|
[
5,
7,
8,
9,
10
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
CARD_SIZE = 70, 90
SPACING = 3
<|reserved_special_token_1|>
CARD_SIZE = (70, 90)
SPACING = 3
|
flexible
|
{
"blob_id": "b8ebbef7403a71d6165a5462bc08e2634b4cebc5",
"index": 4287,
"step-1": "<mask token>\n",
"step-2": "CARD_SIZE = 70, 90\nSPACING = 3\n",
"step-3": "CARD_SIZE = (70, 90)\nSPACING = 3",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
from math import log
result = []
formula = int(input("For exit press 0\nChoose the formula #1 #2 #3: "))
while (formula >= 0) and (formula <= 3):
a = float(input("Enter a:"))
min_x = float(input("Enter minx:"))
max_x = float(input("Enter maxx:"))
step = int(input("Enter steps:"))
x = min_x
if formula == 1:
d = (-45*a**2+26*a*x+7*x**2)
if d !=0:
for i in range(step):
while x <= max_x:
G = ((-7*(4*a**2+15*a*x-4*x**2))/d)
result.append(G)
print("x=%.3f \tG=%.3f" % (float(x), G))
x += (max_x-min_x)/(step-1)
break
else:
print("Err")
elif formula == 2:
for i in range(step):
while x <= max_x:
F = (2**(40*(a**2)-107*a*x+63*(x**2)))
result.append(F)
print("x=%.3f \tF=%.3f" % (float(x), F))
x += (max_x-min_x)/(step-1)
break
elif formula == 3:
for i in range(step):
while x <= max_x:
Y = log(a**2-2*a*x+3*x**2+1)
result.append(Y)
print("x=%.3f \tY=%.3f" % (float(x), Y))
x += (max_x-min_x)/(step-1)
break
else:
print("Err")
print("Max.res. = ", max(result))
print("Min.res. = ", min(result))
|
normal
|
{
"blob_id": "44c4a1f4b32b45fd95eb8b0a42a718d05d967e04",
"index": 2536,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwhile formula >= 0 and formula <= 3:\n a = float(input('Enter a:'))\n min_x = float(input('Enter minx:'))\n max_x = float(input('Enter maxx:'))\n step = int(input('Enter steps:'))\n x = min_x\n if formula == 1:\n d = -45 * a ** 2 + 26 * a * x + 7 * x ** 2\n if d != 0:\n for i in range(step):\n while x <= max_x:\n G = -7 * (4 * a ** 2 + 15 * a * x - 4 * x ** 2) / d\n result.append(G)\n print('x=%.3f \\tG=%.3f' % (float(x), G))\n x += (max_x - min_x) / (step - 1)\n break\n else:\n print('Err')\n elif formula == 2:\n for i in range(step):\n while x <= max_x:\n F = 2 ** (40 * a ** 2 - 107 * a * x + 63 * x ** 2)\n result.append(F)\n print('x=%.3f \\tF=%.3f' % (float(x), F))\n x += (max_x - min_x) / (step - 1)\n break\n elif formula == 3:\n for i in range(step):\n while x <= max_x:\n Y = log(a ** 2 - 2 * a * x + 3 * x ** 2 + 1)\n result.append(Y)\n print('x=%.3f \\tY=%.3f' % (float(x), Y))\n x += (max_x - min_x) / (step - 1)\n break\nelse:\n print('Err')\nprint('Max.res. = ', max(result))\nprint('Min.res. = ', min(result))\n",
"step-3": "<mask token>\nresult = []\nformula = int(input(\"\"\"For exit press 0\nChoose the formula #1 #2 #3: \"\"\"))\nwhile formula >= 0 and formula <= 3:\n a = float(input('Enter a:'))\n min_x = float(input('Enter minx:'))\n max_x = float(input('Enter maxx:'))\n step = int(input('Enter steps:'))\n x = min_x\n if formula == 1:\n d = -45 * a ** 2 + 26 * a * x + 7 * x ** 2\n if d != 0:\n for i in range(step):\n while x <= max_x:\n G = -7 * (4 * a ** 2 + 15 * a * x - 4 * x ** 2) / d\n result.append(G)\n print('x=%.3f \\tG=%.3f' % (float(x), G))\n x += (max_x - min_x) / (step - 1)\n break\n else:\n print('Err')\n elif formula == 2:\n for i in range(step):\n while x <= max_x:\n F = 2 ** (40 * a ** 2 - 107 * a * x + 63 * x ** 2)\n result.append(F)\n print('x=%.3f \\tF=%.3f' % (float(x), F))\n x += (max_x - min_x) / (step - 1)\n break\n elif formula == 3:\n for i in range(step):\n while x <= max_x:\n Y = log(a ** 2 - 2 * a * x + 3 * x ** 2 + 1)\n result.append(Y)\n print('x=%.3f \\tY=%.3f' % (float(x), Y))\n x += (max_x - min_x) / (step - 1)\n break\nelse:\n print('Err')\nprint('Max.res. = ', max(result))\nprint('Min.res. = ', min(result))\n",
"step-4": "from math import log\nresult = []\nformula = int(input(\"\"\"For exit press 0\nChoose the formula #1 #2 #3: \"\"\"))\nwhile formula >= 0 and formula <= 3:\n a = float(input('Enter a:'))\n min_x = float(input('Enter minx:'))\n max_x = float(input('Enter maxx:'))\n step = int(input('Enter steps:'))\n x = min_x\n if formula == 1:\n d = -45 * a ** 2 + 26 * a * x + 7 * x ** 2\n if d != 0:\n for i in range(step):\n while x <= max_x:\n G = -7 * (4 * a ** 2 + 15 * a * x - 4 * x ** 2) / d\n result.append(G)\n print('x=%.3f \\tG=%.3f' % (float(x), G))\n x += (max_x - min_x) / (step - 1)\n break\n else:\n print('Err')\n elif formula == 2:\n for i in range(step):\n while x <= max_x:\n F = 2 ** (40 * a ** 2 - 107 * a * x + 63 * x ** 2)\n result.append(F)\n print('x=%.3f \\tF=%.3f' % (float(x), F))\n x += (max_x - min_x) / (step - 1)\n break\n elif formula == 3:\n for i in range(step):\n while x <= max_x:\n Y = log(a ** 2 - 2 * a * x + 3 * x ** 2 + 1)\n result.append(Y)\n print('x=%.3f \\tY=%.3f' % (float(x), Y))\n x += (max_x - min_x) / (step - 1)\n break\nelse:\n print('Err')\nprint('Max.res. = ', max(result))\nprint('Min.res. = ', min(result))\n",
"step-5": "from math import log\n\nresult = []\n\nformula = int(input(\"For exit press 0\\nChoose the formula #1 #2 #3: \"))\nwhile (formula >= 0) and (formula <= 3):\n a = float(input(\"Enter a:\"))\n min_x = float(input(\"Enter minx:\"))\n max_x = float(input(\"Enter maxx:\"))\n step = int(input(\"Enter steps:\"))\n x = min_x\n\n if formula == 1:\n d = (-45*a**2+26*a*x+7*x**2)\n if d !=0:\n for i in range(step):\n while x <= max_x:\n G = ((-7*(4*a**2+15*a*x-4*x**2))/d)\n result.append(G)\n print(\"x=%.3f \\tG=%.3f\" % (float(x), G))\n x += (max_x-min_x)/(step-1)\n break\n else:\n print(\"Err\")\n\n elif formula == 2:\n for i in range(step):\n while x <= max_x:\n F = (2**(40*(a**2)-107*a*x+63*(x**2)))\n result.append(F)\n print(\"x=%.3f \\tF=%.3f\" % (float(x), F))\n x += (max_x-min_x)/(step-1)\n break\n\n elif formula == 3:\n for i in range(step):\n while x <= max_x:\n Y = log(a**2-2*a*x+3*x**2+1)\n result.append(Y)\n print(\"x=%.3f \\tY=%.3f\" % (float(x), Y))\n x += (max_x-min_x)/(step-1)\n break\nelse:\n print(\"Err\")\nprint(\"Max.res. = \", max(result))\nprint(\"Min.res. = \", min(result))",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
for _ in range(int(input())):
n = int(input())
temp = n
rev = 0
while temp:
rev = rev * 10 + temp % 10
temp //= 10
print('Yes' if rev == n else 'No')
<|reserved_special_token_1|>
"""
Problem Link: https://practice.geeksforgeeks.org/problems/palindrome/0
Given an integer, check whether it is a palindrome or not.
Input:
The first line of input contains an integer T denoting the number of test cases.
For each test case there will be single line containing single integer N.
Output:
Print "Yes" or "No" (without quotes) depending on whether the number is palindrome or not.
Constraints:
1 <= T <= 1000
1 <= N <= 10000
Example:
Input:
3
6
167
55555
Output:
Yes
No
Yes
"""
for _ in range(int(input())):
n = int(input())
temp = n
rev = 0
while temp:
rev = (rev*10)+(temp%10)
temp //= 10
print("Yes" if rev == n else "No")
|
flexible
|
{
"blob_id": "ea12ede51881f6e826a044df5d7aba457c434658",
"index": 6050,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor _ in range(int(input())):\n n = int(input())\n temp = n\n rev = 0\n while temp:\n rev = rev * 10 + temp % 10\n temp //= 10\n print('Yes' if rev == n else 'No')\n",
"step-3": "\"\"\"\nProblem Link: https://practice.geeksforgeeks.org/problems/palindrome/0\n\nGiven an integer, check whether it is a palindrome or not.\n\nInput:\nThe first line of input contains an integer T denoting the number of test cases. \nFor each test case there will be single line containing single integer N.\n\nOutput:\nPrint \"Yes\" or \"No\" (without quotes) depending on whether the number is palindrome or not.\n\nConstraints:\n1 <= T <= 1000\n1 <= N <= 10000\n\nExample:\nInput:\n3\n6\n167\n55555\n\nOutput:\nYes\nNo\nYes\n\"\"\"\nfor _ in range(int(input())):\n n = int(input())\n temp = n\n rev = 0\n while temp:\n rev = (rev*10)+(temp%10)\n temp //= 10\n print(\"Yes\" if rev == n else \"No\")",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class BaseException(Exception):
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class BaseException(Exception):
def __init__(self, message=''):
super(BaseException, self).__init__()
self.message = message
<|reserved_special_token_1|>
class BaseException(Exception):
def __init__(self, message=""):
super(BaseException, self).__init__()
self.message = message
|
flexible
|
{
"blob_id": "2ee1539e051677ad38ab7727ff5edefb1aebd015",
"index": 9946,
"step-1": "<mask token>\n",
"step-2": "class BaseException(Exception):\n <mask token>\n",
"step-3": "class BaseException(Exception):\n\n def __init__(self, message=''):\n super(BaseException, self).__init__()\n self.message = message\n",
"step-4": "class BaseException(Exception):\n def __init__(self, message=\"\"):\n super(BaseException, self).__init__()\n self.message = message\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import random
OPTIONS = ['rock', 'paper', 'scissors']
def get_human_choice():
print('(1) Rock\n(2) Paper\n(3) Scissors')
return OPTIONS[int(input('Enter the number of your choice: ')) - 1]
def get_computer_choice():
return random.choice(OPTIONS)
def print_choices(human_choice, computer_choice):
print(f'You chose {human_choice.title()}')
print(f'The computer chose {computer_choice.title()}')
def eval_game_result(human_choice, computer_choice):
if human_choice == computer_choice:
return 'draw'
elif human_choice == 'rock':
return 'human' if computer_choice == 'scissors' else 'computer'
elif human_choice == 'paper':
return 'human' if computer_choice == 'rock' else 'computer'
else:
return 'human' if computer_choice == 'paper' else 'computer'
def compose_output_message(result, human_choice, computer_choice):
if result == 'draw':
return 'Draw!'
elif result == 'human':
return f'Yes, {human_choice} beat {computer_choice}!'
else:
return f'Sorry, {computer_choice} beat {human_choice}'
def print_result(message):
print(message)
human_choice = get_human_choice()
computer_choice = get_computer_choice()
print_choices(human_choice, computer_choice)
game_result = eval_game_result(human_choice, computer_choice)
print_result(compose_output_message(game_result, human_choice, computer_choice)
)
|
normal
|
{
"blob_id": "2e6bce05c8ba21aa322e306d2cdb8871531d7341",
"index": 5499,
"step-1": "<mask token>\n\n\ndef get_human_choice():\n print('(1) Rock\\n(2) Paper\\n(3) Scissors')\n return OPTIONS[int(input('Enter the number of your choice: ')) - 1]\n\n\ndef get_computer_choice():\n return random.choice(OPTIONS)\n\n\ndef print_choices(human_choice, computer_choice):\n print(f'You chose {human_choice.title()}')\n print(f'The computer chose {computer_choice.title()}')\n\n\ndef eval_game_result(human_choice, computer_choice):\n if human_choice == computer_choice:\n return 'draw'\n elif human_choice == 'rock':\n return 'human' if computer_choice == 'scissors' else 'computer'\n elif human_choice == 'paper':\n return 'human' if computer_choice == 'rock' else 'computer'\n else:\n return 'human' if computer_choice == 'paper' else 'computer'\n\n\ndef compose_output_message(result, human_choice, computer_choice):\n if result == 'draw':\n return 'Draw!'\n elif result == 'human':\n return f'Yes, {human_choice} beat {computer_choice}!'\n else:\n return f'Sorry, {computer_choice} beat {human_choice}'\n\n\ndef print_result(message):\n print(message)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_human_choice():\n print('(1) Rock\\n(2) Paper\\n(3) Scissors')\n return OPTIONS[int(input('Enter the number of your choice: ')) - 1]\n\n\ndef get_computer_choice():\n return random.choice(OPTIONS)\n\n\ndef print_choices(human_choice, computer_choice):\n print(f'You chose {human_choice.title()}')\n print(f'The computer chose {computer_choice.title()}')\n\n\ndef eval_game_result(human_choice, computer_choice):\n if human_choice == computer_choice:\n return 'draw'\n elif human_choice == 'rock':\n return 'human' if computer_choice == 'scissors' else 'computer'\n elif human_choice == 'paper':\n return 'human' if computer_choice == 'rock' else 'computer'\n else:\n return 'human' if computer_choice == 'paper' else 'computer'\n\n\ndef compose_output_message(result, human_choice, computer_choice):\n if result == 'draw':\n return 'Draw!'\n elif result == 'human':\n return f'Yes, {human_choice} beat {computer_choice}!'\n else:\n return f'Sorry, {computer_choice} beat {human_choice}'\n\n\ndef print_result(message):\n print(message)\n\n\n<mask token>\nprint_choices(human_choice, computer_choice)\n<mask token>\nprint_result(compose_output_message(game_result, human_choice, computer_choice)\n )\n",
"step-3": "<mask token>\nOPTIONS = ['rock', 'paper', 'scissors']\n\n\ndef get_human_choice():\n print('(1) Rock\\n(2) Paper\\n(3) Scissors')\n return OPTIONS[int(input('Enter the number of your choice: ')) - 1]\n\n\ndef get_computer_choice():\n return random.choice(OPTIONS)\n\n\ndef print_choices(human_choice, computer_choice):\n print(f'You chose {human_choice.title()}')\n print(f'The computer chose {computer_choice.title()}')\n\n\ndef eval_game_result(human_choice, computer_choice):\n if human_choice == computer_choice:\n return 'draw'\n elif human_choice == 'rock':\n return 'human' if computer_choice == 'scissors' else 'computer'\n elif human_choice == 'paper':\n return 'human' if computer_choice == 'rock' else 'computer'\n else:\n return 'human' if computer_choice == 'paper' else 'computer'\n\n\ndef compose_output_message(result, human_choice, computer_choice):\n if result == 'draw':\n return 'Draw!'\n elif result == 'human':\n return f'Yes, {human_choice} beat {computer_choice}!'\n else:\n return f'Sorry, {computer_choice} beat {human_choice}'\n\n\ndef print_result(message):\n print(message)\n\n\nhuman_choice = get_human_choice()\ncomputer_choice = get_computer_choice()\nprint_choices(human_choice, computer_choice)\ngame_result = eval_game_result(human_choice, computer_choice)\nprint_result(compose_output_message(game_result, human_choice, computer_choice)\n )\n",
"step-4": "import random\nOPTIONS = ['rock', 'paper', 'scissors']\n\n\ndef get_human_choice():\n print('(1) Rock\\n(2) Paper\\n(3) Scissors')\n return OPTIONS[int(input('Enter the number of your choice: ')) - 1]\n\n\ndef get_computer_choice():\n return random.choice(OPTIONS)\n\n\ndef print_choices(human_choice, computer_choice):\n print(f'You chose {human_choice.title()}')\n print(f'The computer chose {computer_choice.title()}')\n\n\ndef eval_game_result(human_choice, computer_choice):\n if human_choice == computer_choice:\n return 'draw'\n elif human_choice == 'rock':\n return 'human' if computer_choice == 'scissors' else 'computer'\n elif human_choice == 'paper':\n return 'human' if computer_choice == 'rock' else 'computer'\n else:\n return 'human' if computer_choice == 'paper' else 'computer'\n\n\ndef compose_output_message(result, human_choice, computer_choice):\n if result == 'draw':\n return 'Draw!'\n elif result == 'human':\n return f'Yes, {human_choice} beat {computer_choice}!'\n else:\n return f'Sorry, {computer_choice} beat {human_choice}'\n\n\ndef print_result(message):\n print(message)\n\n\nhuman_choice = get_human_choice()\ncomputer_choice = get_computer_choice()\nprint_choices(human_choice, computer_choice)\ngame_result = eval_game_result(human_choice, computer_choice)\nprint_result(compose_output_message(game_result, human_choice, computer_choice)\n )\n",
"step-5": null,
"step-ids": [
6,
7,
8,
9
]
}
|
[
6,
7,
8,
9
] |
<|reserved_special_token_0|>
class WGANUpdater(chainer.training.updaters.StandardUpdater):
def __init__(self, *args, **kwargs):
self.gen, self.dis = kwargs.pop('models')
self.n_dis = kwargs.pop('n_dis')
self.lam = kwargs.pop('lam')
self.iteration = 0
super(WGANUpdater, self).__init__(*args, **kwargs)
def loss_gen(self, gen, y_fake):
batchsize = len(y_fake)
loss = F.sum(-y_fake) / batchsize
chainer.reporter.report({'loss': loss}, gen)
return loss
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class WGANUpdater(chainer.training.updaters.StandardUpdater):
def __init__(self, *args, **kwargs):
self.gen, self.dis = kwargs.pop('models')
self.n_dis = kwargs.pop('n_dis')
self.lam = kwargs.pop('lam')
self.iteration = 0
super(WGANUpdater, self).__init__(*args, **kwargs)
def loss_gen(self, gen, y_fake):
batchsize = len(y_fake)
loss = F.sum(-y_fake) / batchsize
chainer.reporter.report({'loss': loss}, gen)
return loss
def loss_dis(self, dis, y_real, y_fake, x_real, x_fake):
batchsize = len(y_fake)
xp = dis.xp
eps = xp.random.uniform(0, 1, size=batchsize).astype('f')[:, None,
None, None]
x_mid = eps * x_real + (1.0 - eps) * x_fake
y_mid, _ = self.dis(x_mid)
grad, = chainer.grad([y_mid], [x_mid], enable_double_backprop=True)
grad = F.sqrt(F.batch_l2_norm_squared(grad))
loss_grad = self.lam * F.mean_squared_error(grad, xp.ones_like(grad
.data))
loss = F.sum(-y_real) / batchsize
loss += F.sum(y_fake) / batchsize
wasserstein_distance = -loss
loss += loss_grad
chainer.reporter.report({'wasserstein_distance':
wasserstein_distance, 'loss_grad': loss_grad})
chainer.reporter.report({'loss': loss}, dis)
return loss
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class WGANUpdater(chainer.training.updaters.StandardUpdater):
def __init__(self, *args, **kwargs):
self.gen, self.dis = kwargs.pop('models')
self.n_dis = kwargs.pop('n_dis')
self.lam = kwargs.pop('lam')
self.iteration = 0
super(WGANUpdater, self).__init__(*args, **kwargs)
def loss_gen(self, gen, y_fake):
batchsize = len(y_fake)
loss = F.sum(-y_fake) / batchsize
chainer.reporter.report({'loss': loss}, gen)
return loss
def loss_dis(self, dis, y_real, y_fake, x_real, x_fake):
batchsize = len(y_fake)
xp = dis.xp
eps = xp.random.uniform(0, 1, size=batchsize).astype('f')[:, None,
None, None]
x_mid = eps * x_real + (1.0 - eps) * x_fake
y_mid, _ = self.dis(x_mid)
grad, = chainer.grad([y_mid], [x_mid], enable_double_backprop=True)
grad = F.sqrt(F.batch_l2_norm_squared(grad))
loss_grad = self.lam * F.mean_squared_error(grad, xp.ones_like(grad
.data))
loss = F.sum(-y_real) / batchsize
loss += F.sum(y_fake) / batchsize
wasserstein_distance = -loss
loss += loss_grad
chainer.reporter.report({'wasserstein_distance':
wasserstein_distance, 'loss_grad': loss_grad})
chainer.reporter.report({'loss': loss}, dis)
return loss
def update_core(self):
gen_optimizer = self.get_optimizer('gen')
dis_optimizer = self.get_optimizer('dis')
xp = self.gen.xp
for i in range(self.n_dis):
batch = self.get_iterator('main').next()
batchsize = len(batch)
x = []
for j in range(batchsize):
x.append(np.asarray(batch[j]).astype('f'))
x_real = Variable(xp.asarray(x))
y_real, _ = self.dis(x_real)
z = Variable(xp.asarray(self.gen.make_hidden(batchsize)))
x_fake = self.gen(z)
y_fake, _ = self.dis(x_fake)
if i == 0:
gen_optimizer.update(self.loss_gen, self.gen, y_fake)
x_fake.unchain_backward()
dis_optimizer.update(self.loss_dis, self.dis, y_real, y_fake,
x_real, x_fake)
<|reserved_special_token_1|>
import numpy as np
import chainer
import chainer.functions as F
from chainer import Variable
from chainer.dataset import convert
class WGANUpdater(chainer.training.updaters.StandardUpdater):
def __init__(self, *args, **kwargs):
self.gen, self.dis = kwargs.pop('models')
self.n_dis = kwargs.pop('n_dis')
self.lam = kwargs.pop('lam')
self.iteration = 0
super(WGANUpdater, self).__init__(*args, **kwargs)
def loss_gen(self, gen, y_fake):
batchsize = len(y_fake)
loss = F.sum(-y_fake) / batchsize
chainer.reporter.report({'loss': loss}, gen)
return loss
def loss_dis(self, dis, y_real, y_fake, x_real, x_fake):
batchsize = len(y_fake)
xp = dis.xp
eps = xp.random.uniform(0, 1, size=batchsize).astype('f')[:, None,
None, None]
x_mid = eps * x_real + (1.0 - eps) * x_fake
y_mid, _ = self.dis(x_mid)
grad, = chainer.grad([y_mid], [x_mid], enable_double_backprop=True)
grad = F.sqrt(F.batch_l2_norm_squared(grad))
loss_grad = self.lam * F.mean_squared_error(grad, xp.ones_like(grad
.data))
loss = F.sum(-y_real) / batchsize
loss += F.sum(y_fake) / batchsize
wasserstein_distance = -loss
loss += loss_grad
chainer.reporter.report({'wasserstein_distance':
wasserstein_distance, 'loss_grad': loss_grad})
chainer.reporter.report({'loss': loss}, dis)
return loss
def update_core(self):
gen_optimizer = self.get_optimizer('gen')
dis_optimizer = self.get_optimizer('dis')
xp = self.gen.xp
for i in range(self.n_dis):
batch = self.get_iterator('main').next()
batchsize = len(batch)
x = []
for j in range(batchsize):
x.append(np.asarray(batch[j]).astype('f'))
x_real = Variable(xp.asarray(x))
y_real, _ = self.dis(x_real)
z = Variable(xp.asarray(self.gen.make_hidden(batchsize)))
x_fake = self.gen(z)
y_fake, _ = self.dis(x_fake)
if i == 0:
gen_optimizer.update(self.loss_gen, self.gen, y_fake)
x_fake.unchain_backward()
dis_optimizer.update(self.loss_dis, self.dis, y_real, y_fake,
x_real, x_fake)
<|reserved_special_token_1|>
#!/usr/bin/python3
#https://github.com/pfnet-research/chainer-gan-lib/blob/master/wgan_gp/updater.py
import numpy as np
import chainer
import chainer.functions as F
from chainer import Variable
from chainer.dataset import convert
class WGANUpdater(chainer.training.updaters.StandardUpdater):
def __init__(self, *args, **kwargs):
self.gen, self.dis = kwargs.pop('models')
self.n_dis = kwargs.pop('n_dis')
self.lam = kwargs.pop('lam')
self.iteration = 0
super(WGANUpdater, self).__init__(*args, **kwargs)
def loss_gen(self, gen, y_fake):
batchsize = len(y_fake)
loss = F.sum(-y_fake)/batchsize
chainer.reporter.report({'loss': loss}, gen)
return loss
def loss_dis(self, dis, y_real, y_fake, x_real, x_fake):
batchsize = len(y_fake)
xp = dis.xp
eps = xp.random.uniform(0, 1, size=batchsize)\
.astype("f")[:, None, None, None]
x_mid = eps * x_real + (1.0 - eps) * x_fake
y_mid,_ = self.dis(x_mid)
grad, = chainer.grad([y_mid], [x_mid], enable_double_backprop=True)
grad = F.sqrt(F.batch_l2_norm_squared(grad))
loss_grad = self.lam * F.mean_squared_error(grad,
xp.ones_like(grad.data))
loss = F.sum(-y_real) / batchsize
loss += F.sum(y_fake) / batchsize
wasserstein_distance = -loss
loss += loss_grad
chainer.reporter.report({'wasserstein_distance': wasserstein_distance,
'loss_grad':loss_grad})
chainer.reporter.report({'loss': loss}, dis)
return loss
def update_core(self):
gen_optimizer = self.get_optimizer('gen')
dis_optimizer = self.get_optimizer('dis')
xp = self.gen.xp
for i in range(self.n_dis):
batch = self.get_iterator('main').next()
batchsize = len(batch)
x = []
for j in range(batchsize):
x.append(np.asarray(batch[j]).astype("f"))
x_real = Variable(xp.asarray(x))
y_real,_ = self.dis(x_real)
z = Variable(xp.asarray(self.gen.make_hidden(batchsize)))
x_fake = self.gen(z)
y_fake,_ = self.dis(x_fake)
if i == 0:
gen_optimizer.update(self.loss_gen, self.gen, y_fake)
x_fake.unchain_backward()
dis_optimizer.update(self.loss_dis, self.dis,
y_real, y_fake, x_real, x_fake)
|
flexible
|
{
"blob_id": "a7099b2506de08893ca849146813505d88784895",
"index": 2402,
"step-1": "<mask token>\n\n\nclass WGANUpdater(chainer.training.updaters.StandardUpdater):\n\n def __init__(self, *args, **kwargs):\n self.gen, self.dis = kwargs.pop('models')\n self.n_dis = kwargs.pop('n_dis')\n self.lam = kwargs.pop('lam')\n self.iteration = 0\n super(WGANUpdater, self).__init__(*args, **kwargs)\n\n def loss_gen(self, gen, y_fake):\n batchsize = len(y_fake)\n loss = F.sum(-y_fake) / batchsize\n chainer.reporter.report({'loss': loss}, gen)\n return loss\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass WGANUpdater(chainer.training.updaters.StandardUpdater):\n\n def __init__(self, *args, **kwargs):\n self.gen, self.dis = kwargs.pop('models')\n self.n_dis = kwargs.pop('n_dis')\n self.lam = kwargs.pop('lam')\n self.iteration = 0\n super(WGANUpdater, self).__init__(*args, **kwargs)\n\n def loss_gen(self, gen, y_fake):\n batchsize = len(y_fake)\n loss = F.sum(-y_fake) / batchsize\n chainer.reporter.report({'loss': loss}, gen)\n return loss\n\n def loss_dis(self, dis, y_real, y_fake, x_real, x_fake):\n batchsize = len(y_fake)\n xp = dis.xp\n eps = xp.random.uniform(0, 1, size=batchsize).astype('f')[:, None,\n None, None]\n x_mid = eps * x_real + (1.0 - eps) * x_fake\n y_mid, _ = self.dis(x_mid)\n grad, = chainer.grad([y_mid], [x_mid], enable_double_backprop=True)\n grad = F.sqrt(F.batch_l2_norm_squared(grad))\n loss_grad = self.lam * F.mean_squared_error(grad, xp.ones_like(grad\n .data))\n loss = F.sum(-y_real) / batchsize\n loss += F.sum(y_fake) / batchsize\n wasserstein_distance = -loss\n loss += loss_grad\n chainer.reporter.report({'wasserstein_distance':\n wasserstein_distance, 'loss_grad': loss_grad})\n chainer.reporter.report({'loss': loss}, dis)\n return loss\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass WGANUpdater(chainer.training.updaters.StandardUpdater):\n\n def __init__(self, *args, **kwargs):\n self.gen, self.dis = kwargs.pop('models')\n self.n_dis = kwargs.pop('n_dis')\n self.lam = kwargs.pop('lam')\n self.iteration = 0\n super(WGANUpdater, self).__init__(*args, **kwargs)\n\n def loss_gen(self, gen, y_fake):\n batchsize = len(y_fake)\n loss = F.sum(-y_fake) / batchsize\n chainer.reporter.report({'loss': loss}, gen)\n return loss\n\n def loss_dis(self, dis, y_real, y_fake, x_real, x_fake):\n batchsize = len(y_fake)\n xp = dis.xp\n eps = xp.random.uniform(0, 1, size=batchsize).astype('f')[:, None,\n None, None]\n x_mid = eps * x_real + (1.0 - eps) * x_fake\n y_mid, _ = self.dis(x_mid)\n grad, = chainer.grad([y_mid], [x_mid], enable_double_backprop=True)\n grad = F.sqrt(F.batch_l2_norm_squared(grad))\n loss_grad = self.lam * F.mean_squared_error(grad, xp.ones_like(grad\n .data))\n loss = F.sum(-y_real) / batchsize\n loss += F.sum(y_fake) / batchsize\n wasserstein_distance = -loss\n loss += loss_grad\n chainer.reporter.report({'wasserstein_distance':\n wasserstein_distance, 'loss_grad': loss_grad})\n chainer.reporter.report({'loss': loss}, dis)\n return loss\n\n def update_core(self):\n gen_optimizer = self.get_optimizer('gen')\n dis_optimizer = self.get_optimizer('dis')\n xp = self.gen.xp\n for i in range(self.n_dis):\n batch = self.get_iterator('main').next()\n batchsize = len(batch)\n x = []\n for j in range(batchsize):\n x.append(np.asarray(batch[j]).astype('f'))\n x_real = Variable(xp.asarray(x))\n y_real, _ = self.dis(x_real)\n z = Variable(xp.asarray(self.gen.make_hidden(batchsize)))\n x_fake = self.gen(z)\n y_fake, _ = self.dis(x_fake)\n if i == 0:\n gen_optimizer.update(self.loss_gen, self.gen, y_fake)\n x_fake.unchain_backward()\n dis_optimizer.update(self.loss_dis, self.dis, y_real, y_fake,\n x_real, x_fake)\n",
"step-4": "import numpy as np\nimport chainer\nimport chainer.functions as F\nfrom chainer import Variable\nfrom chainer.dataset import convert\n\n\nclass WGANUpdater(chainer.training.updaters.StandardUpdater):\n\n def __init__(self, *args, **kwargs):\n self.gen, self.dis = kwargs.pop('models')\n self.n_dis = kwargs.pop('n_dis')\n self.lam = kwargs.pop('lam')\n self.iteration = 0\n super(WGANUpdater, self).__init__(*args, **kwargs)\n\n def loss_gen(self, gen, y_fake):\n batchsize = len(y_fake)\n loss = F.sum(-y_fake) / batchsize\n chainer.reporter.report({'loss': loss}, gen)\n return loss\n\n def loss_dis(self, dis, y_real, y_fake, x_real, x_fake):\n batchsize = len(y_fake)\n xp = dis.xp\n eps = xp.random.uniform(0, 1, size=batchsize).astype('f')[:, None,\n None, None]\n x_mid = eps * x_real + (1.0 - eps) * x_fake\n y_mid, _ = self.dis(x_mid)\n grad, = chainer.grad([y_mid], [x_mid], enable_double_backprop=True)\n grad = F.sqrt(F.batch_l2_norm_squared(grad))\n loss_grad = self.lam * F.mean_squared_error(grad, xp.ones_like(grad\n .data))\n loss = F.sum(-y_real) / batchsize\n loss += F.sum(y_fake) / batchsize\n wasserstein_distance = -loss\n loss += loss_grad\n chainer.reporter.report({'wasserstein_distance':\n wasserstein_distance, 'loss_grad': loss_grad})\n chainer.reporter.report({'loss': loss}, dis)\n return loss\n\n def update_core(self):\n gen_optimizer = self.get_optimizer('gen')\n dis_optimizer = self.get_optimizer('dis')\n xp = self.gen.xp\n for i in range(self.n_dis):\n batch = self.get_iterator('main').next()\n batchsize = len(batch)\n x = []\n for j in range(batchsize):\n x.append(np.asarray(batch[j]).astype('f'))\n x_real = Variable(xp.asarray(x))\n y_real, _ = self.dis(x_real)\n z = Variable(xp.asarray(self.gen.make_hidden(batchsize)))\n x_fake = self.gen(z)\n y_fake, _ = self.dis(x_fake)\n if i == 0:\n gen_optimizer.update(self.loss_gen, self.gen, y_fake)\n x_fake.unchain_backward()\n dis_optimizer.update(self.loss_dis, self.dis, y_real, y_fake,\n 
x_real, x_fake)\n",
"step-5": "#!/usr/bin/python3\n#https://github.com/pfnet-research/chainer-gan-lib/blob/master/wgan_gp/updater.py\n\nimport numpy as np\nimport chainer\nimport chainer.functions as F\nfrom chainer import Variable\nfrom chainer.dataset import convert\n\nclass WGANUpdater(chainer.training.updaters.StandardUpdater):\n\n def __init__(self, *args, **kwargs):\n self.gen, self.dis = kwargs.pop('models')\n self.n_dis = kwargs.pop('n_dis')\n self.lam = kwargs.pop('lam')\n self.iteration = 0\n super(WGANUpdater, self).__init__(*args, **kwargs)\n\n def loss_gen(self, gen, y_fake):\n batchsize = len(y_fake)\n loss = F.sum(-y_fake)/batchsize\n chainer.reporter.report({'loss': loss}, gen)\n return loss\n def loss_dis(self, dis, y_real, y_fake, x_real, x_fake):\n batchsize = len(y_fake)\n xp = dis.xp\n\n eps = xp.random.uniform(0, 1, size=batchsize)\\\n .astype(\"f\")[:, None, None, None]\n x_mid = eps * x_real + (1.0 - eps) * x_fake\n\n y_mid,_ = self.dis(x_mid)\n grad, = chainer.grad([y_mid], [x_mid], enable_double_backprop=True)\n grad = F.sqrt(F.batch_l2_norm_squared(grad))\n loss_grad = self.lam * F.mean_squared_error(grad, \n xp.ones_like(grad.data))\n\n loss = F.sum(-y_real) / batchsize\n loss += F.sum(y_fake) / batchsize\n wasserstein_distance = -loss\n loss += loss_grad\n chainer.reporter.report({'wasserstein_distance': wasserstein_distance,\n 'loss_grad':loss_grad})\n chainer.reporter.report({'loss': loss}, dis)\n return loss\n\n def update_core(self):\n gen_optimizer = self.get_optimizer('gen')\n dis_optimizer = self.get_optimizer('dis')\n xp = self.gen.xp\n\n for i in range(self.n_dis):\n batch = self.get_iterator('main').next()\n batchsize = len(batch)\n x = []\n for j in range(batchsize):\n x.append(np.asarray(batch[j]).astype(\"f\"))\n x_real = Variable(xp.asarray(x))\n y_real,_ = self.dis(x_real)\n\n z = Variable(xp.asarray(self.gen.make_hidden(batchsize)))\n x_fake = self.gen(z)\n y_fake,_ = self.dis(x_fake)\n\n if i == 0:\n gen_optimizer.update(self.loss_gen, 
self.gen, y_fake)\n x_fake.unchain_backward()\n\n dis_optimizer.update(self.loss_dis, self.dis, \n y_real, y_fake, x_real, x_fake)\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def custom_proc(request):
"""
自定义context_processors
"""
return {'context_test': 'test'}
<|reserved_special_token_1|>
#!/usr/bin/env python
#coding:utf-8
'''
Created on 2016年8月29日
@author: lichen
'''
def custom_proc(request):
"""
自定义context_processors
"""
return {
"context_test":"test"
}
|
flexible
|
{
"blob_id": "43ecb173e3d306284f2122410b5b74945572f683",
"index": 8104,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef custom_proc(request):\n \"\"\"\n 自定义context_processors\n \"\"\"\n return {'context_test': 'test'}\n",
"step-3": "#!/usr/bin/env python\n#coding:utf-8\n\n'''\nCreated on 2016年8月29日\n\n@author: lichen\n'''\n\ndef custom_proc(request):\n \"\"\"\n 自定义context_processors\n \"\"\"\n return {\n \"context_test\":\"test\"\n }",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
import tkinter as tk
import tkinter.ttk as ttk
import GUIForm
import sys
def main():
global window
global _form
print("You are using Python {}.{}.{}".format(sys.version_info.major, sys.version_info.minor, sys.version_info.micro))
window=tk.Tk()
GUIForm.BuildInterface(window)
window.mainloop()
if __name__ == "__main__":
main()
|
normal
|
{
"blob_id": "ca5057a5fdfef0edf4cf0c3ff3e2a371907ca4ee",
"index": 1270,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef main():\n global window\n global _form\n print('You are using Python {}.{}.{}'.format(sys.version_info.major,\n sys.version_info.minor, sys.version_info.micro))\n window = tk.Tk()\n GUIForm.BuildInterface(window)\n window.mainloop()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef main():\n global window\n global _form\n print('You are using Python {}.{}.{}'.format(sys.version_info.major,\n sys.version_info.minor, sys.version_info.micro))\n window = tk.Tk()\n GUIForm.BuildInterface(window)\n window.mainloop()\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "import tkinter as tk\nimport tkinter.ttk as ttk\nimport GUIForm\nimport sys\n\n\ndef main():\n global window\n global _form\n print('You are using Python {}.{}.{}'.format(sys.version_info.major,\n sys.version_info.minor, sys.version_info.micro))\n window = tk.Tk()\n GUIForm.BuildInterface(window)\n window.mainloop()\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "import tkinter as tk\nimport tkinter.ttk as ttk\nimport GUIForm\nimport sys\n\ndef main():\n global window\n global _form\n\n print(\"You are using Python {}.{}.{}\".format(sys.version_info.major, sys.version_info.minor, sys.version_info.micro))\n\n window=tk.Tk()\n GUIForm.BuildInterface(window)\n window.mainloop()\n\nif __name__ == \"__main__\":\n main()",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
class TreeNode:
def __init__(self, x):
self.val = x
self.left = None
self.right = None
class Solution:
# @param A : root node of tree
# @return a list of integers
def solve(self, root):
if not root:
return
result = []
result.append(root.val)
Solution.pre_util(Solution, root.left, result)
Solution.leaf_nodes_util(Solution, root.left, result)
Solution.leaf_nodes_util(Solution, root.right, result)
Solution.post_util(Solution, root.right, result)
return result
def leaf_nodes_util(self, curr, result):
if not curr:
return
self.leaf_nodes_util(self,curr.left, result)
if not curr.left and not curr.right:
result.append(curr.val)
self.leaf_nodes_util(self, curr.right, result)
def pre_util(self, root, result):
if not root or (root and not root.left and not root.right):
return
result.append(root.val)
if root.left:
Solution.pre_util(Solution, root.left, result)
elif root.right:
Solution.pre_util(Solution, root.right, result)
def post_util(self, root, result):
if not root or (root and not root.left and not root.right):
return
if root.right:
Solution.post_util(Solution, root.right, result)
elif root.left:
Solution.post_util(Solution, root.left, result)
result.append(root.val)
if __name__ == '__main__':
A = [1,2,3,4,5,6,None,None,None,7,8,9,10]
from src.util.binary_tree_util import binary_tree_util
print(Solution.solve(Solution,binary_tree_util.build(A)))
|
normal
|
{
"blob_id": "a49ee1e3f600d83486d0cf2396ed261c61fdf926",
"index": 1711,
"step-1": "<mask token>\n\n\nclass Solution:\n <mask token>\n\n def leaf_nodes_util(self, curr, result):\n if not curr:\n return\n self.leaf_nodes_util(self, curr.left, result)\n if not curr.left and not curr.right:\n result.append(curr.val)\n self.leaf_nodes_util(self, curr.right, result)\n <mask token>\n\n def post_util(self, root, result):\n if not root or root and not root.left and not root.right:\n return\n if root.right:\n Solution.post_util(Solution, root.right, result)\n elif root.left:\n Solution.post_util(Solution, root.left, result)\n result.append(root.val)\n\n\n<mask token>\n",
"step-2": "class TreeNode:\n <mask token>\n\n\nclass Solution:\n\n def solve(self, root):\n if not root:\n return\n result = []\n result.append(root.val)\n Solution.pre_util(Solution, root.left, result)\n Solution.leaf_nodes_util(Solution, root.left, result)\n Solution.leaf_nodes_util(Solution, root.right, result)\n Solution.post_util(Solution, root.right, result)\n return result\n\n def leaf_nodes_util(self, curr, result):\n if not curr:\n return\n self.leaf_nodes_util(self, curr.left, result)\n if not curr.left and not curr.right:\n result.append(curr.val)\n self.leaf_nodes_util(self, curr.right, result)\n\n def pre_util(self, root, result):\n if not root or root and not root.left and not root.right:\n return\n result.append(root.val)\n if root.left:\n Solution.pre_util(Solution, root.left, result)\n elif root.right:\n Solution.pre_util(Solution, root.right, result)\n\n def post_util(self, root, result):\n if not root or root and not root.left and not root.right:\n return\n if root.right:\n Solution.post_util(Solution, root.right, result)\n elif root.left:\n Solution.post_util(Solution, root.left, result)\n result.append(root.val)\n\n\n<mask token>\n",
"step-3": "class TreeNode:\n\n def __init__(self, x):\n self.val = x\n self.left = None\n self.right = None\n\n\nclass Solution:\n\n def solve(self, root):\n if not root:\n return\n result = []\n result.append(root.val)\n Solution.pre_util(Solution, root.left, result)\n Solution.leaf_nodes_util(Solution, root.left, result)\n Solution.leaf_nodes_util(Solution, root.right, result)\n Solution.post_util(Solution, root.right, result)\n return result\n\n def leaf_nodes_util(self, curr, result):\n if not curr:\n return\n self.leaf_nodes_util(self, curr.left, result)\n if not curr.left and not curr.right:\n result.append(curr.val)\n self.leaf_nodes_util(self, curr.right, result)\n\n def pre_util(self, root, result):\n if not root or root and not root.left and not root.right:\n return\n result.append(root.val)\n if root.left:\n Solution.pre_util(Solution, root.left, result)\n elif root.right:\n Solution.pre_util(Solution, root.right, result)\n\n def post_util(self, root, result):\n if not root or root and not root.left and not root.right:\n return\n if root.right:\n Solution.post_util(Solution, root.right, result)\n elif root.left:\n Solution.post_util(Solution, root.left, result)\n result.append(root.val)\n\n\n<mask token>\n",
"step-4": "class TreeNode:\n\n def __init__(self, x):\n self.val = x\n self.left = None\n self.right = None\n\n\nclass Solution:\n\n def solve(self, root):\n if not root:\n return\n result = []\n result.append(root.val)\n Solution.pre_util(Solution, root.left, result)\n Solution.leaf_nodes_util(Solution, root.left, result)\n Solution.leaf_nodes_util(Solution, root.right, result)\n Solution.post_util(Solution, root.right, result)\n return result\n\n def leaf_nodes_util(self, curr, result):\n if not curr:\n return\n self.leaf_nodes_util(self, curr.left, result)\n if not curr.left and not curr.right:\n result.append(curr.val)\n self.leaf_nodes_util(self, curr.right, result)\n\n def pre_util(self, root, result):\n if not root or root and not root.left and not root.right:\n return\n result.append(root.val)\n if root.left:\n Solution.pre_util(Solution, root.left, result)\n elif root.right:\n Solution.pre_util(Solution, root.right, result)\n\n def post_util(self, root, result):\n if not root or root and not root.left and not root.right:\n return\n if root.right:\n Solution.post_util(Solution, root.right, result)\n elif root.left:\n Solution.post_util(Solution, root.left, result)\n result.append(root.val)\n\n\nif __name__ == '__main__':\n A = [1, 2, 3, 4, 5, 6, None, None, None, 7, 8, 9, 10]\n from src.util.binary_tree_util import binary_tree_util\n print(Solution.solve(Solution, binary_tree_util.build(A)))\n",
"step-5": "class TreeNode:\n def __init__(self, x):\n self.val = x\n self.left = None\n self.right = None\n\nclass Solution:\n # @param A : root node of tree\n # @return a list of integers\n def solve(self, root):\n if not root:\n return\n result = []\n result.append(root.val)\n Solution.pre_util(Solution, root.left, result)\n Solution.leaf_nodes_util(Solution, root.left, result)\n Solution.leaf_nodes_util(Solution, root.right, result)\n Solution.post_util(Solution, root.right, result)\n return result\n\n def leaf_nodes_util(self, curr, result):\n if not curr:\n return\n self.leaf_nodes_util(self,curr.left, result)\n if not curr.left and not curr.right:\n result.append(curr.val)\n self.leaf_nodes_util(self, curr.right, result)\n\n\n def pre_util(self, root, result):\n if not root or (root and not root.left and not root.right):\n return\n result.append(root.val)\n if root.left:\n Solution.pre_util(Solution, root.left, result)\n elif root.right:\n Solution.pre_util(Solution, root.right, result)\n\n def post_util(self, root, result):\n if not root or (root and not root.left and not root.right):\n return\n if root.right:\n Solution.post_util(Solution, root.right, result)\n elif root.left:\n Solution.post_util(Solution, root.left, result)\n result.append(root.val)\n\n\nif __name__ == '__main__':\n A = [1,2,3,4,5,6,None,None,None,7,8,9,10]\n from src.util.binary_tree_util import binary_tree_util\n print(Solution.solve(Solution,binary_tree_util.build(A)))",
"step-ids": [
3,
6,
7,
8,
9
]
}
|
[
3,
6,
7,
8,
9
] |
# Written by Jagannath Bilgi <jsbilgi@yahoo.com>
import sys
import json
import re
"""
Program accepts *.md document and converts to csv in required format
Program parse line by line and uses recursive method to traverse from leaf to root.
Single turn object (string, int etc) is used as point of return from recursion.
"""
default_input_file = ''
default_output_file = ''
no_of_parameters = len(sys.argv)
if no_of_parameters == 1:
f = open('awesome-transform.param')
for lno, fname in enumerate(f):
if lno == 0:
default_input_file = fname.rstrip()
else:
default_output_file = fname.rstrip()
f.close()
if no_of_parameters < 2:
input_file = default_input_file
else:
input_file = sys.argv[1]
if no_of_parameters < 3:
output_file = default_output_file
else:
output_file = sys.argv[2]
with open(input_file) as f:
json_data = json.load(f)
def obj_rec(obj, t, flag=0,acc=''):
v_obj = type(obj)
r = ''
if type(obj) not in [dict, list, map]:
ref_url = re.findall(r'\((http.*?)\)', obj)
ref_title = re.findall(r'\[[^\[\]]*\]', obj)
if ref_url :
url = ref_url[len(ref_url)-1].strip('[]')
title = ref_title[len(ref_title)-1].strip('[]')
url_title = title + ',' + url
else:
url = ''
title = ''
url_title = title + ',' + url
return acc
if acc:
if flag == 0:
return acc + '\n'
else:
return acc + url_title + ',' + '"' + t + '"' + '\n'
else:
if flag == 0:
return ',,"' + url_title + ',' + t + '"' + '\n'
else:
return ',' + url_title + ',' + '"' + t + '"' + '\n'
elif v_obj == list:
if obj :
return obj_rec(obj[1:], t, flag, obj_rec(obj[0], t , 1, acc))
else:
return acc
elif v_obj == dict:
if bool(obj):
for o in obj:
k = o
oo = obj[o]
if type(oo) in [list,dict]:
r = obj_rec(oo, t + ',' + o, 1, acc)
acc = ""
else:
ref_url = re.findall(r'\((http.*?)\)', oo)
ref_title = re.findall(r'\[[^\[\]]*\]', oo)
if ref_url:
url = ref_url[len(ref_url) - 1].strip('[]')
title = ref_title[len(ref_title)-1].strip('[]')
url_title = title + ',' + url
else:
url = ''
title = ''
url_title = title + ',' + url
if not acc:
sep = ','
else:
sep = ''
if not url:
r = sep + url_title + ',' + '"' + t + ',' + k + '",\n'
break
del obj[k]
if not obj:
sep = '\n'
else:
sep = ''
return obj_rec(obj, t, flag, acc + r + sep)
else:
return acc[:-1]
itemlist = []
for o in json_data:
itemlist.append((obj_rec(json_data[o], o.split("(",1)[0])[1:]))
with open(output_file, 'w') as outfile:
outfile.writelines(["%s\n" % item for item in itemlist])
|
normal
|
{
"blob_id": "739921a6a09edbb81b442f4127215746c601a69a",
"index": 4990,
"step-1": "<mask token>\n\n\ndef obj_rec(obj, t, flag=0, acc=''):\n v_obj = type(obj)\n r = ''\n if type(obj) not in [dict, list, map]:\n ref_url = re.findall('\\\\((http.*?)\\\\)', obj)\n ref_title = re.findall('\\\\[[^\\\\[\\\\]]*\\\\]', obj)\n if ref_url:\n url = ref_url[len(ref_url) - 1].strip('[]')\n title = ref_title[len(ref_title) - 1].strip('[]')\n url_title = title + ',' + url\n else:\n url = ''\n title = ''\n url_title = title + ',' + url\n return acc\n if acc:\n if flag == 0:\n return acc + '\\n'\n else:\n return acc + url_title + ',' + '\"' + t + '\"' + '\\n'\n elif flag == 0:\n return ',,\"' + url_title + ',' + t + '\"' + '\\n'\n else:\n return ',' + url_title + ',' + '\"' + t + '\"' + '\\n'\n elif v_obj == list:\n if obj:\n return obj_rec(obj[1:], t, flag, obj_rec(obj[0], t, 1, acc))\n else:\n return acc\n elif v_obj == dict:\n if bool(obj):\n for o in obj:\n k = o\n oo = obj[o]\n if type(oo) in [list, dict]:\n r = obj_rec(oo, t + ',' + o, 1, acc)\n acc = ''\n else:\n ref_url = re.findall('\\\\((http.*?)\\\\)', oo)\n ref_title = re.findall('\\\\[[^\\\\[\\\\]]*\\\\]', oo)\n if ref_url:\n url = ref_url[len(ref_url) - 1].strip('[]')\n title = ref_title[len(ref_title) - 1].strip('[]')\n url_title = title + ',' + url\n else:\n url = ''\n title = ''\n url_title = title + ',' + url\n if not acc:\n sep = ','\n else:\n sep = ''\n if not url:\n r = sep + url_title + ',' + '\"' + t + ',' + k + '\",\\n'\n break\n del obj[k]\n if not obj:\n sep = '\\n'\n else:\n sep = ''\n return obj_rec(obj, t, flag, acc + r + sep)\n else:\n return acc[:-1]\n\n\n<mask token>\n",
"step-2": "<mask token>\nif no_of_parameters == 1:\n f = open('awesome-transform.param')\n for lno, fname in enumerate(f):\n if lno == 0:\n default_input_file = fname.rstrip()\n else:\n default_output_file = fname.rstrip()\n f.close()\nif no_of_parameters < 2:\n input_file = default_input_file\nelse:\n input_file = sys.argv[1]\nif no_of_parameters < 3:\n output_file = default_output_file\nelse:\n output_file = sys.argv[2]\nwith open(input_file) as f:\n json_data = json.load(f)\n\n\ndef obj_rec(obj, t, flag=0, acc=''):\n v_obj = type(obj)\n r = ''\n if type(obj) not in [dict, list, map]:\n ref_url = re.findall('\\\\((http.*?)\\\\)', obj)\n ref_title = re.findall('\\\\[[^\\\\[\\\\]]*\\\\]', obj)\n if ref_url:\n url = ref_url[len(ref_url) - 1].strip('[]')\n title = ref_title[len(ref_title) - 1].strip('[]')\n url_title = title + ',' + url\n else:\n url = ''\n title = ''\n url_title = title + ',' + url\n return acc\n if acc:\n if flag == 0:\n return acc + '\\n'\n else:\n return acc + url_title + ',' + '\"' + t + '\"' + '\\n'\n elif flag == 0:\n return ',,\"' + url_title + ',' + t + '\"' + '\\n'\n else:\n return ',' + url_title + ',' + '\"' + t + '\"' + '\\n'\n elif v_obj == list:\n if obj:\n return obj_rec(obj[1:], t, flag, obj_rec(obj[0], t, 1, acc))\n else:\n return acc\n elif v_obj == dict:\n if bool(obj):\n for o in obj:\n k = o\n oo = obj[o]\n if type(oo) in [list, dict]:\n r = obj_rec(oo, t + ',' + o, 1, acc)\n acc = ''\n else:\n ref_url = re.findall('\\\\((http.*?)\\\\)', oo)\n ref_title = re.findall('\\\\[[^\\\\[\\\\]]*\\\\]', oo)\n if ref_url:\n url = ref_url[len(ref_url) - 1].strip('[]')\n title = ref_title[len(ref_title) - 1].strip('[]')\n url_title = title + ',' + url\n else:\n url = ''\n title = ''\n url_title = title + ',' + url\n if not acc:\n sep = ','\n else:\n sep = ''\n if not url:\n r = sep + url_title + ',' + '\"' + t + ',' + k + '\",\\n'\n break\n del obj[k]\n if not obj:\n sep = '\\n'\n else:\n sep = ''\n return obj_rec(obj, t, flag, acc + r + 
sep)\n else:\n return acc[:-1]\n\n\n<mask token>\nfor o in json_data:\n itemlist.append(obj_rec(json_data[o], o.split('(', 1)[0])[1:])\nwith open(output_file, 'w') as outfile:\n outfile.writelines([('%s\\n' % item) for item in itemlist])\n",
"step-3": "<mask token>\ndefault_input_file = ''\ndefault_output_file = ''\nno_of_parameters = len(sys.argv)\nif no_of_parameters == 1:\n f = open('awesome-transform.param')\n for lno, fname in enumerate(f):\n if lno == 0:\n default_input_file = fname.rstrip()\n else:\n default_output_file = fname.rstrip()\n f.close()\nif no_of_parameters < 2:\n input_file = default_input_file\nelse:\n input_file = sys.argv[1]\nif no_of_parameters < 3:\n output_file = default_output_file\nelse:\n output_file = sys.argv[2]\nwith open(input_file) as f:\n json_data = json.load(f)\n\n\ndef obj_rec(obj, t, flag=0, acc=''):\n v_obj = type(obj)\n r = ''\n if type(obj) not in [dict, list, map]:\n ref_url = re.findall('\\\\((http.*?)\\\\)', obj)\n ref_title = re.findall('\\\\[[^\\\\[\\\\]]*\\\\]', obj)\n if ref_url:\n url = ref_url[len(ref_url) - 1].strip('[]')\n title = ref_title[len(ref_title) - 1].strip('[]')\n url_title = title + ',' + url\n else:\n url = ''\n title = ''\n url_title = title + ',' + url\n return acc\n if acc:\n if flag == 0:\n return acc + '\\n'\n else:\n return acc + url_title + ',' + '\"' + t + '\"' + '\\n'\n elif flag == 0:\n return ',,\"' + url_title + ',' + t + '\"' + '\\n'\n else:\n return ',' + url_title + ',' + '\"' + t + '\"' + '\\n'\n elif v_obj == list:\n if obj:\n return obj_rec(obj[1:], t, flag, obj_rec(obj[0], t, 1, acc))\n else:\n return acc\n elif v_obj == dict:\n if bool(obj):\n for o in obj:\n k = o\n oo = obj[o]\n if type(oo) in [list, dict]:\n r = obj_rec(oo, t + ',' + o, 1, acc)\n acc = ''\n else:\n ref_url = re.findall('\\\\((http.*?)\\\\)', oo)\n ref_title = re.findall('\\\\[[^\\\\[\\\\]]*\\\\]', oo)\n if ref_url:\n url = ref_url[len(ref_url) - 1].strip('[]')\n title = ref_title[len(ref_title) - 1].strip('[]')\n url_title = title + ',' + url\n else:\n url = ''\n title = ''\n url_title = title + ',' + url\n if not acc:\n sep = ','\n else:\n sep = ''\n if not url:\n r = sep + url_title + ',' + '\"' + t + ',' + k + '\",\\n'\n break\n del obj[k]\n if 
not obj:\n sep = '\\n'\n else:\n sep = ''\n return obj_rec(obj, t, flag, acc + r + sep)\n else:\n return acc[:-1]\n\n\nitemlist = []\nfor o in json_data:\n itemlist.append(obj_rec(json_data[o], o.split('(', 1)[0])[1:])\nwith open(output_file, 'w') as outfile:\n outfile.writelines([('%s\\n' % item) for item in itemlist])\n",
"step-4": "import sys\nimport json\nimport re\n<mask token>\ndefault_input_file = ''\ndefault_output_file = ''\nno_of_parameters = len(sys.argv)\nif no_of_parameters == 1:\n f = open('awesome-transform.param')\n for lno, fname in enumerate(f):\n if lno == 0:\n default_input_file = fname.rstrip()\n else:\n default_output_file = fname.rstrip()\n f.close()\nif no_of_parameters < 2:\n input_file = default_input_file\nelse:\n input_file = sys.argv[1]\nif no_of_parameters < 3:\n output_file = default_output_file\nelse:\n output_file = sys.argv[2]\nwith open(input_file) as f:\n json_data = json.load(f)\n\n\ndef obj_rec(obj, t, flag=0, acc=''):\n v_obj = type(obj)\n r = ''\n if type(obj) not in [dict, list, map]:\n ref_url = re.findall('\\\\((http.*?)\\\\)', obj)\n ref_title = re.findall('\\\\[[^\\\\[\\\\]]*\\\\]', obj)\n if ref_url:\n url = ref_url[len(ref_url) - 1].strip('[]')\n title = ref_title[len(ref_title) - 1].strip('[]')\n url_title = title + ',' + url\n else:\n url = ''\n title = ''\n url_title = title + ',' + url\n return acc\n if acc:\n if flag == 0:\n return acc + '\\n'\n else:\n return acc + url_title + ',' + '\"' + t + '\"' + '\\n'\n elif flag == 0:\n return ',,\"' + url_title + ',' + t + '\"' + '\\n'\n else:\n return ',' + url_title + ',' + '\"' + t + '\"' + '\\n'\n elif v_obj == list:\n if obj:\n return obj_rec(obj[1:], t, flag, obj_rec(obj[0], t, 1, acc))\n else:\n return acc\n elif v_obj == dict:\n if bool(obj):\n for o in obj:\n k = o\n oo = obj[o]\n if type(oo) in [list, dict]:\n r = obj_rec(oo, t + ',' + o, 1, acc)\n acc = ''\n else:\n ref_url = re.findall('\\\\((http.*?)\\\\)', oo)\n ref_title = re.findall('\\\\[[^\\\\[\\\\]]*\\\\]', oo)\n if ref_url:\n url = ref_url[len(ref_url) - 1].strip('[]')\n title = ref_title[len(ref_title) - 1].strip('[]')\n url_title = title + ',' + url\n else:\n url = ''\n title = ''\n url_title = title + ',' + url\n if not acc:\n sep = ','\n else:\n sep = ''\n if not url:\n r = sep + url_title + ',' + '\"' + t + ',' + k + 
'\",\\n'\n break\n del obj[k]\n if not obj:\n sep = '\\n'\n else:\n sep = ''\n return obj_rec(obj, t, flag, acc + r + sep)\n else:\n return acc[:-1]\n\n\nitemlist = []\nfor o in json_data:\n itemlist.append(obj_rec(json_data[o], o.split('(', 1)[0])[1:])\nwith open(output_file, 'w') as outfile:\n outfile.writelines([('%s\\n' % item) for item in itemlist])\n",
"step-5": "# Written by Jagannath Bilgi <jsbilgi@yahoo.com>\n\nimport sys\nimport json\nimport re\n\n\"\"\"\nProgram accepts *.md document and converts to csv in required format\n\nProgram parse line by line and uses recursive method to traverse from leaf to root. \nSingle turn object (string, int etc) is used as point of return from recursion.\n\n\n\"\"\"\n\ndefault_input_file = ''\ndefault_output_file = ''\n\nno_of_parameters = len(sys.argv)\n\nif no_of_parameters == 1:\n f = open('awesome-transform.param')\n for lno, fname in enumerate(f):\n if lno == 0:\n default_input_file = fname.rstrip()\n else:\n default_output_file = fname.rstrip()\n f.close()\n\nif no_of_parameters < 2:\n input_file = default_input_file\nelse:\n input_file = sys.argv[1]\n\nif no_of_parameters < 3:\n output_file = default_output_file\nelse:\n output_file = sys.argv[2]\n\nwith open(input_file) as f:\n json_data = json.load(f)\n\ndef obj_rec(obj, t, flag=0,acc=''):\n v_obj = type(obj)\n r = ''\n if type(obj) not in [dict, list, map]:\n ref_url = re.findall(r'\\((http.*?)\\)', obj)\n ref_title = re.findall(r'\\[[^\\[\\]]*\\]', obj)\n if ref_url :\n url = ref_url[len(ref_url)-1].strip('[]')\n title = ref_title[len(ref_title)-1].strip('[]')\n url_title = title + ',' + url\n else:\n url = ''\n title = ''\n url_title = title + ',' + url\n return acc\n\n if acc:\n if flag == 0:\n return acc + '\\n'\n else:\n return acc + url_title + ',' + '\"' + t + '\"' + '\\n'\n else:\n if flag == 0:\n return ',,\"' + url_title + ',' + t + '\"' + '\\n'\n else:\n return ',' + url_title + ',' + '\"' + t + '\"' + '\\n'\n elif v_obj == list:\n if obj :\n return obj_rec(obj[1:], t, flag, obj_rec(obj[0], t , 1, acc))\n else:\n return acc\n elif v_obj == dict:\n if bool(obj):\n for o in obj:\n k = o\n oo = obj[o]\n if type(oo) in [list,dict]:\n r = obj_rec(oo, t + ',' + o, 1, acc)\n acc = \"\"\n else:\n ref_url = re.findall(r'\\((http.*?)\\)', oo)\n ref_title = re.findall(r'\\[[^\\[\\]]*\\]', oo)\n if ref_url:\n url = 
ref_url[len(ref_url) - 1].strip('[]')\n title = ref_title[len(ref_title)-1].strip('[]')\n url_title = title + ',' + url\n else:\n url = ''\n title = ''\n url_title = title + ',' + url\n if not acc:\n sep = ','\n else:\n sep = ''\n if not url:\n r = sep + url_title + ',' + '\"' + t + ',' + k + '\",\\n'\n break\n del obj[k]\n if not obj:\n sep = '\\n'\n else:\n sep = ''\n return obj_rec(obj, t, flag, acc + r + sep)\n else:\n return acc[:-1]\nitemlist = []\nfor o in json_data:\n itemlist.append((obj_rec(json_data[o], o.split(\"(\",1)[0])[1:]))\n\nwith open(output_file, 'w') as outfile:\n outfile.writelines([\"%s\\n\" % item for item in itemlist])",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from multiprocess.managers import BaseManager
from linphonebase import LinphoneBase
class MyManager(BaseManager):
pass
MyManager.register('LinphoneBase', LinphoneBase)
manager = MyManager()
manager.start()
linphoneBase = manager.LinphoneBase()
|
normal
|
{
"blob_id": "3bb25cedc29f9063046329db1c00e7d9e10ce1cc",
"index": 5089,
"step-1": "<mask token>\n\n\nclass MyManager(BaseManager):\n pass\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass MyManager(BaseManager):\n pass\n\n\nMyManager.register('LinphoneBase', LinphoneBase)\n<mask token>\nmanager.start()\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass MyManager(BaseManager):\n pass\n\n\nMyManager.register('LinphoneBase', LinphoneBase)\nmanager = MyManager()\nmanager.start()\nlinphoneBase = manager.LinphoneBase()\n",
"step-4": "from multiprocess.managers import BaseManager\nfrom linphonebase import LinphoneBase\n\n\nclass MyManager(BaseManager):\n pass\n\n\nMyManager.register('LinphoneBase', LinphoneBase)\nmanager = MyManager()\nmanager.start()\nlinphoneBase = manager.LinphoneBase()\n",
"step-5": "#!/usr/bin/env python\n# -*- coding: UTF-8 -*-\n\nfrom multiprocess.managers import BaseManager\nfrom linphonebase import LinphoneBase\n\nclass MyManager(BaseManager):\n pass\n\nMyManager.register('LinphoneBase', LinphoneBase)\n\nmanager = MyManager()\nmanager.start()\nlinphoneBase = manager.LinphoneBase()\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
class SyncTestCase(tests.unittest.HomeserverTestCase):
<|reserved_special_token_0|>
servlets = [admin.register_servlets, knock.register_servlets, login.
register_servlets, room.register_servlets]
def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer
) ->None:
self.sync_handler = self.hs.get_sync_handler()
self.store = self.hs.get_datastores().main
self.auth_blocking = self.hs.get_auth_blocking()
def test_wait_for_sync_for_user_auth_blocking(self) ->None:
user_id1 = '@user1:test'
user_id2 = '@user2:test'
sync_config = generate_sync_config(user_id1)
requester = create_requester(user_id1)
self.reactor.advance(100)
self.auth_blocking._limit_usage_by_mau = True
self.auth_blocking._max_mau_value = 1
self.get_success(self.store.upsert_monthly_active_user(user_id1))
self.get_success(self.sync_handler.wait_for_sync_for_user(requester,
sync_config))
self.auth_blocking._hs_disabled = True
e = self.get_failure(self.sync_handler.wait_for_sync_for_user(
requester, sync_config), ResourceLimitError)
self.assertEqual(e.value.errcode, Codes.RESOURCE_LIMIT_EXCEEDED)
self.auth_blocking._hs_disabled = False
sync_config = generate_sync_config(user_id2)
requester = create_requester(user_id2)
e = self.get_failure(self.sync_handler.wait_for_sync_for_user(
requester, sync_config), ResourceLimitError)
self.assertEqual(e.value.errcode, Codes.RESOURCE_LIMIT_EXCEEDED)
def test_unknown_room_version(self) ->None:
"""
A room with an unknown room version should not break sync (and should be excluded).
"""
inviter = self.register_user('creator', 'pass', admin=True)
inviter_tok = self.login('@creator:test', 'pass')
user = self.register_user('user', 'pass')
tok = self.login('user', 'pass')
requester = create_requester(user)
initial_result = self.get_success(self.sync_handler.
wait_for_sync_for_user(requester, sync_config=
generate_sync_config(user, device_id='dev')))
joined_room = self.helper.create_room_as(user, tok=tok)
invite_room = self.helper.create_room_as(inviter, tok=inviter_tok)
self.helper.invite(invite_room, targ=user, tok=inviter_tok)
knock_room = self.helper.create_room_as(inviter, room_version=
RoomVersions.V7.identifier, tok=inviter_tok)
self.helper.send_state(knock_room, EventTypes.JoinRules, {
'join_rule': JoinRules.KNOCK}, tok=inviter_tok)
channel = self.make_request('POST', '/_matrix/client/r0/knock/%s' %
(knock_room,), b'{}', tok)
self.assertEqual(200, channel.code, channel.result)
result = self.get_success(self.sync_handler.wait_for_sync_for_user(
requester, sync_config=generate_sync_config(user)))
self.assertIn(joined_room, [r.room_id for r in result.joined])
self.assertIn(invite_room, [r.room_id for r in result.invited])
self.assertIn(knock_room, [r.room_id for r in result.knocked])
result = self.get_success(self.sync_handler.wait_for_sync_for_user(
requester, sync_config=generate_sync_config(user, device_id=
'dev'), since_token=initial_result.next_batch))
self.assertIn(joined_room, [r.room_id for r in result.joined])
self.assertIn(invite_room, [r.room_id for r in result.invited])
self.assertIn(knock_room, [r.room_id for r in result.knocked])
for room_id in (joined_room, invite_room, knock_room):
self.get_success(self.hs.get_datastores().main.db_pool.
simple_update('rooms', keyvalues={'room_id': room_id},
updatevalues={'room_version': 'unknown-room-version'}, desc
='updated-room-version'))
self.store.get_rooms_for_user_with_stream_ordering.invalidate_all()
self.store.get_rooms_for_user.invalidate_all()
self.store._get_event_cache.clear()
self.store._event_ref.clear()
result = self.get_success(self.sync_handler.wait_for_sync_for_user(
requester, sync_config=generate_sync_config(user)))
self.assertNotIn(joined_room, [r.room_id for r in result.joined])
self.assertNotIn(invite_room, [r.room_id for r in result.invited])
self.assertNotIn(knock_room, [r.room_id for r in result.knocked])
result = self.get_success(self.sync_handler.wait_for_sync_for_user(
requester, sync_config=generate_sync_config(user, device_id=
'dev'), since_token=initial_result.next_batch))
self.assertNotIn(joined_room, [r.room_id for r in result.joined])
self.assertNotIn(invite_room, [r.room_id for r in result.invited])
self.assertNotIn(knock_room, [r.room_id for r in result.knocked])
def test_ban_wins_race_with_join(self) ->None:
"""Rooms shouldn't appear under "joined" if a join loses a race to a ban.
A complicated edge case. Imagine the following scenario:
* you attempt to join a room
* racing with that is a ban which comes in over federation, which ends up with
an earlier stream_ordering than the join.
* you get a sync response with a sync token which is _after_ the ban, but before
the join
* now your join lands; it is a valid event because its `prev_event`s predate the
ban, but will not make it into current_state_events (because bans win over
joins in state res, essentially).
* When we do a sync from the incremental sync, the only event in the timeline
is your join ... and yet you aren't joined.
The ban coming in over federation isn't crucial for this behaviour; the key
requirements are:
1. the homeserver generates a join event with prev_events that precede the ban
(so that it passes the "are you banned" test)
2. the join event has a stream_ordering after that of the ban.
We use monkeypatching to artificially trigger condition (1).
"""
owner = self.register_user('alice', 'password')
owner_tok = self.login(owner, 'password')
room_id = self.helper.create_room_as(owner, is_public=True, tok=
owner_tok)
alice_sync_result: SyncResult = self.get_success(self.sync_handler.
wait_for_sync_for_user(create_requester(owner),
generate_sync_config(owner)))
self.assertEqual(len(alice_sync_result.joined), 1)
self.assertEqual(alice_sync_result.joined[0].room_id, room_id)
last_room_creation_event_id = alice_sync_result.joined[0
].timeline.events[-1].event_id
eve = self.register_user('eve', 'password')
eve_token = self.login(eve, 'password')
self.helper.ban(room_id, owner, eve, tok=owner_tok)
eve_requester = create_requester(eve)
eve_sync_config = generate_sync_config(eve)
eve_sync_after_ban: SyncResult = self.get_success(self.sync_handler
.wait_for_sync_for_user(eve_requester, eve_sync_config))
self.assertEqual(eve_sync_after_ban.joined, [])
mocked_get_prev_events = patch.object(self.hs.get_datastores().main,
'get_prev_events_for_room', new_callable=AsyncMock,
return_value=[last_room_creation_event_id])
with mocked_get_prev_events:
self.helper.join(room_id, eve, tok=eve_token)
eve_incremental_sync_after_join: SyncResult = self.get_success(self
.sync_handler.wait_for_sync_for_user(eve_requester,
eve_sync_config, since_token=eve_sync_after_ban.next_batch))
self.assertEqual(eve_incremental_sync_after_join.joined, [])
eve_initial_sync_after_join: SyncResult = self.get_success(self.
sync_handler.wait_for_sync_for_user(eve_requester,
eve_sync_config, since_token=None))
self.assertEqual(eve_initial_sync_after_join.joined, [])
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class SyncTestCase(tests.unittest.HomeserverTestCase):
"""Tests Sync Handler."""
servlets = [admin.register_servlets, knock.register_servlets, login.
register_servlets, room.register_servlets]
def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer
) ->None:
self.sync_handler = self.hs.get_sync_handler()
self.store = self.hs.get_datastores().main
self.auth_blocking = self.hs.get_auth_blocking()
def test_wait_for_sync_for_user_auth_blocking(self) ->None:
user_id1 = '@user1:test'
user_id2 = '@user2:test'
sync_config = generate_sync_config(user_id1)
requester = create_requester(user_id1)
self.reactor.advance(100)
self.auth_blocking._limit_usage_by_mau = True
self.auth_blocking._max_mau_value = 1
self.get_success(self.store.upsert_monthly_active_user(user_id1))
self.get_success(self.sync_handler.wait_for_sync_for_user(requester,
sync_config))
self.auth_blocking._hs_disabled = True
e = self.get_failure(self.sync_handler.wait_for_sync_for_user(
requester, sync_config), ResourceLimitError)
self.assertEqual(e.value.errcode, Codes.RESOURCE_LIMIT_EXCEEDED)
self.auth_blocking._hs_disabled = False
sync_config = generate_sync_config(user_id2)
requester = create_requester(user_id2)
e = self.get_failure(self.sync_handler.wait_for_sync_for_user(
requester, sync_config), ResourceLimitError)
self.assertEqual(e.value.errcode, Codes.RESOURCE_LIMIT_EXCEEDED)
def test_unknown_room_version(self) ->None:
"""
A room with an unknown room version should not break sync (and should be excluded).
"""
inviter = self.register_user('creator', 'pass', admin=True)
inviter_tok = self.login('@creator:test', 'pass')
user = self.register_user('user', 'pass')
tok = self.login('user', 'pass')
requester = create_requester(user)
initial_result = self.get_success(self.sync_handler.
wait_for_sync_for_user(requester, sync_config=
generate_sync_config(user, device_id='dev')))
joined_room = self.helper.create_room_as(user, tok=tok)
invite_room = self.helper.create_room_as(inviter, tok=inviter_tok)
self.helper.invite(invite_room, targ=user, tok=inviter_tok)
knock_room = self.helper.create_room_as(inviter, room_version=
RoomVersions.V7.identifier, tok=inviter_tok)
self.helper.send_state(knock_room, EventTypes.JoinRules, {
'join_rule': JoinRules.KNOCK}, tok=inviter_tok)
channel = self.make_request('POST', '/_matrix/client/r0/knock/%s' %
(knock_room,), b'{}', tok)
self.assertEqual(200, channel.code, channel.result)
result = self.get_success(self.sync_handler.wait_for_sync_for_user(
requester, sync_config=generate_sync_config(user)))
self.assertIn(joined_room, [r.room_id for r in result.joined])
self.assertIn(invite_room, [r.room_id for r in result.invited])
self.assertIn(knock_room, [r.room_id for r in result.knocked])
result = self.get_success(self.sync_handler.wait_for_sync_for_user(
requester, sync_config=generate_sync_config(user, device_id=
'dev'), since_token=initial_result.next_batch))
self.assertIn(joined_room, [r.room_id for r in result.joined])
self.assertIn(invite_room, [r.room_id for r in result.invited])
self.assertIn(knock_room, [r.room_id for r in result.knocked])
for room_id in (joined_room, invite_room, knock_room):
self.get_success(self.hs.get_datastores().main.db_pool.
simple_update('rooms', keyvalues={'room_id': room_id},
updatevalues={'room_version': 'unknown-room-version'}, desc
='updated-room-version'))
self.store.get_rooms_for_user_with_stream_ordering.invalidate_all()
self.store.get_rooms_for_user.invalidate_all()
self.store._get_event_cache.clear()
self.store._event_ref.clear()
result = self.get_success(self.sync_handler.wait_for_sync_for_user(
requester, sync_config=generate_sync_config(user)))
self.assertNotIn(joined_room, [r.room_id for r in result.joined])
self.assertNotIn(invite_room, [r.room_id for r in result.invited])
self.assertNotIn(knock_room, [r.room_id for r in result.knocked])
result = self.get_success(self.sync_handler.wait_for_sync_for_user(
requester, sync_config=generate_sync_config(user, device_id=
'dev'), since_token=initial_result.next_batch))
self.assertNotIn(joined_room, [r.room_id for r in result.joined])
self.assertNotIn(invite_room, [r.room_id for r in result.invited])
self.assertNotIn(knock_room, [r.room_id for r in result.knocked])
def test_ban_wins_race_with_join(self) ->None:
"""Rooms shouldn't appear under "joined" if a join loses a race to a ban.
A complicated edge case. Imagine the following scenario:
* you attempt to join a room
* racing with that is a ban which comes in over federation, which ends up with
an earlier stream_ordering than the join.
* you get a sync response with a sync token which is _after_ the ban, but before
the join
* now your join lands; it is a valid event because its `prev_event`s predate the
ban, but will not make it into current_state_events (because bans win over
joins in state res, essentially).
* When we do a sync from the incremental sync, the only event in the timeline
is your join ... and yet you aren't joined.
The ban coming in over federation isn't crucial for this behaviour; the key
requirements are:
1. the homeserver generates a join event with prev_events that precede the ban
(so that it passes the "are you banned" test)
2. the join event has a stream_ordering after that of the ban.
We use monkeypatching to artificially trigger condition (1).
"""
owner = self.register_user('alice', 'password')
owner_tok = self.login(owner, 'password')
room_id = self.helper.create_room_as(owner, is_public=True, tok=
owner_tok)
alice_sync_result: SyncResult = self.get_success(self.sync_handler.
wait_for_sync_for_user(create_requester(owner),
generate_sync_config(owner)))
self.assertEqual(len(alice_sync_result.joined), 1)
self.assertEqual(alice_sync_result.joined[0].room_id, room_id)
last_room_creation_event_id = alice_sync_result.joined[0
].timeline.events[-1].event_id
eve = self.register_user('eve', 'password')
eve_token = self.login(eve, 'password')
self.helper.ban(room_id, owner, eve, tok=owner_tok)
eve_requester = create_requester(eve)
eve_sync_config = generate_sync_config(eve)
eve_sync_after_ban: SyncResult = self.get_success(self.sync_handler
.wait_for_sync_for_user(eve_requester, eve_sync_config))
self.assertEqual(eve_sync_after_ban.joined, [])
mocked_get_prev_events = patch.object(self.hs.get_datastores().main,
'get_prev_events_for_room', new_callable=AsyncMock,
return_value=[last_room_creation_event_id])
with mocked_get_prev_events:
self.helper.join(room_id, eve, tok=eve_token)
eve_incremental_sync_after_join: SyncResult = self.get_success(self
.sync_handler.wait_for_sync_for_user(eve_requester,
eve_sync_config, since_token=eve_sync_after_ban.next_batch))
self.assertEqual(eve_incremental_sync_after_join.joined, [])
eve_initial_sync_after_join: SyncResult = self.get_success(self.
sync_handler.wait_for_sync_for_user(eve_requester,
eve_sync_config, since_token=None))
self.assertEqual(eve_initial_sync_after_join.joined, [])
<|reserved_special_token_0|>
def generate_sync_config(user_id: str, device_id: Optional[str]='device_id'
) ->SyncConfig:
"""Generate a sync config (with a unique request key)."""
global _request_key
_request_key += 1
return SyncConfig(user=UserID.from_string(user_id), filter_collection=
Filtering(Mock()).DEFAULT_FILTER_COLLECTION, is_guest=False,
request_key=('request_key', _request_key), device_id=device_id)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class SyncTestCase(tests.unittest.HomeserverTestCase):
"""Tests Sync Handler."""
servlets = [admin.register_servlets, knock.register_servlets, login.
register_servlets, room.register_servlets]
def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer
) ->None:
self.sync_handler = self.hs.get_sync_handler()
self.store = self.hs.get_datastores().main
self.auth_blocking = self.hs.get_auth_blocking()
def test_wait_for_sync_for_user_auth_blocking(self) ->None:
user_id1 = '@user1:test'
user_id2 = '@user2:test'
sync_config = generate_sync_config(user_id1)
requester = create_requester(user_id1)
self.reactor.advance(100)
self.auth_blocking._limit_usage_by_mau = True
self.auth_blocking._max_mau_value = 1
self.get_success(self.store.upsert_monthly_active_user(user_id1))
self.get_success(self.sync_handler.wait_for_sync_for_user(requester,
sync_config))
self.auth_blocking._hs_disabled = True
e = self.get_failure(self.sync_handler.wait_for_sync_for_user(
requester, sync_config), ResourceLimitError)
self.assertEqual(e.value.errcode, Codes.RESOURCE_LIMIT_EXCEEDED)
self.auth_blocking._hs_disabled = False
sync_config = generate_sync_config(user_id2)
requester = create_requester(user_id2)
e = self.get_failure(self.sync_handler.wait_for_sync_for_user(
requester, sync_config), ResourceLimitError)
self.assertEqual(e.value.errcode, Codes.RESOURCE_LIMIT_EXCEEDED)
def test_unknown_room_version(self) ->None:
"""
A room with an unknown room version should not break sync (and should be excluded).
"""
inviter = self.register_user('creator', 'pass', admin=True)
inviter_tok = self.login('@creator:test', 'pass')
user = self.register_user('user', 'pass')
tok = self.login('user', 'pass')
requester = create_requester(user)
initial_result = self.get_success(self.sync_handler.
wait_for_sync_for_user(requester, sync_config=
generate_sync_config(user, device_id='dev')))
joined_room = self.helper.create_room_as(user, tok=tok)
invite_room = self.helper.create_room_as(inviter, tok=inviter_tok)
self.helper.invite(invite_room, targ=user, tok=inviter_tok)
knock_room = self.helper.create_room_as(inviter, room_version=
RoomVersions.V7.identifier, tok=inviter_tok)
self.helper.send_state(knock_room, EventTypes.JoinRules, {
'join_rule': JoinRules.KNOCK}, tok=inviter_tok)
channel = self.make_request('POST', '/_matrix/client/r0/knock/%s' %
(knock_room,), b'{}', tok)
self.assertEqual(200, channel.code, channel.result)
result = self.get_success(self.sync_handler.wait_for_sync_for_user(
requester, sync_config=generate_sync_config(user)))
self.assertIn(joined_room, [r.room_id for r in result.joined])
self.assertIn(invite_room, [r.room_id for r in result.invited])
self.assertIn(knock_room, [r.room_id for r in result.knocked])
result = self.get_success(self.sync_handler.wait_for_sync_for_user(
requester, sync_config=generate_sync_config(user, device_id=
'dev'), since_token=initial_result.next_batch))
self.assertIn(joined_room, [r.room_id for r in result.joined])
self.assertIn(invite_room, [r.room_id for r in result.invited])
self.assertIn(knock_room, [r.room_id for r in result.knocked])
for room_id in (joined_room, invite_room, knock_room):
self.get_success(self.hs.get_datastores().main.db_pool.
simple_update('rooms', keyvalues={'room_id': room_id},
updatevalues={'room_version': 'unknown-room-version'}, desc
='updated-room-version'))
self.store.get_rooms_for_user_with_stream_ordering.invalidate_all()
self.store.get_rooms_for_user.invalidate_all()
self.store._get_event_cache.clear()
self.store._event_ref.clear()
result = self.get_success(self.sync_handler.wait_for_sync_for_user(
requester, sync_config=generate_sync_config(user)))
self.assertNotIn(joined_room, [r.room_id for r in result.joined])
self.assertNotIn(invite_room, [r.room_id for r in result.invited])
self.assertNotIn(knock_room, [r.room_id for r in result.knocked])
result = self.get_success(self.sync_handler.wait_for_sync_for_user(
requester, sync_config=generate_sync_config(user, device_id=
'dev'), since_token=initial_result.next_batch))
self.assertNotIn(joined_room, [r.room_id for r in result.joined])
self.assertNotIn(invite_room, [r.room_id for r in result.invited])
self.assertNotIn(knock_room, [r.room_id for r in result.knocked])
def test_ban_wins_race_with_join(self) ->None:
"""Rooms shouldn't appear under "joined" if a join loses a race to a ban.
A complicated edge case. Imagine the following scenario:
* you attempt to join a room
* racing with that is a ban which comes in over federation, which ends up with
an earlier stream_ordering than the join.
* you get a sync response with a sync token which is _after_ the ban, but before
the join
* now your join lands; it is a valid event because its `prev_event`s predate the
ban, but will not make it into current_state_events (because bans win over
joins in state res, essentially).
* When we do a sync from the incremental sync, the only event in the timeline
is your join ... and yet you aren't joined.
The ban coming in over federation isn't crucial for this behaviour; the key
requirements are:
1. the homeserver generates a join event with prev_events that precede the ban
(so that it passes the "are you banned" test)
2. the join event has a stream_ordering after that of the ban.
We use monkeypatching to artificially trigger condition (1).
"""
owner = self.register_user('alice', 'password')
owner_tok = self.login(owner, 'password')
room_id = self.helper.create_room_as(owner, is_public=True, tok=
owner_tok)
alice_sync_result: SyncResult = self.get_success(self.sync_handler.
wait_for_sync_for_user(create_requester(owner),
generate_sync_config(owner)))
self.assertEqual(len(alice_sync_result.joined), 1)
self.assertEqual(alice_sync_result.joined[0].room_id, room_id)
last_room_creation_event_id = alice_sync_result.joined[0
].timeline.events[-1].event_id
eve = self.register_user('eve', 'password')
eve_token = self.login(eve, 'password')
self.helper.ban(room_id, owner, eve, tok=owner_tok)
eve_requester = create_requester(eve)
eve_sync_config = generate_sync_config(eve)
eve_sync_after_ban: SyncResult = self.get_success(self.sync_handler
.wait_for_sync_for_user(eve_requester, eve_sync_config))
self.assertEqual(eve_sync_after_ban.joined, [])
mocked_get_prev_events = patch.object(self.hs.get_datastores().main,
'get_prev_events_for_room', new_callable=AsyncMock,
return_value=[last_room_creation_event_id])
with mocked_get_prev_events:
self.helper.join(room_id, eve, tok=eve_token)
eve_incremental_sync_after_join: SyncResult = self.get_success(self
.sync_handler.wait_for_sync_for_user(eve_requester,
eve_sync_config, since_token=eve_sync_after_ban.next_batch))
self.assertEqual(eve_incremental_sync_after_join.joined, [])
eve_initial_sync_after_join: SyncResult = self.get_success(self.
sync_handler.wait_for_sync_for_user(eve_requester,
eve_sync_config, since_token=None))
self.assertEqual(eve_initial_sync_after_join.joined, [])
_request_key = 0
def generate_sync_config(user_id: str, device_id: Optional[str]='device_id'
) ->SyncConfig:
"""Generate a sync config (with a unique request key)."""
global _request_key
_request_key += 1
return SyncConfig(user=UserID.from_string(user_id), filter_collection=
Filtering(Mock()).DEFAULT_FILTER_COLLECTION, is_guest=False,
request_key=('request_key', _request_key), device_id=device_id)
<|reserved_special_token_1|>
from typing import Optional
from unittest.mock import AsyncMock, Mock, patch
from twisted.test.proto_helpers import MemoryReactor
from synapse.api.constants import EventTypes, JoinRules
from synapse.api.errors import Codes, ResourceLimitError
from synapse.api.filtering import Filtering
from synapse.api.room_versions import RoomVersions
from synapse.handlers.sync import SyncConfig, SyncResult
from synapse.rest import admin
from synapse.rest.client import knock, login, room
from synapse.server import HomeServer
from synapse.types import UserID, create_requester
from synapse.util import Clock
import tests.unittest
import tests.utils
class SyncTestCase(tests.unittest.HomeserverTestCase):
"""Tests Sync Handler."""
servlets = [admin.register_servlets, knock.register_servlets, login.
register_servlets, room.register_servlets]
def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer
) ->None:
self.sync_handler = self.hs.get_sync_handler()
self.store = self.hs.get_datastores().main
self.auth_blocking = self.hs.get_auth_blocking()
def test_wait_for_sync_for_user_auth_blocking(self) ->None:
user_id1 = '@user1:test'
user_id2 = '@user2:test'
sync_config = generate_sync_config(user_id1)
requester = create_requester(user_id1)
self.reactor.advance(100)
self.auth_blocking._limit_usage_by_mau = True
self.auth_blocking._max_mau_value = 1
self.get_success(self.store.upsert_monthly_active_user(user_id1))
self.get_success(self.sync_handler.wait_for_sync_for_user(requester,
sync_config))
self.auth_blocking._hs_disabled = True
e = self.get_failure(self.sync_handler.wait_for_sync_for_user(
requester, sync_config), ResourceLimitError)
self.assertEqual(e.value.errcode, Codes.RESOURCE_LIMIT_EXCEEDED)
self.auth_blocking._hs_disabled = False
sync_config = generate_sync_config(user_id2)
requester = create_requester(user_id2)
e = self.get_failure(self.sync_handler.wait_for_sync_for_user(
requester, sync_config), ResourceLimitError)
self.assertEqual(e.value.errcode, Codes.RESOURCE_LIMIT_EXCEEDED)
def test_unknown_room_version(self) ->None:
"""
A room with an unknown room version should not break sync (and should be excluded).
"""
inviter = self.register_user('creator', 'pass', admin=True)
inviter_tok = self.login('@creator:test', 'pass')
user = self.register_user('user', 'pass')
tok = self.login('user', 'pass')
requester = create_requester(user)
initial_result = self.get_success(self.sync_handler.
wait_for_sync_for_user(requester, sync_config=
generate_sync_config(user, device_id='dev')))
joined_room = self.helper.create_room_as(user, tok=tok)
invite_room = self.helper.create_room_as(inviter, tok=inviter_tok)
self.helper.invite(invite_room, targ=user, tok=inviter_tok)
knock_room = self.helper.create_room_as(inviter, room_version=
RoomVersions.V7.identifier, tok=inviter_tok)
self.helper.send_state(knock_room, EventTypes.JoinRules, {
'join_rule': JoinRules.KNOCK}, tok=inviter_tok)
channel = self.make_request('POST', '/_matrix/client/r0/knock/%s' %
(knock_room,), b'{}', tok)
self.assertEqual(200, channel.code, channel.result)
result = self.get_success(self.sync_handler.wait_for_sync_for_user(
requester, sync_config=generate_sync_config(user)))
self.assertIn(joined_room, [r.room_id for r in result.joined])
self.assertIn(invite_room, [r.room_id for r in result.invited])
self.assertIn(knock_room, [r.room_id for r in result.knocked])
result = self.get_success(self.sync_handler.wait_for_sync_for_user(
requester, sync_config=generate_sync_config(user, device_id=
'dev'), since_token=initial_result.next_batch))
self.assertIn(joined_room, [r.room_id for r in result.joined])
self.assertIn(invite_room, [r.room_id for r in result.invited])
self.assertIn(knock_room, [r.room_id for r in result.knocked])
for room_id in (joined_room, invite_room, knock_room):
self.get_success(self.hs.get_datastores().main.db_pool.
simple_update('rooms', keyvalues={'room_id': room_id},
updatevalues={'room_version': 'unknown-room-version'}, desc
='updated-room-version'))
self.store.get_rooms_for_user_with_stream_ordering.invalidate_all()
self.store.get_rooms_for_user.invalidate_all()
self.store._get_event_cache.clear()
self.store._event_ref.clear()
result = self.get_success(self.sync_handler.wait_for_sync_for_user(
requester, sync_config=generate_sync_config(user)))
self.assertNotIn(joined_room, [r.room_id for r in result.joined])
self.assertNotIn(invite_room, [r.room_id for r in result.invited])
self.assertNotIn(knock_room, [r.room_id for r in result.knocked])
result = self.get_success(self.sync_handler.wait_for_sync_for_user(
requester, sync_config=generate_sync_config(user, device_id=
'dev'), since_token=initial_result.next_batch))
self.assertNotIn(joined_room, [r.room_id for r in result.joined])
self.assertNotIn(invite_room, [r.room_id for r in result.invited])
self.assertNotIn(knock_room, [r.room_id for r in result.knocked])
    def test_ban_wins_race_with_join(self) ->None:
        """Rooms shouldn't appear under "joined" if a join loses a race to a ban.

        A complicated edge case. Imagine the following scenario:

        * you attempt to join a room
        * racing with that is a ban which comes in over federation, which ends up with
          an earlier stream_ordering than the join.
        * you get a sync response with a sync token which is _after_ the ban, but before
          the join
        * now your join lands; it is a valid event because its `prev_event`s predate the
          ban, but will not make it into current_state_events (because bans win over
          joins in state res, essentially).
        * When we do a sync from the incremental sync, the only event in the timeline
          is your join ... and yet you aren't joined.

        The ban coming in over federation isn't crucial for this behaviour; the key
        requirements are:
        1. the homeserver generates a join event with prev_events that precede the ban
           (so that it passes the "are you banned" test)
        2. the join event has a stream_ordering after that of the ban.

        We use monkeypatching to artificially trigger condition (1).
        """
        # A local user Alice creates a room.
        owner = self.register_user('alice', 'password')
        owner_tok = self.login(owner, 'password')
        room_id = self.helper.create_room_as(owner, is_public=True, tok=
            owner_tok)
        # Do a sync as Alice to get the latest event in the room.
        alice_sync_result: SyncResult = self.get_success(self.sync_handler.
            wait_for_sync_for_user(create_requester(owner),
            generate_sync_config(owner)))
        self.assertEqual(len(alice_sync_result.joined), 1)
        self.assertEqual(alice_sync_result.joined[0].room_id, room_id)
        last_room_creation_event_id = alice_sync_result.joined[0
            ].timeline.events[-1].event_id
        # Eve, a ne'er-do-well, registers.
        eve = self.register_user('eve', 'password')
        eve_token = self.login(eve, 'password')
        # Alice preemptively bans Eve.
        self.helper.ban(room_id, owner, eve, tok=owner_tok)
        # Eve syncs.
        eve_requester = create_requester(eve)
        eve_sync_config = generate_sync_config(eve)
        eve_sync_after_ban: SyncResult = self.get_success(self.sync_handler
            .wait_for_sync_for_user(eve_requester, eve_sync_config))
        # Sanity check this sync result. We shouldn't be joined to the room.
        self.assertEqual(eve_sync_after_ban.joined, [])
        # Eve tries to join the room. We monkey patch the internal logic which
        # selects the prev_events used when creating the join event, such that
        # the ban does not precede the join.
        mocked_get_prev_events = patch.object(self.hs.get_datastores().main,
            'get_prev_events_for_room', new_callable=AsyncMock,
            return_value=[last_room_creation_event_id])
        with mocked_get_prev_events:
            self.helper.join(room_id, eve, tok=eve_token)
        # Eve makes a second, incremental sync.
        eve_incremental_sync_after_join: SyncResult = self.get_success(self
            .sync_handler.wait_for_sync_for_user(eve_requester,
            eve_sync_config, since_token=eve_sync_after_ban.next_batch))
        # Eve should not see herself as joined to the room.
        self.assertEqual(eve_incremental_sync_after_join.joined, [])
        # If we did a third initial sync, we should _still_ see eve is not
        # joined to the room.
        eve_initial_sync_after_join: SyncResult = self.get_success(self.
            sync_handler.wait_for_sync_for_user(eve_requester,
            eve_sync_config, since_token=None))
        self.assertEqual(eve_initial_sync_after_join.joined, [])
# Monotonically increasing counter: gives every generated SyncConfig a distinct
# request key, so repeated syncs in a test are never coalesced on a shared key.
_request_key = 0


def generate_sync_config(
    user_id: str, device_id: Optional[str] = "device_id"
) -> SyncConfig:
    """Build a ``SyncConfig`` for *user_id* with a unique request key.

    ``device_id`` defaults to ``"device_id"``; pass a different value to
    simulate syncing from another device.
    """
    global _request_key
    _request_key += 1

    filtering = Filtering(Mock())
    return SyncConfig(
        user=UserID.from_string(user_id),
        filter_collection=filtering.DEFAULT_FILTER_COLLECTION,
        is_guest=False,
        request_key=("request_key", _request_key),
        device_id=device_id,
    )
<|reserved_special_token_1|>
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Optional
from unittest.mock import AsyncMock, Mock, patch
from twisted.test.proto_helpers import MemoryReactor
from synapse.api.constants import EventTypes, JoinRules
from synapse.api.errors import Codes, ResourceLimitError
from synapse.api.filtering import Filtering
from synapse.api.room_versions import RoomVersions
from synapse.handlers.sync import SyncConfig, SyncResult
from synapse.rest import admin
from synapse.rest.client import knock, login, room
from synapse.server import HomeServer
from synapse.types import UserID, create_requester
from synapse.util import Clock
import tests.unittest
import tests.utils
class SyncTestCase(tests.unittest.HomeserverTestCase):
    """Tests Sync Handler."""

    # REST servlets mounted on the test homeserver: admin registration, login,
    # room creation/membership, and knocking — everything the tests below call.
    servlets = [
        admin.register_servlets,
        knock.register_servlets,
        login.register_servlets,
        room.register_servlets,
    ]

    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
        """Grab handles to the sync handler, main datastore and AuthBlocking."""
        self.sync_handler = self.hs.get_sync_handler()
        self.store = self.hs.get_datastores().main

        # AuthBlocking reads from the hs' config on initialization. We need to
        # modify its config instead of the hs'
        self.auth_blocking = self.hs.get_auth_blocking()

    def test_wait_for_sync_for_user_auth_blocking(self) -> None:
        """Resource limiting (MAU cap / hs-disabled flag) should surface from
        sync as a ResourceLimitError for blocked users."""
        user_id1 = "@user1:test"
        user_id2 = "@user2:test"
        sync_config = generate_sync_config(user_id1)
        requester = create_requester(user_id1)

        self.reactor.advance(100)  # So we get not 0 time
        self.auth_blocking._limit_usage_by_mau = True
        self.auth_blocking._max_mau_value = 1

        # Check that the happy case does not throw errors
        self.get_success(self.store.upsert_monthly_active_user(user_id1))
        self.get_success(
            self.sync_handler.wait_for_sync_for_user(requester, sync_config)
        )

        # Test that global lock works
        self.auth_blocking._hs_disabled = True
        e = self.get_failure(
            self.sync_handler.wait_for_sync_for_user(requester, sync_config),
            ResourceLimitError,
        )
        self.assertEqual(e.value.errcode, Codes.RESOURCE_LIMIT_EXCEEDED)

        self.auth_blocking._hs_disabled = False

        # A second user should be blocked: the MAU cap is 1 and only user1 is
        # recorded as monthly active.
        sync_config = generate_sync_config(user_id2)
        requester = create_requester(user_id2)

        e = self.get_failure(
            self.sync_handler.wait_for_sync_for_user(requester, sync_config),
            ResourceLimitError,
        )
        self.assertEqual(e.value.errcode, Codes.RESOURCE_LIMIT_EXCEEDED)

    def test_unknown_room_version(self) -> None:
        """
        A room with an unknown room version should not break sync (and should be excluded).
        """
        inviter = self.register_user("creator", "pass", admin=True)
        inviter_tok = self.login("@creator:test", "pass")
        user = self.register_user("user", "pass")
        tok = self.login("user", "pass")

        # Do an initial sync on a different device.
        requester = create_requester(user)
        initial_result = self.get_success(
            self.sync_handler.wait_for_sync_for_user(
                requester, sync_config=generate_sync_config(user, device_id="dev")
            )
        )

        # Create a room as the user.
        joined_room = self.helper.create_room_as(user, tok=tok)

        # Invite the user to the room as someone else.
        invite_room = self.helper.create_room_as(inviter, tok=inviter_tok)
        self.helper.invite(invite_room, targ=user, tok=inviter_tok)

        # Create a v7 room with join rule "knock", then knock on it as the user.
        knock_room = self.helper.create_room_as(
            inviter, room_version=RoomVersions.V7.identifier, tok=inviter_tok
        )
        self.helper.send_state(
            knock_room,
            EventTypes.JoinRules,
            {"join_rule": JoinRules.KNOCK},
            tok=inviter_tok,
        )
        channel = self.make_request(
            "POST",
            "/_matrix/client/r0/knock/%s" % (knock_room,),
            b"{}",
            tok,
        )
        self.assertEqual(200, channel.code, channel.result)

        # The rooms should appear in the sync response.
        result = self.get_success(
            self.sync_handler.wait_for_sync_for_user(
                requester, sync_config=generate_sync_config(user)
            )
        )
        self.assertIn(joined_room, [r.room_id for r in result.joined])
        self.assertIn(invite_room, [r.room_id for r in result.invited])
        self.assertIn(knock_room, [r.room_id for r in result.knocked])

        # Test a incremental sync (by providing a since_token).
        result = self.get_success(
            self.sync_handler.wait_for_sync_for_user(
                requester,
                sync_config=generate_sync_config(user, device_id="dev"),
                since_token=initial_result.next_batch,
            )
        )
        self.assertIn(joined_room, [r.room_id for r in result.joined])
        self.assertIn(invite_room, [r.room_id for r in result.invited])
        self.assertIn(knock_room, [r.room_id for r in result.knocked])

        # Poke the database and update the room version to an unknown one.
        for room_id in (joined_room, invite_room, knock_room):
            self.get_success(
                self.hs.get_datastores().main.db_pool.simple_update(
                    "rooms",
                    keyvalues={"room_id": room_id},
                    updatevalues={"room_version": "unknown-room-version"},
                    desc="updated-room-version",
                )
            )

        # Blow away caches (supported room versions can only change due to a restart).
        self.store.get_rooms_for_user_with_stream_ordering.invalidate_all()
        self.store.get_rooms_for_user.invalidate_all()
        self.store._get_event_cache.clear()
        self.store._event_ref.clear()

        # The rooms should be excluded from the sync response.
        # Get a new request key.
        result = self.get_success(
            self.sync_handler.wait_for_sync_for_user(
                requester, sync_config=generate_sync_config(user)
            )
        )
        self.assertNotIn(joined_room, [r.room_id for r in result.joined])
        self.assertNotIn(invite_room, [r.room_id for r in result.invited])
        self.assertNotIn(knock_room, [r.room_id for r in result.knocked])

        # The rooms should also not be in an incremental sync.
        result = self.get_success(
            self.sync_handler.wait_for_sync_for_user(
                requester,
                sync_config=generate_sync_config(user, device_id="dev"),
                since_token=initial_result.next_batch,
            )
        )
        self.assertNotIn(joined_room, [r.room_id for r in result.joined])
        self.assertNotIn(invite_room, [r.room_id for r in result.invited])
        self.assertNotIn(knock_room, [r.room_id for r in result.knocked])

    def test_ban_wins_race_with_join(self) -> None:
        """Rooms shouldn't appear under "joined" if a join loses a race to a ban.

        A complicated edge case. Imagine the following scenario:

        * you attempt to join a room
        * racing with that is a ban which comes in over federation, which ends up with
          an earlier stream_ordering than the join.
        * you get a sync response with a sync token which is _after_ the ban, but before
          the join
        * now your join lands; it is a valid event because its `prev_event`s predate the
          ban, but will not make it into current_state_events (because bans win over
          joins in state res, essentially).
        * When we do a sync from the incremental sync, the only event in the timeline
          is your join ... and yet you aren't joined.

        The ban coming in over federation isn't crucial for this behaviour; the key
        requirements are:
        1. the homeserver generates a join event with prev_events that precede the ban
           (so that it passes the "are you banned" test)
        2. the join event has a stream_ordering after that of the ban.

        We use monkeypatching to artificially trigger condition (1).
        """
        # A local user Alice creates a room.
        owner = self.register_user("alice", "password")
        owner_tok = self.login(owner, "password")
        room_id = self.helper.create_room_as(owner, is_public=True, tok=owner_tok)

        # Do a sync as Alice to get the latest event in the room.
        alice_sync_result: SyncResult = self.get_success(
            self.sync_handler.wait_for_sync_for_user(
                create_requester(owner), generate_sync_config(owner)
            )
        )
        self.assertEqual(len(alice_sync_result.joined), 1)
        self.assertEqual(alice_sync_result.joined[0].room_id, room_id)
        last_room_creation_event_id = (
            alice_sync_result.joined[0].timeline.events[-1].event_id
        )

        # Eve, a ne'er-do-well, registers.
        eve = self.register_user("eve", "password")
        eve_token = self.login(eve, "password")

        # Alice preemptively bans Eve.
        self.helper.ban(room_id, owner, eve, tok=owner_tok)

        # Eve syncs.
        eve_requester = create_requester(eve)
        eve_sync_config = generate_sync_config(eve)
        eve_sync_after_ban: SyncResult = self.get_success(
            self.sync_handler.wait_for_sync_for_user(eve_requester, eve_sync_config)
        )

        # Sanity check this sync result. We shouldn't be joined to the room.
        self.assertEqual(eve_sync_after_ban.joined, [])

        # Eve tries to join the room. We monkey patch the internal logic which selects
        # the prev_events used when creating the join event, such that the ban does not
        # precede the join.
        mocked_get_prev_events = patch.object(
            self.hs.get_datastores().main,
            "get_prev_events_for_room",
            new_callable=AsyncMock,
            return_value=[last_room_creation_event_id],
        )
        with mocked_get_prev_events:
            self.helper.join(room_id, eve, tok=eve_token)

        # Eve makes a second, incremental sync.
        eve_incremental_sync_after_join: SyncResult = self.get_success(
            self.sync_handler.wait_for_sync_for_user(
                eve_requester,
                eve_sync_config,
                since_token=eve_sync_after_ban.next_batch,
            )
        )
        # Eve should not see herself as joined to the room.
        self.assertEqual(eve_incremental_sync_after_join.joined, [])

        # If we did a third initial sync, we should _still_ see eve is not joined to the room.
        eve_initial_sync_after_join: SyncResult = self.get_success(
            self.sync_handler.wait_for_sync_for_user(
                eve_requester,
                eve_sync_config,
                since_token=None,
            )
        )
        self.assertEqual(eve_initial_sync_after_join.joined, [])
# Monotonically increasing counter: gives every generated SyncConfig a distinct
# request key, so repeated syncs in a test are never coalesced on a shared key.
_request_key = 0


def generate_sync_config(
    user_id: str, device_id: Optional[str] = "device_id"
) -> SyncConfig:
    """Generate a sync config (with a unique request key).

    ``device_id`` defaults to ``"device_id"``; pass a different value to
    simulate syncing from another device.
    """
    global _request_key
    _request_key += 1
    return SyncConfig(
        user=UserID.from_string(user_id),
        filter_collection=Filtering(Mock()).DEFAULT_FILTER_COLLECTION,
        is_guest=False,
        request_key=("request_key", _request_key),
        device_id=device_id,
    )
|
flexible
|
{
"blob_id": "fc5b9117ecf56401a888e2b6a5e244f9ab115e41",
"index": 3999,
"step-1": "<mask token>\n\n\nclass SyncTestCase(tests.unittest.HomeserverTestCase):\n <mask token>\n servlets = [admin.register_servlets, knock.register_servlets, login.\n register_servlets, room.register_servlets]\n\n def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer\n ) ->None:\n self.sync_handler = self.hs.get_sync_handler()\n self.store = self.hs.get_datastores().main\n self.auth_blocking = self.hs.get_auth_blocking()\n\n def test_wait_for_sync_for_user_auth_blocking(self) ->None:\n user_id1 = '@user1:test'\n user_id2 = '@user2:test'\n sync_config = generate_sync_config(user_id1)\n requester = create_requester(user_id1)\n self.reactor.advance(100)\n self.auth_blocking._limit_usage_by_mau = True\n self.auth_blocking._max_mau_value = 1\n self.get_success(self.store.upsert_monthly_active_user(user_id1))\n self.get_success(self.sync_handler.wait_for_sync_for_user(requester,\n sync_config))\n self.auth_blocking._hs_disabled = True\n e = self.get_failure(self.sync_handler.wait_for_sync_for_user(\n requester, sync_config), ResourceLimitError)\n self.assertEqual(e.value.errcode, Codes.RESOURCE_LIMIT_EXCEEDED)\n self.auth_blocking._hs_disabled = False\n sync_config = generate_sync_config(user_id2)\n requester = create_requester(user_id2)\n e = self.get_failure(self.sync_handler.wait_for_sync_for_user(\n requester, sync_config), ResourceLimitError)\n self.assertEqual(e.value.errcode, Codes.RESOURCE_LIMIT_EXCEEDED)\n\n def test_unknown_room_version(self) ->None:\n \"\"\"\n A room with an unknown room version should not break sync (and should be excluded).\n \"\"\"\n inviter = self.register_user('creator', 'pass', admin=True)\n inviter_tok = self.login('@creator:test', 'pass')\n user = self.register_user('user', 'pass')\n tok = self.login('user', 'pass')\n requester = create_requester(user)\n initial_result = self.get_success(self.sync_handler.\n wait_for_sync_for_user(requester, sync_config=\n generate_sync_config(user, device_id='dev')))\n 
joined_room = self.helper.create_room_as(user, tok=tok)\n invite_room = self.helper.create_room_as(inviter, tok=inviter_tok)\n self.helper.invite(invite_room, targ=user, tok=inviter_tok)\n knock_room = self.helper.create_room_as(inviter, room_version=\n RoomVersions.V7.identifier, tok=inviter_tok)\n self.helper.send_state(knock_room, EventTypes.JoinRules, {\n 'join_rule': JoinRules.KNOCK}, tok=inviter_tok)\n channel = self.make_request('POST', '/_matrix/client/r0/knock/%s' %\n (knock_room,), b'{}', tok)\n self.assertEqual(200, channel.code, channel.result)\n result = self.get_success(self.sync_handler.wait_for_sync_for_user(\n requester, sync_config=generate_sync_config(user)))\n self.assertIn(joined_room, [r.room_id for r in result.joined])\n self.assertIn(invite_room, [r.room_id for r in result.invited])\n self.assertIn(knock_room, [r.room_id for r in result.knocked])\n result = self.get_success(self.sync_handler.wait_for_sync_for_user(\n requester, sync_config=generate_sync_config(user, device_id=\n 'dev'), since_token=initial_result.next_batch))\n self.assertIn(joined_room, [r.room_id for r in result.joined])\n self.assertIn(invite_room, [r.room_id for r in result.invited])\n self.assertIn(knock_room, [r.room_id for r in result.knocked])\n for room_id in (joined_room, invite_room, knock_room):\n self.get_success(self.hs.get_datastores().main.db_pool.\n simple_update('rooms', keyvalues={'room_id': room_id},\n updatevalues={'room_version': 'unknown-room-version'}, desc\n ='updated-room-version'))\n self.store.get_rooms_for_user_with_stream_ordering.invalidate_all()\n self.store.get_rooms_for_user.invalidate_all()\n self.store._get_event_cache.clear()\n self.store._event_ref.clear()\n result = self.get_success(self.sync_handler.wait_for_sync_for_user(\n requester, sync_config=generate_sync_config(user)))\n self.assertNotIn(joined_room, [r.room_id for r in result.joined])\n self.assertNotIn(invite_room, [r.room_id for r in result.invited])\n 
self.assertNotIn(knock_room, [r.room_id for r in result.knocked])\n result = self.get_success(self.sync_handler.wait_for_sync_for_user(\n requester, sync_config=generate_sync_config(user, device_id=\n 'dev'), since_token=initial_result.next_batch))\n self.assertNotIn(joined_room, [r.room_id for r in result.joined])\n self.assertNotIn(invite_room, [r.room_id for r in result.invited])\n self.assertNotIn(knock_room, [r.room_id for r in result.knocked])\n\n def test_ban_wins_race_with_join(self) ->None:\n \"\"\"Rooms shouldn't appear under \"joined\" if a join loses a race to a ban.\n\n A complicated edge case. Imagine the following scenario:\n\n * you attempt to join a room\n * racing with that is a ban which comes in over federation, which ends up with\n an earlier stream_ordering than the join.\n * you get a sync response with a sync token which is _after_ the ban, but before\n the join\n * now your join lands; it is a valid event because its `prev_event`s predate the\n ban, but will not make it into current_state_events (because bans win over\n joins in state res, essentially).\n * When we do a sync from the incremental sync, the only event in the timeline\n is your join ... and yet you aren't joined.\n\n The ban coming in over federation isn't crucial for this behaviour; the key\n requirements are:\n 1. the homeserver generates a join event with prev_events that precede the ban\n (so that it passes the \"are you banned\" test)\n 2. 
the join event has a stream_ordering after that of the ban.\n\n We use monkeypatching to artificially trigger condition (1).\n \"\"\"\n owner = self.register_user('alice', 'password')\n owner_tok = self.login(owner, 'password')\n room_id = self.helper.create_room_as(owner, is_public=True, tok=\n owner_tok)\n alice_sync_result: SyncResult = self.get_success(self.sync_handler.\n wait_for_sync_for_user(create_requester(owner),\n generate_sync_config(owner)))\n self.assertEqual(len(alice_sync_result.joined), 1)\n self.assertEqual(alice_sync_result.joined[0].room_id, room_id)\n last_room_creation_event_id = alice_sync_result.joined[0\n ].timeline.events[-1].event_id\n eve = self.register_user('eve', 'password')\n eve_token = self.login(eve, 'password')\n self.helper.ban(room_id, owner, eve, tok=owner_tok)\n eve_requester = create_requester(eve)\n eve_sync_config = generate_sync_config(eve)\n eve_sync_after_ban: SyncResult = self.get_success(self.sync_handler\n .wait_for_sync_for_user(eve_requester, eve_sync_config))\n self.assertEqual(eve_sync_after_ban.joined, [])\n mocked_get_prev_events = patch.object(self.hs.get_datastores().main,\n 'get_prev_events_for_room', new_callable=AsyncMock,\n return_value=[last_room_creation_event_id])\n with mocked_get_prev_events:\n self.helper.join(room_id, eve, tok=eve_token)\n eve_incremental_sync_after_join: SyncResult = self.get_success(self\n .sync_handler.wait_for_sync_for_user(eve_requester,\n eve_sync_config, since_token=eve_sync_after_ban.next_batch))\n self.assertEqual(eve_incremental_sync_after_join.joined, [])\n eve_initial_sync_after_join: SyncResult = self.get_success(self.\n sync_handler.wait_for_sync_for_user(eve_requester,\n eve_sync_config, since_token=None))\n self.assertEqual(eve_initial_sync_after_join.joined, [])\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass SyncTestCase(tests.unittest.HomeserverTestCase):\n \"\"\"Tests Sync Handler.\"\"\"\n servlets = [admin.register_servlets, knock.register_servlets, login.\n register_servlets, room.register_servlets]\n\n def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer\n ) ->None:\n self.sync_handler = self.hs.get_sync_handler()\n self.store = self.hs.get_datastores().main\n self.auth_blocking = self.hs.get_auth_blocking()\n\n def test_wait_for_sync_for_user_auth_blocking(self) ->None:\n user_id1 = '@user1:test'\n user_id2 = '@user2:test'\n sync_config = generate_sync_config(user_id1)\n requester = create_requester(user_id1)\n self.reactor.advance(100)\n self.auth_blocking._limit_usage_by_mau = True\n self.auth_blocking._max_mau_value = 1\n self.get_success(self.store.upsert_monthly_active_user(user_id1))\n self.get_success(self.sync_handler.wait_for_sync_for_user(requester,\n sync_config))\n self.auth_blocking._hs_disabled = True\n e = self.get_failure(self.sync_handler.wait_for_sync_for_user(\n requester, sync_config), ResourceLimitError)\n self.assertEqual(e.value.errcode, Codes.RESOURCE_LIMIT_EXCEEDED)\n self.auth_blocking._hs_disabled = False\n sync_config = generate_sync_config(user_id2)\n requester = create_requester(user_id2)\n e = self.get_failure(self.sync_handler.wait_for_sync_for_user(\n requester, sync_config), ResourceLimitError)\n self.assertEqual(e.value.errcode, Codes.RESOURCE_LIMIT_EXCEEDED)\n\n def test_unknown_room_version(self) ->None:\n \"\"\"\n A room with an unknown room version should not break sync (and should be excluded).\n \"\"\"\n inviter = self.register_user('creator', 'pass', admin=True)\n inviter_tok = self.login('@creator:test', 'pass')\n user = self.register_user('user', 'pass')\n tok = self.login('user', 'pass')\n requester = create_requester(user)\n initial_result = self.get_success(self.sync_handler.\n wait_for_sync_for_user(requester, sync_config=\n generate_sync_config(user, 
device_id='dev')))\n joined_room = self.helper.create_room_as(user, tok=tok)\n invite_room = self.helper.create_room_as(inviter, tok=inviter_tok)\n self.helper.invite(invite_room, targ=user, tok=inviter_tok)\n knock_room = self.helper.create_room_as(inviter, room_version=\n RoomVersions.V7.identifier, tok=inviter_tok)\n self.helper.send_state(knock_room, EventTypes.JoinRules, {\n 'join_rule': JoinRules.KNOCK}, tok=inviter_tok)\n channel = self.make_request('POST', '/_matrix/client/r0/knock/%s' %\n (knock_room,), b'{}', tok)\n self.assertEqual(200, channel.code, channel.result)\n result = self.get_success(self.sync_handler.wait_for_sync_for_user(\n requester, sync_config=generate_sync_config(user)))\n self.assertIn(joined_room, [r.room_id for r in result.joined])\n self.assertIn(invite_room, [r.room_id for r in result.invited])\n self.assertIn(knock_room, [r.room_id for r in result.knocked])\n result = self.get_success(self.sync_handler.wait_for_sync_for_user(\n requester, sync_config=generate_sync_config(user, device_id=\n 'dev'), since_token=initial_result.next_batch))\n self.assertIn(joined_room, [r.room_id for r in result.joined])\n self.assertIn(invite_room, [r.room_id for r in result.invited])\n self.assertIn(knock_room, [r.room_id for r in result.knocked])\n for room_id in (joined_room, invite_room, knock_room):\n self.get_success(self.hs.get_datastores().main.db_pool.\n simple_update('rooms', keyvalues={'room_id': room_id},\n updatevalues={'room_version': 'unknown-room-version'}, desc\n ='updated-room-version'))\n self.store.get_rooms_for_user_with_stream_ordering.invalidate_all()\n self.store.get_rooms_for_user.invalidate_all()\n self.store._get_event_cache.clear()\n self.store._event_ref.clear()\n result = self.get_success(self.sync_handler.wait_for_sync_for_user(\n requester, sync_config=generate_sync_config(user)))\n self.assertNotIn(joined_room, [r.room_id for r in result.joined])\n self.assertNotIn(invite_room, [r.room_id for r in result.invited])\n 
self.assertNotIn(knock_room, [r.room_id for r in result.knocked])\n result = self.get_success(self.sync_handler.wait_for_sync_for_user(\n requester, sync_config=generate_sync_config(user, device_id=\n 'dev'), since_token=initial_result.next_batch))\n self.assertNotIn(joined_room, [r.room_id for r in result.joined])\n self.assertNotIn(invite_room, [r.room_id for r in result.invited])\n self.assertNotIn(knock_room, [r.room_id for r in result.knocked])\n\n def test_ban_wins_race_with_join(self) ->None:\n \"\"\"Rooms shouldn't appear under \"joined\" if a join loses a race to a ban.\n\n A complicated edge case. Imagine the following scenario:\n\n * you attempt to join a room\n * racing with that is a ban which comes in over federation, which ends up with\n an earlier stream_ordering than the join.\n * you get a sync response with a sync token which is _after_ the ban, but before\n the join\n * now your join lands; it is a valid event because its `prev_event`s predate the\n ban, but will not make it into current_state_events (because bans win over\n joins in state res, essentially).\n * When we do a sync from the incremental sync, the only event in the timeline\n is your join ... and yet you aren't joined.\n\n The ban coming in over federation isn't crucial for this behaviour; the key\n requirements are:\n 1. the homeserver generates a join event with prev_events that precede the ban\n (so that it passes the \"are you banned\" test)\n 2. 
the join event has a stream_ordering after that of the ban.\n\n We use monkeypatching to artificially trigger condition (1).\n \"\"\"\n owner = self.register_user('alice', 'password')\n owner_tok = self.login(owner, 'password')\n room_id = self.helper.create_room_as(owner, is_public=True, tok=\n owner_tok)\n alice_sync_result: SyncResult = self.get_success(self.sync_handler.\n wait_for_sync_for_user(create_requester(owner),\n generate_sync_config(owner)))\n self.assertEqual(len(alice_sync_result.joined), 1)\n self.assertEqual(alice_sync_result.joined[0].room_id, room_id)\n last_room_creation_event_id = alice_sync_result.joined[0\n ].timeline.events[-1].event_id\n eve = self.register_user('eve', 'password')\n eve_token = self.login(eve, 'password')\n self.helper.ban(room_id, owner, eve, tok=owner_tok)\n eve_requester = create_requester(eve)\n eve_sync_config = generate_sync_config(eve)\n eve_sync_after_ban: SyncResult = self.get_success(self.sync_handler\n .wait_for_sync_for_user(eve_requester, eve_sync_config))\n self.assertEqual(eve_sync_after_ban.joined, [])\n mocked_get_prev_events = patch.object(self.hs.get_datastores().main,\n 'get_prev_events_for_room', new_callable=AsyncMock,\n return_value=[last_room_creation_event_id])\n with mocked_get_prev_events:\n self.helper.join(room_id, eve, tok=eve_token)\n eve_incremental_sync_after_join: SyncResult = self.get_success(self\n .sync_handler.wait_for_sync_for_user(eve_requester,\n eve_sync_config, since_token=eve_sync_after_ban.next_batch))\n self.assertEqual(eve_incremental_sync_after_join.joined, [])\n eve_initial_sync_after_join: SyncResult = self.get_success(self.\n sync_handler.wait_for_sync_for_user(eve_requester,\n eve_sync_config, since_token=None))\n self.assertEqual(eve_initial_sync_after_join.joined, [])\n\n\n<mask token>\n\n\ndef generate_sync_config(user_id: str, device_id: Optional[str]='device_id'\n ) ->SyncConfig:\n \"\"\"Generate a sync config (with a unique request key).\"\"\"\n global 
_request_key\n _request_key += 1\n return SyncConfig(user=UserID.from_string(user_id), filter_collection=\n Filtering(Mock()).DEFAULT_FILTER_COLLECTION, is_guest=False,\n request_key=('request_key', _request_key), device_id=device_id)\n",
"step-3": "<mask token>\n\n\nclass SyncTestCase(tests.unittest.HomeserverTestCase):\n \"\"\"Tests Sync Handler.\"\"\"\n servlets = [admin.register_servlets, knock.register_servlets, login.\n register_servlets, room.register_servlets]\n\n def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer\n ) ->None:\n self.sync_handler = self.hs.get_sync_handler()\n self.store = self.hs.get_datastores().main\n self.auth_blocking = self.hs.get_auth_blocking()\n\n def test_wait_for_sync_for_user_auth_blocking(self) ->None:\n user_id1 = '@user1:test'\n user_id2 = '@user2:test'\n sync_config = generate_sync_config(user_id1)\n requester = create_requester(user_id1)\n self.reactor.advance(100)\n self.auth_blocking._limit_usage_by_mau = True\n self.auth_blocking._max_mau_value = 1\n self.get_success(self.store.upsert_monthly_active_user(user_id1))\n self.get_success(self.sync_handler.wait_for_sync_for_user(requester,\n sync_config))\n self.auth_blocking._hs_disabled = True\n e = self.get_failure(self.sync_handler.wait_for_sync_for_user(\n requester, sync_config), ResourceLimitError)\n self.assertEqual(e.value.errcode, Codes.RESOURCE_LIMIT_EXCEEDED)\n self.auth_blocking._hs_disabled = False\n sync_config = generate_sync_config(user_id2)\n requester = create_requester(user_id2)\n e = self.get_failure(self.sync_handler.wait_for_sync_for_user(\n requester, sync_config), ResourceLimitError)\n self.assertEqual(e.value.errcode, Codes.RESOURCE_LIMIT_EXCEEDED)\n\n def test_unknown_room_version(self) ->None:\n \"\"\"\n A room with an unknown room version should not break sync (and should be excluded).\n \"\"\"\n inviter = self.register_user('creator', 'pass', admin=True)\n inviter_tok = self.login('@creator:test', 'pass')\n user = self.register_user('user', 'pass')\n tok = self.login('user', 'pass')\n requester = create_requester(user)\n initial_result = self.get_success(self.sync_handler.\n wait_for_sync_for_user(requester, sync_config=\n generate_sync_config(user, 
device_id='dev')))\n joined_room = self.helper.create_room_as(user, tok=tok)\n invite_room = self.helper.create_room_as(inviter, tok=inviter_tok)\n self.helper.invite(invite_room, targ=user, tok=inviter_tok)\n knock_room = self.helper.create_room_as(inviter, room_version=\n RoomVersions.V7.identifier, tok=inviter_tok)\n self.helper.send_state(knock_room, EventTypes.JoinRules, {\n 'join_rule': JoinRules.KNOCK}, tok=inviter_tok)\n channel = self.make_request('POST', '/_matrix/client/r0/knock/%s' %\n (knock_room,), b'{}', tok)\n self.assertEqual(200, channel.code, channel.result)\n result = self.get_success(self.sync_handler.wait_for_sync_for_user(\n requester, sync_config=generate_sync_config(user)))\n self.assertIn(joined_room, [r.room_id for r in result.joined])\n self.assertIn(invite_room, [r.room_id for r in result.invited])\n self.assertIn(knock_room, [r.room_id for r in result.knocked])\n result = self.get_success(self.sync_handler.wait_for_sync_for_user(\n requester, sync_config=generate_sync_config(user, device_id=\n 'dev'), since_token=initial_result.next_batch))\n self.assertIn(joined_room, [r.room_id for r in result.joined])\n self.assertIn(invite_room, [r.room_id for r in result.invited])\n self.assertIn(knock_room, [r.room_id for r in result.knocked])\n for room_id in (joined_room, invite_room, knock_room):\n self.get_success(self.hs.get_datastores().main.db_pool.\n simple_update('rooms', keyvalues={'room_id': room_id},\n updatevalues={'room_version': 'unknown-room-version'}, desc\n ='updated-room-version'))\n self.store.get_rooms_for_user_with_stream_ordering.invalidate_all()\n self.store.get_rooms_for_user.invalidate_all()\n self.store._get_event_cache.clear()\n self.store._event_ref.clear()\n result = self.get_success(self.sync_handler.wait_for_sync_for_user(\n requester, sync_config=generate_sync_config(user)))\n self.assertNotIn(joined_room, [r.room_id for r in result.joined])\n self.assertNotIn(invite_room, [r.room_id for r in result.invited])\n 
self.assertNotIn(knock_room, [r.room_id for r in result.knocked])\n result = self.get_success(self.sync_handler.wait_for_sync_for_user(\n requester, sync_config=generate_sync_config(user, device_id=\n 'dev'), since_token=initial_result.next_batch))\n self.assertNotIn(joined_room, [r.room_id for r in result.joined])\n self.assertNotIn(invite_room, [r.room_id for r in result.invited])\n self.assertNotIn(knock_room, [r.room_id for r in result.knocked])\n\n def test_ban_wins_race_with_join(self) ->None:\n \"\"\"Rooms shouldn't appear under \"joined\" if a join loses a race to a ban.\n\n A complicated edge case. Imagine the following scenario:\n\n * you attempt to join a room\n * racing with that is a ban which comes in over federation, which ends up with\n an earlier stream_ordering than the join.\n * you get a sync response with a sync token which is _after_ the ban, but before\n the join\n * now your join lands; it is a valid event because its `prev_event`s predate the\n ban, but will not make it into current_state_events (because bans win over\n joins in state res, essentially).\n * When we do a sync from the incremental sync, the only event in the timeline\n is your join ... and yet you aren't joined.\n\n The ban coming in over federation isn't crucial for this behaviour; the key\n requirements are:\n 1. the homeserver generates a join event with prev_events that precede the ban\n (so that it passes the \"are you banned\" test)\n 2. 
the join event has a stream_ordering after that of the ban.\n\n We use monkeypatching to artificially trigger condition (1).\n \"\"\"\n owner = self.register_user('alice', 'password')\n owner_tok = self.login(owner, 'password')\n room_id = self.helper.create_room_as(owner, is_public=True, tok=\n owner_tok)\n alice_sync_result: SyncResult = self.get_success(self.sync_handler.\n wait_for_sync_for_user(create_requester(owner),\n generate_sync_config(owner)))\n self.assertEqual(len(alice_sync_result.joined), 1)\n self.assertEqual(alice_sync_result.joined[0].room_id, room_id)\n last_room_creation_event_id = alice_sync_result.joined[0\n ].timeline.events[-1].event_id\n eve = self.register_user('eve', 'password')\n eve_token = self.login(eve, 'password')\n self.helper.ban(room_id, owner, eve, tok=owner_tok)\n eve_requester = create_requester(eve)\n eve_sync_config = generate_sync_config(eve)\n eve_sync_after_ban: SyncResult = self.get_success(self.sync_handler\n .wait_for_sync_for_user(eve_requester, eve_sync_config))\n self.assertEqual(eve_sync_after_ban.joined, [])\n mocked_get_prev_events = patch.object(self.hs.get_datastores().main,\n 'get_prev_events_for_room', new_callable=AsyncMock,\n return_value=[last_room_creation_event_id])\n with mocked_get_prev_events:\n self.helper.join(room_id, eve, tok=eve_token)\n eve_incremental_sync_after_join: SyncResult = self.get_success(self\n .sync_handler.wait_for_sync_for_user(eve_requester,\n eve_sync_config, since_token=eve_sync_after_ban.next_batch))\n self.assertEqual(eve_incremental_sync_after_join.joined, [])\n eve_initial_sync_after_join: SyncResult = self.get_success(self.\n sync_handler.wait_for_sync_for_user(eve_requester,\n eve_sync_config, since_token=None))\n self.assertEqual(eve_initial_sync_after_join.joined, [])\n\n\n_request_key = 0\n\n\ndef generate_sync_config(user_id: str, device_id: Optional[str]='device_id'\n ) ->SyncConfig:\n \"\"\"Generate a sync config (with a unique request key).\"\"\"\n global 
_request_key\n _request_key += 1\n return SyncConfig(user=UserID.from_string(user_id), filter_collection=\n Filtering(Mock()).DEFAULT_FILTER_COLLECTION, is_guest=False,\n request_key=('request_key', _request_key), device_id=device_id)\n",
"step-4": "from typing import Optional\nfrom unittest.mock import AsyncMock, Mock, patch\nfrom twisted.test.proto_helpers import MemoryReactor\nfrom synapse.api.constants import EventTypes, JoinRules\nfrom synapse.api.errors import Codes, ResourceLimitError\nfrom synapse.api.filtering import Filtering\nfrom synapse.api.room_versions import RoomVersions\nfrom synapse.handlers.sync import SyncConfig, SyncResult\nfrom synapse.rest import admin\nfrom synapse.rest.client import knock, login, room\nfrom synapse.server import HomeServer\nfrom synapse.types import UserID, create_requester\nfrom synapse.util import Clock\nimport tests.unittest\nimport tests.utils\n\n\nclass SyncTestCase(tests.unittest.HomeserverTestCase):\n \"\"\"Tests Sync Handler.\"\"\"\n servlets = [admin.register_servlets, knock.register_servlets, login.\n register_servlets, room.register_servlets]\n\n def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer\n ) ->None:\n self.sync_handler = self.hs.get_sync_handler()\n self.store = self.hs.get_datastores().main\n self.auth_blocking = self.hs.get_auth_blocking()\n\n def test_wait_for_sync_for_user_auth_blocking(self) ->None:\n user_id1 = '@user1:test'\n user_id2 = '@user2:test'\n sync_config = generate_sync_config(user_id1)\n requester = create_requester(user_id1)\n self.reactor.advance(100)\n self.auth_blocking._limit_usage_by_mau = True\n self.auth_blocking._max_mau_value = 1\n self.get_success(self.store.upsert_monthly_active_user(user_id1))\n self.get_success(self.sync_handler.wait_for_sync_for_user(requester,\n sync_config))\n self.auth_blocking._hs_disabled = True\n e = self.get_failure(self.sync_handler.wait_for_sync_for_user(\n requester, sync_config), ResourceLimitError)\n self.assertEqual(e.value.errcode, Codes.RESOURCE_LIMIT_EXCEEDED)\n self.auth_blocking._hs_disabled = False\n sync_config = generate_sync_config(user_id2)\n requester = create_requester(user_id2)\n e = self.get_failure(self.sync_handler.wait_for_sync_for_user(\n 
requester, sync_config), ResourceLimitError)\n self.assertEqual(e.value.errcode, Codes.RESOURCE_LIMIT_EXCEEDED)\n\n def test_unknown_room_version(self) ->None:\n \"\"\"\n A room with an unknown room version should not break sync (and should be excluded).\n \"\"\"\n inviter = self.register_user('creator', 'pass', admin=True)\n inviter_tok = self.login('@creator:test', 'pass')\n user = self.register_user('user', 'pass')\n tok = self.login('user', 'pass')\n requester = create_requester(user)\n initial_result = self.get_success(self.sync_handler.\n wait_for_sync_for_user(requester, sync_config=\n generate_sync_config(user, device_id='dev')))\n joined_room = self.helper.create_room_as(user, tok=tok)\n invite_room = self.helper.create_room_as(inviter, tok=inviter_tok)\n self.helper.invite(invite_room, targ=user, tok=inviter_tok)\n knock_room = self.helper.create_room_as(inviter, room_version=\n RoomVersions.V7.identifier, tok=inviter_tok)\n self.helper.send_state(knock_room, EventTypes.JoinRules, {\n 'join_rule': JoinRules.KNOCK}, tok=inviter_tok)\n channel = self.make_request('POST', '/_matrix/client/r0/knock/%s' %\n (knock_room,), b'{}', tok)\n self.assertEqual(200, channel.code, channel.result)\n result = self.get_success(self.sync_handler.wait_for_sync_for_user(\n requester, sync_config=generate_sync_config(user)))\n self.assertIn(joined_room, [r.room_id for r in result.joined])\n self.assertIn(invite_room, [r.room_id for r in result.invited])\n self.assertIn(knock_room, [r.room_id for r in result.knocked])\n result = self.get_success(self.sync_handler.wait_for_sync_for_user(\n requester, sync_config=generate_sync_config(user, device_id=\n 'dev'), since_token=initial_result.next_batch))\n self.assertIn(joined_room, [r.room_id for r in result.joined])\n self.assertIn(invite_room, [r.room_id for r in result.invited])\n self.assertIn(knock_room, [r.room_id for r in result.knocked])\n for room_id in (joined_room, invite_room, knock_room):\n 
self.get_success(self.hs.get_datastores().main.db_pool.\n simple_update('rooms', keyvalues={'room_id': room_id},\n updatevalues={'room_version': 'unknown-room-version'}, desc\n ='updated-room-version'))\n self.store.get_rooms_for_user_with_stream_ordering.invalidate_all()\n self.store.get_rooms_for_user.invalidate_all()\n self.store._get_event_cache.clear()\n self.store._event_ref.clear()\n result = self.get_success(self.sync_handler.wait_for_sync_for_user(\n requester, sync_config=generate_sync_config(user)))\n self.assertNotIn(joined_room, [r.room_id for r in result.joined])\n self.assertNotIn(invite_room, [r.room_id for r in result.invited])\n self.assertNotIn(knock_room, [r.room_id for r in result.knocked])\n result = self.get_success(self.sync_handler.wait_for_sync_for_user(\n requester, sync_config=generate_sync_config(user, device_id=\n 'dev'), since_token=initial_result.next_batch))\n self.assertNotIn(joined_room, [r.room_id for r in result.joined])\n self.assertNotIn(invite_room, [r.room_id for r in result.invited])\n self.assertNotIn(knock_room, [r.room_id for r in result.knocked])\n\n def test_ban_wins_race_with_join(self) ->None:\n \"\"\"Rooms shouldn't appear under \"joined\" if a join loses a race to a ban.\n\n A complicated edge case. Imagine the following scenario:\n\n * you attempt to join a room\n * racing with that is a ban which comes in over federation, which ends up with\n an earlier stream_ordering than the join.\n * you get a sync response with a sync token which is _after_ the ban, but before\n the join\n * now your join lands; it is a valid event because its `prev_event`s predate the\n ban, but will not make it into current_state_events (because bans win over\n joins in state res, essentially).\n * When we do a sync from the incremental sync, the only event in the timeline\n is your join ... and yet you aren't joined.\n\n The ban coming in over federation isn't crucial for this behaviour; the key\n requirements are:\n 1. 
the homeserver generates a join event with prev_events that precede the ban\n (so that it passes the \"are you banned\" test)\n 2. the join event has a stream_ordering after that of the ban.\n\n We use monkeypatching to artificially trigger condition (1).\n \"\"\"\n owner = self.register_user('alice', 'password')\n owner_tok = self.login(owner, 'password')\n room_id = self.helper.create_room_as(owner, is_public=True, tok=\n owner_tok)\n alice_sync_result: SyncResult = self.get_success(self.sync_handler.\n wait_for_sync_for_user(create_requester(owner),\n generate_sync_config(owner)))\n self.assertEqual(len(alice_sync_result.joined), 1)\n self.assertEqual(alice_sync_result.joined[0].room_id, room_id)\n last_room_creation_event_id = alice_sync_result.joined[0\n ].timeline.events[-1].event_id\n eve = self.register_user('eve', 'password')\n eve_token = self.login(eve, 'password')\n self.helper.ban(room_id, owner, eve, tok=owner_tok)\n eve_requester = create_requester(eve)\n eve_sync_config = generate_sync_config(eve)\n eve_sync_after_ban: SyncResult = self.get_success(self.sync_handler\n .wait_for_sync_for_user(eve_requester, eve_sync_config))\n self.assertEqual(eve_sync_after_ban.joined, [])\n mocked_get_prev_events = patch.object(self.hs.get_datastores().main,\n 'get_prev_events_for_room', new_callable=AsyncMock,\n return_value=[last_room_creation_event_id])\n with mocked_get_prev_events:\n self.helper.join(room_id, eve, tok=eve_token)\n eve_incremental_sync_after_join: SyncResult = self.get_success(self\n .sync_handler.wait_for_sync_for_user(eve_requester,\n eve_sync_config, since_token=eve_sync_after_ban.next_batch))\n self.assertEqual(eve_incremental_sync_after_join.joined, [])\n eve_initial_sync_after_join: SyncResult = self.get_success(self.\n sync_handler.wait_for_sync_for_user(eve_requester,\n eve_sync_config, since_token=None))\n self.assertEqual(eve_initial_sync_after_join.joined, [])\n\n\n_request_key = 0\n\n\ndef generate_sync_config(user_id: str, 
device_id: Optional[str]='device_id'\n ) ->SyncConfig:\n \"\"\"Generate a sync config (with a unique request key).\"\"\"\n global _request_key\n _request_key += 1\n return SyncConfig(user=UserID.from_string(user_id), filter_collection=\n Filtering(Mock()).DEFAULT_FILTER_COLLECTION, is_guest=False,\n request_key=('request_key', _request_key), device_id=device_id)\n",
"step-5": "# Copyright 2018 New Vector Ltd\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nfrom typing import Optional\nfrom unittest.mock import AsyncMock, Mock, patch\n\nfrom twisted.test.proto_helpers import MemoryReactor\n\nfrom synapse.api.constants import EventTypes, JoinRules\nfrom synapse.api.errors import Codes, ResourceLimitError\nfrom synapse.api.filtering import Filtering\nfrom synapse.api.room_versions import RoomVersions\nfrom synapse.handlers.sync import SyncConfig, SyncResult\nfrom synapse.rest import admin\nfrom synapse.rest.client import knock, login, room\nfrom synapse.server import HomeServer\nfrom synapse.types import UserID, create_requester\nfrom synapse.util import Clock\n\nimport tests.unittest\nimport tests.utils\n\n\nclass SyncTestCase(tests.unittest.HomeserverTestCase):\n \"\"\"Tests Sync Handler.\"\"\"\n\n servlets = [\n admin.register_servlets,\n knock.register_servlets,\n login.register_servlets,\n room.register_servlets,\n ]\n\n def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:\n self.sync_handler = self.hs.get_sync_handler()\n self.store = self.hs.get_datastores().main\n\n # AuthBlocking reads from the hs' config on initialization. 
We need to\n # modify its config instead of the hs'\n self.auth_blocking = self.hs.get_auth_blocking()\n\n def test_wait_for_sync_for_user_auth_blocking(self) -> None:\n user_id1 = \"@user1:test\"\n user_id2 = \"@user2:test\"\n sync_config = generate_sync_config(user_id1)\n requester = create_requester(user_id1)\n\n self.reactor.advance(100) # So we get not 0 time\n self.auth_blocking._limit_usage_by_mau = True\n self.auth_blocking._max_mau_value = 1\n\n # Check that the happy case does not throw errors\n self.get_success(self.store.upsert_monthly_active_user(user_id1))\n self.get_success(\n self.sync_handler.wait_for_sync_for_user(requester, sync_config)\n )\n\n # Test that global lock works\n self.auth_blocking._hs_disabled = True\n e = self.get_failure(\n self.sync_handler.wait_for_sync_for_user(requester, sync_config),\n ResourceLimitError,\n )\n self.assertEqual(e.value.errcode, Codes.RESOURCE_LIMIT_EXCEEDED)\n\n self.auth_blocking._hs_disabled = False\n\n sync_config = generate_sync_config(user_id2)\n requester = create_requester(user_id2)\n\n e = self.get_failure(\n self.sync_handler.wait_for_sync_for_user(requester, sync_config),\n ResourceLimitError,\n )\n self.assertEqual(e.value.errcode, Codes.RESOURCE_LIMIT_EXCEEDED)\n\n def test_unknown_room_version(self) -> None:\n \"\"\"\n A room with an unknown room version should not break sync (and should be excluded).\n \"\"\"\n inviter = self.register_user(\"creator\", \"pass\", admin=True)\n inviter_tok = self.login(\"@creator:test\", \"pass\")\n\n user = self.register_user(\"user\", \"pass\")\n tok = self.login(\"user\", \"pass\")\n\n # Do an initial sync on a different device.\n requester = create_requester(user)\n initial_result = self.get_success(\n self.sync_handler.wait_for_sync_for_user(\n requester, sync_config=generate_sync_config(user, device_id=\"dev\")\n )\n )\n\n # Create a room as the user.\n joined_room = self.helper.create_room_as(user, tok=tok)\n\n # Invite the user to the room as someone 
else.\n invite_room = self.helper.create_room_as(inviter, tok=inviter_tok)\n self.helper.invite(invite_room, targ=user, tok=inviter_tok)\n\n knock_room = self.helper.create_room_as(\n inviter, room_version=RoomVersions.V7.identifier, tok=inviter_tok\n )\n self.helper.send_state(\n knock_room,\n EventTypes.JoinRules,\n {\"join_rule\": JoinRules.KNOCK},\n tok=inviter_tok,\n )\n channel = self.make_request(\n \"POST\",\n \"/_matrix/client/r0/knock/%s\" % (knock_room,),\n b\"{}\",\n tok,\n )\n self.assertEqual(200, channel.code, channel.result)\n\n # The rooms should appear in the sync response.\n result = self.get_success(\n self.sync_handler.wait_for_sync_for_user(\n requester, sync_config=generate_sync_config(user)\n )\n )\n self.assertIn(joined_room, [r.room_id for r in result.joined])\n self.assertIn(invite_room, [r.room_id for r in result.invited])\n self.assertIn(knock_room, [r.room_id for r in result.knocked])\n\n # Test a incremental sync (by providing a since_token).\n result = self.get_success(\n self.sync_handler.wait_for_sync_for_user(\n requester,\n sync_config=generate_sync_config(user, device_id=\"dev\"),\n since_token=initial_result.next_batch,\n )\n )\n self.assertIn(joined_room, [r.room_id for r in result.joined])\n self.assertIn(invite_room, [r.room_id for r in result.invited])\n self.assertIn(knock_room, [r.room_id for r in result.knocked])\n\n # Poke the database and update the room version to an unknown one.\n for room_id in (joined_room, invite_room, knock_room):\n self.get_success(\n self.hs.get_datastores().main.db_pool.simple_update(\n \"rooms\",\n keyvalues={\"room_id\": room_id},\n updatevalues={\"room_version\": \"unknown-room-version\"},\n desc=\"updated-room-version\",\n )\n )\n\n # Blow away caches (supported room versions can only change due to a restart).\n self.store.get_rooms_for_user_with_stream_ordering.invalidate_all()\n self.store.get_rooms_for_user.invalidate_all()\n self.store._get_event_cache.clear()\n 
self.store._event_ref.clear()\n\n # The rooms should be excluded from the sync response.\n # Get a new request key.\n result = self.get_success(\n self.sync_handler.wait_for_sync_for_user(\n requester, sync_config=generate_sync_config(user)\n )\n )\n self.assertNotIn(joined_room, [r.room_id for r in result.joined])\n self.assertNotIn(invite_room, [r.room_id for r in result.invited])\n self.assertNotIn(knock_room, [r.room_id for r in result.knocked])\n\n # The rooms should also not be in an incremental sync.\n result = self.get_success(\n self.sync_handler.wait_for_sync_for_user(\n requester,\n sync_config=generate_sync_config(user, device_id=\"dev\"),\n since_token=initial_result.next_batch,\n )\n )\n self.assertNotIn(joined_room, [r.room_id for r in result.joined])\n self.assertNotIn(invite_room, [r.room_id for r in result.invited])\n self.assertNotIn(knock_room, [r.room_id for r in result.knocked])\n\n def test_ban_wins_race_with_join(self) -> None:\n \"\"\"Rooms shouldn't appear under \"joined\" if a join loses a race to a ban.\n\n A complicated edge case. Imagine the following scenario:\n\n * you attempt to join a room\n * racing with that is a ban which comes in over federation, which ends up with\n an earlier stream_ordering than the join.\n * you get a sync response with a sync token which is _after_ the ban, but before\n the join\n * now your join lands; it is a valid event because its `prev_event`s predate the\n ban, but will not make it into current_state_events (because bans win over\n joins in state res, essentially).\n * When we do a sync from the incremental sync, the only event in the timeline\n is your join ... and yet you aren't joined.\n\n The ban coming in over federation isn't crucial for this behaviour; the key\n requirements are:\n 1. the homeserver generates a join event with prev_events that precede the ban\n (so that it passes the \"are you banned\" test)\n 2. 
the join event has a stream_ordering after that of the ban.\n\n We use monkeypatching to artificially trigger condition (1).\n \"\"\"\n # A local user Alice creates a room.\n owner = self.register_user(\"alice\", \"password\")\n owner_tok = self.login(owner, \"password\")\n room_id = self.helper.create_room_as(owner, is_public=True, tok=owner_tok)\n\n # Do a sync as Alice to get the latest event in the room.\n alice_sync_result: SyncResult = self.get_success(\n self.sync_handler.wait_for_sync_for_user(\n create_requester(owner), generate_sync_config(owner)\n )\n )\n self.assertEqual(len(alice_sync_result.joined), 1)\n self.assertEqual(alice_sync_result.joined[0].room_id, room_id)\n last_room_creation_event_id = (\n alice_sync_result.joined[0].timeline.events[-1].event_id\n )\n\n # Eve, a ne'er-do-well, registers.\n eve = self.register_user(\"eve\", \"password\")\n eve_token = self.login(eve, \"password\")\n\n # Alice preemptively bans Eve.\n self.helper.ban(room_id, owner, eve, tok=owner_tok)\n\n # Eve syncs.\n eve_requester = create_requester(eve)\n eve_sync_config = generate_sync_config(eve)\n eve_sync_after_ban: SyncResult = self.get_success(\n self.sync_handler.wait_for_sync_for_user(eve_requester, eve_sync_config)\n )\n\n # Sanity check this sync result. We shouldn't be joined to the room.\n self.assertEqual(eve_sync_after_ban.joined, [])\n\n # Eve tries to join the room. 
We monkey patch the internal logic which selects\n # the prev_events used when creating the join event, such that the ban does not\n # precede the join.\n mocked_get_prev_events = patch.object(\n self.hs.get_datastores().main,\n \"get_prev_events_for_room\",\n new_callable=AsyncMock,\n return_value=[last_room_creation_event_id],\n )\n with mocked_get_prev_events:\n self.helper.join(room_id, eve, tok=eve_token)\n\n # Eve makes a second, incremental sync.\n eve_incremental_sync_after_join: SyncResult = self.get_success(\n self.sync_handler.wait_for_sync_for_user(\n eve_requester,\n eve_sync_config,\n since_token=eve_sync_after_ban.next_batch,\n )\n )\n # Eve should not see herself as joined to the room.\n self.assertEqual(eve_incremental_sync_after_join.joined, [])\n\n # If we did a third initial sync, we should _still_ see eve is not joined to the room.\n eve_initial_sync_after_join: SyncResult = self.get_success(\n self.sync_handler.wait_for_sync_for_user(\n eve_requester,\n eve_sync_config,\n since_token=None,\n )\n )\n self.assertEqual(eve_initial_sync_after_join.joined, [])\n\n\n_request_key = 0\n\n\ndef generate_sync_config(\n user_id: str, device_id: Optional[str] = \"device_id\"\n) -> SyncConfig:\n \"\"\"Generate a sync config (with a unique request key).\"\"\"\n global _request_key\n _request_key += 1\n return SyncConfig(\n user=UserID.from_string(user_id),\n filter_collection=Filtering(Mock()).DEFAULT_FILTER_COLLECTION,\n is_guest=False,\n request_key=(\"request_key\", _request_key),\n device_id=device_id,\n )\n",
"step-ids": [
6,
8,
9,
10,
11
]
}
|
[
6,
8,
9,
10,
11
] |
from collections import deque
class Solution:
    def slidingPuzzle(self, board: list[list[int]]) -> int:
        """Return the minimum number of moves to solve the 2x3 sliding
        puzzle, or -1 if it is unsolvable (LeetCode 773).

        The board is flattened to a 6-character string ('0' is the blank
        tile) and searched breadth-first; the goal state is "123450".

        Note: annotations use builtin generics (PEP 585) — the original
        `List[List[int]]` referenced an unimported name and raised
        NameError at definition time.
        """
        # Neighbour offsets for the blank tile: right, down, left, up.
        moves = [(0, 1), (1, 0), (0, -1), (-1, 0)]

        start = ''.join(str(board[r][c]) for r in range(2) for c in range(3))
        queue = deque([(start, 0)])
        visited = {start}

        while queue:
            state, steps = queue.popleft()
            if state == '123450':
                return steps

            zero = state.index('0')
            x, y = divmod(zero, 3)
            cells = list(state)
            for dx, dy in moves:
                nx, ny = x + dx, y + dy
                if not (0 <= nx < 2 and 0 <= ny < 3):
                    continue
                # Swap the blank with the neighbour, record, swap back.
                cells[nx * 3 + ny], cells[x * 3 + y] = (
                    cells[x * 3 + y], cells[nx * 3 + ny])
                nxt = ''.join(cells)
                if nxt not in visited:
                    visited.add(nxt)
                    queue.append((nxt, steps + 1))
                cells[nx * 3 + ny], cells[x * 3 + y] = (
                    cells[x * 3 + y], cells[nx * 3 + ny])

        # BFS exhausted without reaching the goal: unsolvable.
        return -1
|
normal
|
{
"blob_id": "dc934f8db4e0c1113e1398b051b58369d909fff8",
"index": 6471,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Solution:\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Solution:\n\n def slidingPuzzle(self, board: List[List[int]]) ->int:\n\n def board2str(board: List[List[int]]) ->str:\n return ''.join([str(board[i][j]) for i in range(2) for j in\n range(3)])\n start = board2str(board)\n bfs = deque([(start, 0)])\n visited = {start}\n while bfs:\n path, step = bfs.popleft()\n if path == '123450':\n return step\n p = path.index('0')\n x, y = p // 3, p % 3\n path = list(path)\n for nx, ny in [(0, 1), (1, 0), (0, -1), (-1, 0)]:\n tx, ty = x + nx, y + ny\n if tx < 0 or tx >= 2 or ty < 0 or ty >= 3:\n continue\n path[tx * 3 + ty], path[x * 3 + y] = path[x * 3 + y], path[\n tx * 3 + ty]\n path_str = ''.join(path)\n if path_str not in visited:\n bfs.append((path_str, step + 1))\n visited.add(path_str)\n path[tx * 3 + ty], path[x * 3 + y] = path[x * 3 + y], path[\n tx * 3 + ty]\n return -1\n",
"step-4": "from collections import deque\n\n\nclass Solution:\n\n def slidingPuzzle(self, board: List[List[int]]) ->int:\n\n def board2str(board: List[List[int]]) ->str:\n return ''.join([str(board[i][j]) for i in range(2) for j in\n range(3)])\n start = board2str(board)\n bfs = deque([(start, 0)])\n visited = {start}\n while bfs:\n path, step = bfs.popleft()\n if path == '123450':\n return step\n p = path.index('0')\n x, y = p // 3, p % 3\n path = list(path)\n for nx, ny in [(0, 1), (1, 0), (0, -1), (-1, 0)]:\n tx, ty = x + nx, y + ny\n if tx < 0 or tx >= 2 or ty < 0 or ty >= 3:\n continue\n path[tx * 3 + ty], path[x * 3 + y] = path[x * 3 + y], path[\n tx * 3 + ty]\n path_str = ''.join(path)\n if path_str not in visited:\n bfs.append((path_str, step + 1))\n visited.add(path_str)\n path[tx * 3 + ty], path[x * 3 + y] = path[x * 3 + y], path[\n tx * 3 + ty]\n return -1\n",
"step-5": "from collections import deque\n\n\nclass Solution:\n def slidingPuzzle(self, board: List[List[int]]) -> int:\n def board2str(board: List[List[int]]) -> str:\n return ''.join([str(board[i][j]) for i in range(2) for j in range(3)])\n\n start = board2str(board)\n bfs = deque([(start, 0)])\n visited = {start}\n\n while bfs:\n path, step = bfs.popleft()\n\n if path == \"123450\": return step\n\n p = path.index(\"0\")\n\n x, y = p // 3, p % 3\n\n path = list(path)\n\n for nx, ny in [(0, 1), (1, 0), (0, -1), (-1, 0)]:\n tx, ty = x + nx, y + ny\n\n if tx < 0 or tx >= 2 or ty < 0 or ty >= 3: continue\n\n path[tx * 3 + ty], path[x * 3 + y] = path[x * 3 + y], path[tx * 3 + ty]\n\n path_str = \"\".join(path)\n if path_str not in visited:\n bfs.append((path_str, step + 1))\n visited.add(path_str)\n\n path[tx * 3 + ty], path[x * 3 + y] = path[x * 3 + y], path[tx * 3 + ty]\n\n return -1\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from Cars import Bmw
from Cars import Audi
from Cars import Nissan
# Press the green button in the gutter to run the script.
if __name__ == '__main__':
    print('In Sample.py........')

    # Each brand module in the Cars package exposes a class of the same
    # name with an outModels() method; instantiate each and print its
    # models. A loop replaces the original triplicated create-and-call
    # code so adding a new brand is a one-line change.
    for car_class in (Bmw.Bmw, Audi.Audi, Nissan.Nissan):
        car_class().outModels()
|
normal
|
{
"blob_id": "e15524d7ae87cbf0b10c54ee0bdc613ba589c1a9",
"index": 3812,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif __name__ == '__main__':\n print('In Sample.py........')\n ModBMW = Bmw.Bmw()\n ModBMW.outModels()\n ModAudi = Audi.Audi()\n ModAudi.outModels()\n ModNissan = Nissan.Nissan()\n ModNissan.outModels()\n",
"step-3": "from Cars import Bmw\nfrom Cars import Audi\nfrom Cars import Nissan\nif __name__ == '__main__':\n print('In Sample.py........')\n ModBMW = Bmw.Bmw()\n ModBMW.outModels()\n ModAudi = Audi.Audi()\n ModAudi.outModels()\n ModNissan = Nissan.Nissan()\n ModNissan.outModels()\n",
"step-4": "from Cars import Bmw\nfrom Cars import Audi\nfrom Cars import Nissan\n\n\n\n# Press the green button in the gutter to run the script.\nif __name__ == '__main__':\n print('In Sample.py........')\n\n # Import classes from your brand new package\n\n # Create an object of Bmw class & call its method\n ModBMW = Bmw.Bmw()\n ModBMW.outModels()\n\n # Create an object of Audi class & call its method\n ModAudi = Audi.Audi()\n ModAudi.outModels()\n\n # Create an object of Nissan class & call its method\n ModNissan = Nissan.Nissan()\n ModNissan.outModels()",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
class RollBot:
<|reserved_special_token_0|>
def __init__(self):
"""initializes the attributes of the class"""
self.input_last_roll = ''
self.last_roll = []
self.result = 0
self.error = ''
self.number_of_dice = ''
self.size_of_dice = ''
self.modifier = ''
self.modifier_number = ''
self.sort = False
self.adv = False
self.hidden = False
self.dropped_roll = ''
self.d_stats = {}
self.dropped_d_stats = {}
self.result_d_stats = {}
self.art_dict = {}
self.meme_dict = {}
self.hidden_rolls = {}
<|reserved_special_token_0|>
def roll_dice(self, number_of_dice, size_of_dice):
"""Simple function that rolls dice"""
dice = []
for roll in range(int(number_of_dice)):
roll = random.randint(1, int(size_of_dice))
dice.append(roll)
if self.sort is True:
dice.sort()
converted_dice = []
for i in range(len(dice)):
roll_to_convert = dice[i]
roll_to_convert = str(roll_to_convert)
converted_dice.append(roll_to_convert)
self.last_roll = converted_dice
self.sort = False
else:
converted_dice = []
for i in range(len(dice)):
roll_to_convert = dice[i]
roll_to_convert = str(roll_to_convert)
converted_dice.append(roll_to_convert)
self.last_roll = converted_dice
def calculate_roll(self):
"""Function to calculate the sum of the roll"""
for i in self.last_roll:
self.result = int(self.result) + int(i)
self.result = self.result + int(self.modifier + self.modifier_number)
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class RollBot:
    """A class that handles the bulk of the dice-rolling functionality."""

    def __init__(self):
        """initializes the attributes of the class"""
        self.input_last_roll = ''   # human-readable echo of the parsed roll
        self.last_roll = []         # individual die results, as strings
        self.result = 0             # sum of last_roll plus modifier
        self.error = ''             # last parse/validation error ('' if none)
        self.number_of_dice = ''
        self.size_of_dice = ''
        self.modifier = ''          # '+' or '-'
        self.modifier_number = ''
        self.sort = False           # one-shot flag: sort the next roll
        self.adv = False            # 'adv' / 'dadv' / False
        self.hidden = False         # result of the last roll is hidden
        self.dropped_roll = ''      # die dropped by (dis)advantage
        self.d_stats = {}           # kept dice per rolled stat
        self.dropped_d_stats = {}   # dropped die per rolled stat
        self.result_d_stats = {}    # stat totals
        self.art_dict = {}
        self.meme_dict = {}
        self.hidden_rolls = {}

    def roll_input(self, user_input, optional_input):
        """Parse a roll expression like '2d6+3' plus an optional flag.

        optional_input may be 'adv', 'dadv', 'sort', 'hide' or ''.
        On any validation failure self.error is set and the roll aborts.
        """
        # Reset per-roll state so a failed roll never shows stale data.
        self.last_roll = []
        self.result = 0
        self.hidden = False
        self.error = ''
        self.number_of_dice = ''
        self.size_of_dice = ''
        self.modifier = ''
        self.modifier_number = ''
        self.dropped_roll = ''
        try:
            # dice_handler is a module-level compiled regex (defined
            # elsewhere in this file) splitting 'NdS(+/-M)' into groups.
            split_input = dice_handler.match(user_input)
            self.number_of_dice = split_input.group(1)
            self.size_of_dice = split_input.group(3)
            self.modifier = split_input.group(5)
            self.modifier_number = split_input.group(6)
            if self.number_of_dice == '':
                self.number_of_dice = 1
            if self.number_of_dice == '0':
                self.error = "You can't roll 0 dice"
                return  # BUGFIX: previously fell through and rolled anyway
            if int(self.number_of_dice) > 200:
                self.error = (
                    'No! Thats to many dice I do not have that many!!!')
                return
            if self.size_of_dice == '0' or self.size_of_dice is None:
                self.error = 'Please define the dice size.'
                return  # BUGFIX: int(None) below raised an uncaught TypeError
            if int(self.size_of_dice) > 50000:
                self.error = ('Dice too big!' +
                              ' That has gotta be fake nothing goes this high')
                return
            if self.modifier is None and len(str(user_input)) > len(str(
                    self.number_of_dice) + str(self.size_of_dice) + 'D'):
                self.error = ' Incorrect modifier. Please use + or -'
                return
            # Default to '+0' when no modifier (or no number) was given.
            if self.modifier == '' or self.modifier is None:
                self.modifier = '+'
                self.modifier_number = '0'
            if self.modifier_number == '' or self.modifier_number is None:
                self.modifier = '+'
                self.modifier_number = '0'
            self.input_last_roll = (' `Rolled ' + str(self.number_of_dice) +
                                    'd' + str(self.size_of_dice) +
                                    str(self.modifier) +
                                    str(self.modifier_number) + ':` ')
            if optional_input.lower() == 'adv':
                self.adv = 'adv'
                self.handle_adv()
            elif optional_input.lower() == 'dadv':
                self.adv = 'dadv'
                self.handle_adv()
            elif optional_input.lower() == 'sort':
                self.sort = True
                self.roll_dice(self.number_of_dice, self.size_of_dice)
            elif optional_input.lower() == 'hide':
                self.hidden = True
                self.roll_dice(self.number_of_dice, self.size_of_dice)
            elif optional_input.lower() != '':
                self.error = (str(optional_input) +
                              ' is not a valid option. Please try (sort/adv/dadv/hide)')
            else:
                self.roll_dice(self.number_of_dice, self.size_of_dice)
        except AttributeError:
            # dice_handler.match returned None: input did not parse at all.
            self.error = (
                ' Invalid input please follow this format (1)d20(+/-(5))')
        except ValueError:
            self.error = (
                ' Invalid input, please Make sure dice size is bigger than 0')

    def roll_dice(self, number_of_dice, size_of_dice):
        """Simple function that rolls dice"""
        dice = [random.randint(1, int(size_of_dice))
                for _ in range(int(number_of_dice))]
        if self.sort:
            dice.sort()
            self.sort = False  # sorting is a one-shot option
        # last_roll holds the results as strings for easy display.
        self.last_roll = [str(roll) for roll in dice]

    def calculate_roll(self):
        """Sum last_roll and apply the +/- modifier into self.result."""
        for die in self.last_roll:
            self.result = int(self.result) + int(die)
        self.result = self.result + int(self.modifier + self.modifier_number)

    def handle_adv(self):
        """Roll with (dis)advantage: roll two dice, drop the low/high one."""
        if self.adv == 'adv':
            if str(self.number_of_dice) not in ('1', '2'):
                self.error = 'Can only roll advantage with 2 dice, ya dummy!'
            if str(self.number_of_dice) == '1':
                self.number_of_dice = 2  # promote a single die to a pair
            if str(self.number_of_dice) == '2':
                self.sort = True  # ascending sort puts the low die at index 0
                self.roll_dice(self.number_of_dice, self.size_of_dice)
                self.dropped_roll = self.last_roll[0]
                del self.last_roll[0]
                self.adv = False
        if self.adv == 'dadv':
            if str(self.number_of_dice) not in ('1', '2'):
                self.error = (
                    'Can only roll disadvantage with 2 dice, ya dummy!')
            if str(self.number_of_dice) == '1':
                self.number_of_dice = 2
            if str(self.number_of_dice) == '2':
                self.sort = True
                self.roll_dice(self.number_of_dice, self.size_of_dice)
                self.dropped_roll = self.last_roll[1]  # drop the high die
                del self.last_roll[1]
                self.adv = False

    def roll_stats(self):
        """Lets you roll new stats for a char (4d6 drop lowest, six times)."""
        for stat in range(6):
            self.roll_input('4d6', 'sort')
            self.dropped_d_stats[stat] = self.last_roll[0]  # lowest die
            del self.last_roll[0]
            self.calculate_roll()
            self.result_d_stats[stat] = self.result
            self.d_stats[stat] = self.last_roll

    # NOTE(review): the source dump masked one additional method after
    # roll_stats; recover it from version control if available.
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class RollBot:
    """Parses dice notation (e.g. ``2d20+3``), rolls the dice, and exposes
    the results — and any input-error text — as attributes for the caller."""

    def __init__(self):
        """Initialize all roll/result/option state to empty defaults."""
        self.input_last_roll = ''    # human-readable echo of the parsed roll
        self.last_roll = []          # individual die results, as strings
        self.result = 0              # sum of last_roll plus the modifier
        self.error = ''              # non-empty when the last input was invalid
        self.number_of_dice = ''
        self.size_of_dice = ''
        self.modifier = ''           # '+' or '-'
        self.modifier_number = ''
        self.sort = False            # sort the next roll ascending
        self.adv = False             # 'adv'/'dadv' while an advantage roll runs
        self.hidden = False          # suppress showing the roll publicly
        self.dropped_roll = ''       # die discarded by an (dis)advantage roll
        self.d_stats = {}            # kept dice per stat slot (roll_stats)
        self.dropped_d_stats = {}    # dropped die per stat slot
        self.result_d_stats = {}     # stat totals per slot
        self.art_dict = {}
        self.meme_dict = {}
        self.hidden_rolls = {}

    def roll_input(self, user_input, optional_input):
        """Parse ``user_input`` ("(N)d<size>(+/-M)"), apply ``optional_input``
        ('sort'/'adv'/'dadv'/'hide' or ''), and roll.  Problems are reported
        through ``self.error`` instead of raising."""
        # Reset per-roll state so stale values never leak into this roll.
        self.last_roll = []
        self.result = 0
        self.hidden = False
        self.error = ''
        self.number_of_dice = ''
        self.size_of_dice = ''
        self.modifier = ''
        self.modifier_number = ''
        self.dropped_roll = ''
        try:
            split_input = dice_handler.match(user_input)
            self.number_of_dice = split_input.group(1)
            self.size_of_dice = split_input.group(3)
            self.modifier = split_input.group(5)
            self.modifier_number = split_input.group(6)
            # A leading count is optional: "d20" means "1d20".
            if self.number_of_dice == '':
                self.number_of_dice = 1
            if self.number_of_dice == '0':
                self.error = "You can't roll 0 dice"
                # BUG FIX: previously fell through and rolled anyway.
                return
            if int(self.number_of_dice) > 200:
                self.error = (
                    'No! Thats to many dice I do not have that many!!!')
                return
            if self.size_of_dice == '0' or self.size_of_dice is None:
                self.error = 'Please define the dice size.'
                # BUG FIX: previously the specific message was clobbered by
                # the generic ValueError text when randint(1, 0) blew up.
                return
            if int(self.size_of_dice) > 50000:
                self.error = ('Dice too big!' +
                              ' That has gotta be fake nothing goes this high')
                return
            # A trailing modifier was typed but did not match +/-<digits>.
            if self.modifier is None and len(str(user_input)) > len(str(
                    self.number_of_dice) + str(self.size_of_dice) + 'D'):
                self.error = ' Incorrect modifier. Please use + or -'
                return
            # Default a missing or partial modifier to +0.
            if self.modifier == '' or self.modifier is None:
                self.modifier = '+'
                self.modifier_number = '0'
            if self.modifier_number == '' or self.modifier_number is None:
                self.modifier = '+'
                self.modifier_number = '0'
            self.input_last_roll = (' `Rolled ' + str(self.number_of_dice) +
                                    'd' + str(self.size_of_dice) +
                                    str(self.modifier) +
                                    str(self.modifier_number) + ':` ')
            option = optional_input.lower()
            if option == 'adv':
                self.adv = 'adv'
                self.handle_adv()
            elif option == 'dadv':
                self.adv = 'dadv'
                self.handle_adv()
            elif option == 'sort':
                self.sort = True
                self.roll_dice(self.number_of_dice, self.size_of_dice)
            elif option == 'hide':
                self.hidden = True
                self.roll_dice(self.number_of_dice, self.size_of_dice)
            elif option != '':
                self.error = str(optional_input
                    ) + ' is not a valid option. Please try (sort/adv/dadv/hide)'
            else:
                self.roll_dice(self.number_of_dice, self.size_of_dice)
        except AttributeError:
            # dice_handler.match returned None: input did not look like dice.
            self.error = (
                ' Invalid input please follow this format (1)d20(+/-(5))')
        except ValueError:
            # int() failed, e.g. an empty dice size.
            self.error = (
                ' Invalid input, please Make sure dice size is bigger than 0')

    def roll_dice(self, number_of_dice, size_of_dice):
        """Roll the requested dice and store them, as strings, in
        ``self.last_roll``; honors and then clears the ``self.sort`` flag."""
        rolls = [random.randint(1, int(size_of_dice))
                 for _ in range(int(number_of_dice))]
        if self.sort is True:
            rolls.sort()
            self.sort = False
        # Stored as strings so they can be joined straight into messages.
        self.last_roll = [str(roll) for roll in rolls]

    def calculate_roll(self):
        """Add every stored die plus the signed modifier into ``self.result``."""
        self.result = (int(self.result) +
                       sum(int(roll) for roll in self.last_roll) +
                       int(self.modifier + self.modifier_number))

    def handle_adv(self):
        """Roll two dice and drop the lowest ('adv') or highest ('dadv')."""
        mode = self.adv
        if mode in ('adv', 'dadv'):
            count = str(self.number_of_dice)
            if count not in ('1', '2'):
                if mode == 'adv':
                    self.error = 'Can only roll advantage with 2 dice, ya dummy!'
                else:
                    self.error = (
                        'Can only roll disadvantage with 2 dice, ya dummy!')
            if count == '1':
                # A bare "d20 adv" implies rolling two dice.
                self.number_of_dice = 2
            if str(self.number_of_dice) == '2':
                self.sort = True
                self.roll_dice(self.number_of_dice, self.size_of_dice)
                # Sorted ascending: drop index 0 for advantage, 1 for dis.
                drop = 0 if mode == 'adv' else 1
                self.dropped_roll = self.last_roll[drop]
                del self.last_roll[drop]
                self.adv = False

    def roll_stats(self):
        """Roll 4d6-drop-lowest six times to generate ability scores."""
        for stat in range(6):
            self.roll_input('4d6', 'sort')
            # Sorted ascending, so index 0 is the lowest die to discard.
            self.dropped_d_stats[stat] = self.last_roll.pop(0)
            self.calculate_roll()
            self.result_d_stats[stat] = self.result
            self.d_stats[stat] = self.last_roll
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class RollBot:
    """Parses dice notation (e.g. ``2d20+3``), rolls the dice, and exposes
    the results — and any input-error text — as attributes for the caller."""

    def __init__(self):
        """Initialize all roll/result/option state to empty defaults."""
        self.input_last_roll = ''    # human-readable echo of the parsed roll
        self.last_roll = []          # individual die results, as strings
        self.result = 0              # sum of last_roll plus the modifier
        self.error = ''              # non-empty when the last input was invalid
        self.number_of_dice = ''
        self.size_of_dice = ''
        self.modifier = ''           # '+' or '-'
        self.modifier_number = ''
        self.sort = False            # sort the next roll ascending
        self.adv = False             # 'adv'/'dadv' while an advantage roll runs
        self.hidden = False          # suppress showing the roll publicly
        self.dropped_roll = ''       # die discarded by an (dis)advantage roll
        self.d_stats = {}            # kept dice per stat slot (roll_stats)
        self.dropped_d_stats = {}    # dropped die per stat slot
        self.result_d_stats = {}     # stat totals per slot
        self.art_dict = {}
        self.meme_dict = {}
        self.hidden_rolls = {}

    def roll_input(self, user_input, optional_input):
        """Parse ``user_input`` ("(N)d<size>(+/-M)"), apply ``optional_input``
        ('sort'/'adv'/'dadv'/'hide' or ''), and roll.  Problems are reported
        through ``self.error`` instead of raising."""
        # Reset per-roll state so stale values never leak into this roll.
        self.last_roll = []
        self.result = 0
        self.hidden = False
        self.error = ''
        self.number_of_dice = ''
        self.size_of_dice = ''
        self.modifier = ''
        self.modifier_number = ''
        self.dropped_roll = ''
        try:
            split_input = dice_handler.match(user_input)
            self.number_of_dice = split_input.group(1)
            self.size_of_dice = split_input.group(3)
            self.modifier = split_input.group(5)
            self.modifier_number = split_input.group(6)
            # A leading count is optional: "d20" means "1d20".
            if self.number_of_dice == '':
                self.number_of_dice = 1
            if self.number_of_dice == '0':
                self.error = "You can't roll 0 dice"
                # BUG FIX: previously fell through and rolled anyway.
                return
            if int(self.number_of_dice) > 200:
                self.error = (
                    'No! Thats to many dice I do not have that many!!!')
                return
            if self.size_of_dice == '0' or self.size_of_dice is None:
                self.error = 'Please define the dice size.'
                # BUG FIX: previously the specific message was clobbered by
                # the generic ValueError text when randint(1, 0) blew up.
                return
            if int(self.size_of_dice) > 50000:
                self.error = ('Dice too big!' +
                              ' That has gotta be fake nothing goes this high')
                return
            # A trailing modifier was typed but did not match +/-<digits>.
            if self.modifier is None and len(str(user_input)) > len(str(
                    self.number_of_dice) + str(self.size_of_dice) + 'D'):
                self.error = ' Incorrect modifier. Please use + or -'
                return
            # Default a missing or partial modifier to +0.
            if self.modifier == '' or self.modifier is None:
                self.modifier = '+'
                self.modifier_number = '0'
            if self.modifier_number == '' or self.modifier_number is None:
                self.modifier = '+'
                self.modifier_number = '0'
            self.input_last_roll = (' `Rolled ' + str(self.number_of_dice) +
                                    'd' + str(self.size_of_dice) +
                                    str(self.modifier) +
                                    str(self.modifier_number) + ':` ')
            option = optional_input.lower()
            if option == 'adv':
                self.adv = 'adv'
                self.handle_adv()
            elif option == 'dadv':
                self.adv = 'dadv'
                self.handle_adv()
            elif option == 'sort':
                self.sort = True
                self.roll_dice(self.number_of_dice, self.size_of_dice)
            elif option == 'hide':
                self.hidden = True
                self.roll_dice(self.number_of_dice, self.size_of_dice)
            elif option != '':
                self.error = str(optional_input
                    ) + ' is not a valid option. Please try (sort/adv/dadv/hide)'
            else:
                self.roll_dice(self.number_of_dice, self.size_of_dice)
        except AttributeError:
            # dice_handler.match returned None: input did not look like dice.
            self.error = (
                ' Invalid input please follow this format (1)d20(+/-(5))')
        except ValueError:
            # int() failed, e.g. an empty dice size.
            self.error = (
                ' Invalid input, please Make sure dice size is bigger than 0')

    def roll_dice(self, number_of_dice, size_of_dice):
        """Roll the requested dice and store them, as strings, in
        ``self.last_roll``; honors and then clears the ``self.sort`` flag."""
        rolls = [random.randint(1, int(size_of_dice))
                 for _ in range(int(number_of_dice))]
        if self.sort is True:
            rolls.sort()
            self.sort = False
        # Stored as strings so they can be joined straight into messages.
        self.last_roll = [str(roll) for roll in rolls]

    def calculate_roll(self):
        """Add every stored die plus the signed modifier into ``self.result``."""
        self.result = (int(self.result) +
                       sum(int(roll) for roll in self.last_roll) +
                       int(self.modifier + self.modifier_number))

    def handle_adv(self):
        """Roll two dice and drop the lowest ('adv') or highest ('dadv')."""
        mode = self.adv
        if mode in ('adv', 'dadv'):
            count = str(self.number_of_dice)
            if count not in ('1', '2'):
                if mode == 'adv':
                    self.error = 'Can only roll advantage with 2 dice, ya dummy!'
                else:
                    self.error = (
                        'Can only roll disadvantage with 2 dice, ya dummy!')
            if count == '1':
                # A bare "d20 adv" implies rolling two dice.
                self.number_of_dice = 2
            if str(self.number_of_dice) == '2':
                self.sort = True
                self.roll_dice(self.number_of_dice, self.size_of_dice)
                # Sorted ascending: drop index 0 for advantage, 1 for dis.
                drop = 0 if mode == 'adv' else 1
                self.dropped_roll = self.last_roll[drop]
                del self.last_roll[drop]
                self.adv = False

    def roll_stats(self):
        """Roll 4d6-drop-lowest six times to generate ability scores."""
        for stat in range(6):
            self.roll_input('4d6', 'sort')
            # Sorted ascending, so index 0 is the lowest die to discard.
            self.dropped_d_stats[stat] = self.last_roll.pop(0)
            self.calculate_roll()
            self.result_d_stats[stat] = self.result
            self.d_stats[stat] = self.last_roll
# Compiled pattern that splits dice notation into its pieces:
# group 1 = dice count, 3 = dice size, 5 = '+'/'-', 6 = modifier digits.
dice_handler = re.compile('(\\d*)([dD])(\\d*)(([+-])(\\d*))?')
# Module-level profile handler instance from profile_handler
# (presumably per-user profile storage — see that module).
ph = profile_handler.PHandler()
<|reserved_special_token_1|>
import random
import profile_handler
import re
class RollBot:
    """Parses dice notation (e.g. ``2d20+3``), rolls the dice, and exposes
    the results — and any input-error text — as attributes for the caller."""

    def __init__(self):
        """Initialize all roll/result/option state to empty defaults."""
        self.input_last_roll = ''    # human-readable echo of the parsed roll
        self.last_roll = []          # individual die results, as strings
        self.result = 0              # sum of last_roll plus the modifier
        self.error = ''              # non-empty when the last input was invalid
        self.number_of_dice = ''
        self.size_of_dice = ''
        self.modifier = ''           # '+' or '-'
        self.modifier_number = ''
        self.sort = False            # sort the next roll ascending
        self.adv = False             # 'adv'/'dadv' while an advantage roll runs
        self.hidden = False          # suppress showing the roll publicly
        self.dropped_roll = ''       # die discarded by an (dis)advantage roll
        self.d_stats = {}            # kept dice per stat slot (roll_stats)
        self.dropped_d_stats = {}    # dropped die per stat slot
        self.result_d_stats = {}     # stat totals per slot
        self.art_dict = {}
        self.meme_dict = {}
        self.hidden_rolls = {}

    def roll_input(self, user_input, optional_input):
        """Parse ``user_input`` ("(N)d<size>(+/-M)"), apply ``optional_input``
        ('sort'/'adv'/'dadv'/'hide' or ''), and roll.  Problems are reported
        through ``self.error`` instead of raising."""
        # Reset per-roll state so stale values never leak into this roll.
        self.last_roll = []
        self.result = 0
        self.hidden = False
        self.error = ''
        self.number_of_dice = ''
        self.size_of_dice = ''
        self.modifier = ''
        self.modifier_number = ''
        self.dropped_roll = ''
        try:
            split_input = dice_handler.match(user_input)
            self.number_of_dice = split_input.group(1)
            self.size_of_dice = split_input.group(3)
            self.modifier = split_input.group(5)
            self.modifier_number = split_input.group(6)
            # A leading count is optional: "d20" means "1d20".
            if self.number_of_dice == '':
                self.number_of_dice = 1
            if self.number_of_dice == '0':
                self.error = "You can't roll 0 dice"
                # BUG FIX: previously fell through and rolled anyway.
                return
            if int(self.number_of_dice) > 200:
                self.error = (
                    'No! Thats to many dice I do not have that many!!!')
                return
            if self.size_of_dice == '0' or self.size_of_dice is None:
                self.error = 'Please define the dice size.'
                # BUG FIX: previously the specific message was clobbered by
                # the generic ValueError text when randint(1, 0) blew up.
                return
            if int(self.size_of_dice) > 50000:
                self.error = ('Dice too big!' +
                              ' That has gotta be fake nothing goes this high')
                return
            # A trailing modifier was typed but did not match +/-<digits>.
            if self.modifier is None and len(str(user_input)) > len(str(
                    self.number_of_dice) + str(self.size_of_dice) + 'D'):
                self.error = ' Incorrect modifier. Please use + or -'
                return
            # Default a missing or partial modifier to +0.
            if self.modifier == '' or self.modifier is None:
                self.modifier = '+'
                self.modifier_number = '0'
            if self.modifier_number == '' or self.modifier_number is None:
                self.modifier = '+'
                self.modifier_number = '0'
            self.input_last_roll = (' `Rolled ' + str(self.number_of_dice) +
                                    'd' + str(self.size_of_dice) +
                                    str(self.modifier) +
                                    str(self.modifier_number) + ':` ')
            option = optional_input.lower()
            if option == 'adv':
                self.adv = 'adv'
                self.handle_adv()
            elif option == 'dadv':
                self.adv = 'dadv'
                self.handle_adv()
            elif option == 'sort':
                self.sort = True
                self.roll_dice(self.number_of_dice, self.size_of_dice)
            elif option == 'hide':
                self.hidden = True
                self.roll_dice(self.number_of_dice, self.size_of_dice)
            elif option != '':
                self.error = str(optional_input
                    ) + ' is not a valid option. Please try (sort/adv/dadv/hide)'
            else:
                self.roll_dice(self.number_of_dice, self.size_of_dice)
        except AttributeError:
            # dice_handler.match returned None: input did not look like dice.
            self.error = (
                ' Invalid input please follow this format (1)d20(+/-(5))')
        except ValueError:
            # int() failed, e.g. an empty dice size.
            self.error = (
                ' Invalid input, please Make sure dice size is bigger than 0')

    def roll_dice(self, number_of_dice, size_of_dice):
        """Roll the requested dice and store them, as strings, in
        ``self.last_roll``; honors and then clears the ``self.sort`` flag."""
        rolls = [random.randint(1, int(size_of_dice))
                 for _ in range(int(number_of_dice))]
        if self.sort is True:
            rolls.sort()
            self.sort = False
        # Stored as strings so they can be joined straight into messages.
        self.last_roll = [str(roll) for roll in rolls]

    def calculate_roll(self):
        """Add every stored die plus the signed modifier into ``self.result``."""
        self.result = (int(self.result) +
                       sum(int(roll) for roll in self.last_roll) +
                       int(self.modifier + self.modifier_number))

    def handle_adv(self):
        """Roll two dice and drop the lowest ('adv') or highest ('dadv')."""
        mode = self.adv
        if mode in ('adv', 'dadv'):
            count = str(self.number_of_dice)
            if count not in ('1', '2'):
                if mode == 'adv':
                    self.error = 'Can only roll advantage with 2 dice, ya dummy!'
                else:
                    self.error = (
                        'Can only roll disadvantage with 2 dice, ya dummy!')
            if count == '1':
                # A bare "d20 adv" implies rolling two dice.
                self.number_of_dice = 2
            if str(self.number_of_dice) == '2':
                self.sort = True
                self.roll_dice(self.number_of_dice, self.size_of_dice)
                # Sorted ascending: drop index 0 for advantage, 1 for dis.
                drop = 0 if mode == 'adv' else 1
                self.dropped_roll = self.last_roll[drop]
                del self.last_roll[drop]
                self.adv = False

    def roll_stats(self):
        """Roll 4d6-drop-lowest six times to generate ability scores."""
        for stat in range(6):
            self.roll_input('4d6', 'sort')
            # Sorted ascending, so index 0 is the lowest die to discard.
            self.dropped_d_stats[stat] = self.last_roll.pop(0)
            self.calculate_roll()
            self.result_d_stats[stat] = self.result
            self.d_stats[stat] = self.last_roll
# The regex that looks through the input for key information.
# Groups: (1) dice count, (2) the 'd'/'D' separator, (3) dice size,
# (4) the whole optional modifier, (5) its '+'/'-' sign, (6) its value.
dice_handler = re.compile(r'(\d*)([dD])(\d*)(([+-])(\d*))?')
# Shared profile handler instance (defined in the project's
# profile_handler module).
ph = profile_handler.PHandler()
|
flexible
|
{
"blob_id": "301a6ec56bd265ff63a924ecd64d6708cb6b139c",
"index": 8419,
"step-1": "<mask token>\n\n\nclass RollBot:\n <mask token>\n\n def __init__(self):\n \"\"\"initializes the attributes of the class\"\"\"\n self.input_last_roll = ''\n self.last_roll = []\n self.result = 0\n self.error = ''\n self.number_of_dice = ''\n self.size_of_dice = ''\n self.modifier = ''\n self.modifier_number = ''\n self.sort = False\n self.adv = False\n self.hidden = False\n self.dropped_roll = ''\n self.d_stats = {}\n self.dropped_d_stats = {}\n self.result_d_stats = {}\n self.art_dict = {}\n self.meme_dict = {}\n self.hidden_rolls = {}\n <mask token>\n\n def roll_dice(self, number_of_dice, size_of_dice):\n \"\"\"Simple function that rolls dice\"\"\"\n dice = []\n for roll in range(int(number_of_dice)):\n roll = random.randint(1, int(size_of_dice))\n dice.append(roll)\n if self.sort is True:\n dice.sort()\n converted_dice = []\n for i in range(len(dice)):\n roll_to_convert = dice[i]\n roll_to_convert = str(roll_to_convert)\n converted_dice.append(roll_to_convert)\n self.last_roll = converted_dice\n self.sort = False\n else:\n converted_dice = []\n for i in range(len(dice)):\n roll_to_convert = dice[i]\n roll_to_convert = str(roll_to_convert)\n converted_dice.append(roll_to_convert)\n self.last_roll = converted_dice\n\n def calculate_roll(self):\n \"\"\"Function to calculate the sum of the roll\"\"\"\n for i in self.last_roll:\n self.result = int(self.result) + int(i)\n self.result = self.result + int(self.modifier + self.modifier_number)\n <mask token>\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass RollBot:\n <mask token>\n\n def __init__(self):\n \"\"\"initializes the attributes of the class\"\"\"\n self.input_last_roll = ''\n self.last_roll = []\n self.result = 0\n self.error = ''\n self.number_of_dice = ''\n self.size_of_dice = ''\n self.modifier = ''\n self.modifier_number = ''\n self.sort = False\n self.adv = False\n self.hidden = False\n self.dropped_roll = ''\n self.d_stats = {}\n self.dropped_d_stats = {}\n self.result_d_stats = {}\n self.art_dict = {}\n self.meme_dict = {}\n self.hidden_rolls = {}\n\n def roll_input(self, user_input, optional_input):\n \"\"\"complicated as fuck function that handles input without breaking\"\"\"\n self.last_roll = []\n self.result = 0\n self.hidden = False\n self.error = ''\n self.number_of_dice = ''\n self.size_of_dice = ''\n self.modifier = ''\n self.modifier_number = ''\n self.dropped_roll = ''\n try:\n split_input = dice_handler.match(user_input)\n self.number_of_dice = split_input.group(1)\n self.size_of_dice = split_input.group(3)\n self.modifier = split_input.group(5)\n self.modifier_number = split_input.group(6)\n if self.number_of_dice == '':\n self.number_of_dice = 1\n if self.number_of_dice == '0':\n self.error = \"You can't roll 0 dice\"\n if int(self.number_of_dice) > 200:\n self.error = (\n 'No! Thats to many dice I do not have that many!!!')\n return\n if self.size_of_dice == '0' or self.size_of_dice is None:\n self.error = 'Please define the dice size.'\n if int(self.size_of_dice) > 50000:\n self.error = ('Dice too big!' +\n ' That has gotta be fake nothing goes this high')\n return\n if self.modifier is None and len(str(user_input)) > len(str(\n self.number_of_dice) + str(self.size_of_dice) + 'D'):\n self.error = ' Incorrect modifier. 
Please use + or -'\n return\n if self.modifier == '' or self.modifier is None:\n self.modifier = '+'\n self.modifier_number = '0'\n if self.modifier_number == '' or self.modifier_number is None:\n self.modifier = '+'\n self.modifier_number = '0'\n self.input_last_roll = ' `Rolled ' + str(self.number_of_dice\n ) + 'd' + str(self.size_of_dice) + str(self.modifier) + str(\n self.modifier_number) + ':` '\n if optional_input.lower() == 'adv':\n self.adv = 'adv'\n self.handle_adv()\n elif optional_input.lower() == 'dadv':\n self.adv = 'dadv'\n self.handle_adv()\n elif optional_input.lower() == 'sort':\n self.sort = True\n self.roll_dice(self.number_of_dice, self.size_of_dice)\n elif optional_input.lower() == 'hide':\n self.hidden = True\n self.roll_dice(self.number_of_dice, self.size_of_dice)\n elif optional_input.lower() != '':\n self.error = str(optional_input\n ) + ' is not a valid option. Please try (sort/adv/dadv/hide)'\n else:\n self.roll_dice(self.number_of_dice, self.size_of_dice)\n except AttributeError:\n self.error = (\n ' Invalid input please follow this format (1)d20(+/-(5))')\n except ValueError:\n self.error = (\n ' Invalid input, please Make sure dice size is bigger than 0')\n\n def roll_dice(self, number_of_dice, size_of_dice):\n \"\"\"Simple function that rolls dice\"\"\"\n dice = []\n for roll in range(int(number_of_dice)):\n roll = random.randint(1, int(size_of_dice))\n dice.append(roll)\n if self.sort is True:\n dice.sort()\n converted_dice = []\n for i in range(len(dice)):\n roll_to_convert = dice[i]\n roll_to_convert = str(roll_to_convert)\n converted_dice.append(roll_to_convert)\n self.last_roll = converted_dice\n self.sort = False\n else:\n converted_dice = []\n for i in range(len(dice)):\n roll_to_convert = dice[i]\n roll_to_convert = str(roll_to_convert)\n converted_dice.append(roll_to_convert)\n self.last_roll = converted_dice\n\n def calculate_roll(self):\n \"\"\"Function to calculate the sum of the roll\"\"\"\n for i in self.last_roll:\n 
self.result = int(self.result) + int(i)\n self.result = self.result + int(self.modifier + self.modifier_number)\n\n def handle_adv(self):\n \"\"\"Function that handles the optional advantage options\"\"\"\n if self.adv == 'adv':\n if str(self.number_of_dice) != '2' and str(self.number_of_dice\n ) != '1':\n self.error = 'Can only roll advantage with 2 dice, ya dummy!'\n if str(self.number_of_dice) != '2' and str(self.number_of_dice\n ) == '1':\n self.number_of_dice = 2\n if str(self.number_of_dice) == '2':\n self.sort = True\n self.roll_dice(self.number_of_dice, self.size_of_dice)\n self.dropped_roll = self.last_roll[0]\n del self.last_roll[0]\n self.adv = False\n if self.adv == 'dadv':\n if str(self.number_of_dice) != '2' and str(self.number_of_dice\n ) != '1':\n self.error = (\n 'Can only roll disadvantage with 2 dice, ya dummy!')\n if str(self.number_of_dice) != '2' and str(self.number_of_dice\n ) == '1':\n self.number_of_dice = 2\n if str(self.number_of_dice) == '2':\n self.sort = True\n self.roll_dice(self.number_of_dice, self.size_of_dice)\n self.dropped_roll = self.last_roll[1]\n del self.last_roll[1]\n self.adv = False\n\n def roll_stats(self):\n \"\"\"Lets you roll new stats for a char\"\"\"\n for stat in range(6):\n self.roll_input('4d6', 'sort')\n self.dropped_d_stats[stat] = self.last_roll[0]\n del self.last_roll[0]\n self.calculate_roll()\n self.result_d_stats[stat] = self.result\n self.d_stats[stat] = self.last_roll\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass RollBot:\n \"\"\"A class that handles the bulk of functionality\"\"\"\n\n def __init__(self):\n \"\"\"initializes the attributes of the class\"\"\"\n self.input_last_roll = ''\n self.last_roll = []\n self.result = 0\n self.error = ''\n self.number_of_dice = ''\n self.size_of_dice = ''\n self.modifier = ''\n self.modifier_number = ''\n self.sort = False\n self.adv = False\n self.hidden = False\n self.dropped_roll = ''\n self.d_stats = {}\n self.dropped_d_stats = {}\n self.result_d_stats = {}\n self.art_dict = {}\n self.meme_dict = {}\n self.hidden_rolls = {}\n\n def roll_input(self, user_input, optional_input):\n \"\"\"complicated as fuck function that handles input without breaking\"\"\"\n self.last_roll = []\n self.result = 0\n self.hidden = False\n self.error = ''\n self.number_of_dice = ''\n self.size_of_dice = ''\n self.modifier = ''\n self.modifier_number = ''\n self.dropped_roll = ''\n try:\n split_input = dice_handler.match(user_input)\n self.number_of_dice = split_input.group(1)\n self.size_of_dice = split_input.group(3)\n self.modifier = split_input.group(5)\n self.modifier_number = split_input.group(6)\n if self.number_of_dice == '':\n self.number_of_dice = 1\n if self.number_of_dice == '0':\n self.error = \"You can't roll 0 dice\"\n if int(self.number_of_dice) > 200:\n self.error = (\n 'No! Thats to many dice I do not have that many!!!')\n return\n if self.size_of_dice == '0' or self.size_of_dice is None:\n self.error = 'Please define the dice size.'\n if int(self.size_of_dice) > 50000:\n self.error = ('Dice too big!' +\n ' That has gotta be fake nothing goes this high')\n return\n if self.modifier is None and len(str(user_input)) > len(str(\n self.number_of_dice) + str(self.size_of_dice) + 'D'):\n self.error = ' Incorrect modifier. 
Please use + or -'\n return\n if self.modifier == '' or self.modifier is None:\n self.modifier = '+'\n self.modifier_number = '0'\n if self.modifier_number == '' or self.modifier_number is None:\n self.modifier = '+'\n self.modifier_number = '0'\n self.input_last_roll = ' `Rolled ' + str(self.number_of_dice\n ) + 'd' + str(self.size_of_dice) + str(self.modifier) + str(\n self.modifier_number) + ':` '\n if optional_input.lower() == 'adv':\n self.adv = 'adv'\n self.handle_adv()\n elif optional_input.lower() == 'dadv':\n self.adv = 'dadv'\n self.handle_adv()\n elif optional_input.lower() == 'sort':\n self.sort = True\n self.roll_dice(self.number_of_dice, self.size_of_dice)\n elif optional_input.lower() == 'hide':\n self.hidden = True\n self.roll_dice(self.number_of_dice, self.size_of_dice)\n elif optional_input.lower() != '':\n self.error = str(optional_input\n ) + ' is not a valid option. Please try (sort/adv/dadv/hide)'\n else:\n self.roll_dice(self.number_of_dice, self.size_of_dice)\n except AttributeError:\n self.error = (\n ' Invalid input please follow this format (1)d20(+/-(5))')\n except ValueError:\n self.error = (\n ' Invalid input, please Make sure dice size is bigger than 0')\n\n def roll_dice(self, number_of_dice, size_of_dice):\n \"\"\"Simple function that rolls dice\"\"\"\n dice = []\n for roll in range(int(number_of_dice)):\n roll = random.randint(1, int(size_of_dice))\n dice.append(roll)\n if self.sort is True:\n dice.sort()\n converted_dice = []\n for i in range(len(dice)):\n roll_to_convert = dice[i]\n roll_to_convert = str(roll_to_convert)\n converted_dice.append(roll_to_convert)\n self.last_roll = converted_dice\n self.sort = False\n else:\n converted_dice = []\n for i in range(len(dice)):\n roll_to_convert = dice[i]\n roll_to_convert = str(roll_to_convert)\n converted_dice.append(roll_to_convert)\n self.last_roll = converted_dice\n\n def calculate_roll(self):\n \"\"\"Function to calculate the sum of the roll\"\"\"\n for i in self.last_roll:\n 
self.result = int(self.result) + int(i)\n self.result = self.result + int(self.modifier + self.modifier_number)\n\n def handle_adv(self):\n \"\"\"Function that handles the optional advantage options\"\"\"\n if self.adv == 'adv':\n if str(self.number_of_dice) != '2' and str(self.number_of_dice\n ) != '1':\n self.error = 'Can only roll advantage with 2 dice, ya dummy!'\n if str(self.number_of_dice) != '2' and str(self.number_of_dice\n ) == '1':\n self.number_of_dice = 2\n if str(self.number_of_dice) == '2':\n self.sort = True\n self.roll_dice(self.number_of_dice, self.size_of_dice)\n self.dropped_roll = self.last_roll[0]\n del self.last_roll[0]\n self.adv = False\n if self.adv == 'dadv':\n if str(self.number_of_dice) != '2' and str(self.number_of_dice\n ) != '1':\n self.error = (\n 'Can only roll disadvantage with 2 dice, ya dummy!')\n if str(self.number_of_dice) != '2' and str(self.number_of_dice\n ) == '1':\n self.number_of_dice = 2\n if str(self.number_of_dice) == '2':\n self.sort = True\n self.roll_dice(self.number_of_dice, self.size_of_dice)\n self.dropped_roll = self.last_roll[1]\n del self.last_roll[1]\n self.adv = False\n\n def roll_stats(self):\n \"\"\"Lets you roll new stats for a char\"\"\"\n for stat in range(6):\n self.roll_input('4d6', 'sort')\n self.dropped_d_stats[stat] = self.last_roll[0]\n del self.last_roll[0]\n self.calculate_roll()\n self.result_d_stats[stat] = self.result\n self.d_stats[stat] = self.last_roll\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass RollBot:\n \"\"\"A class that handles the bulk of functionality\"\"\"\n\n def __init__(self):\n \"\"\"initializes the attributes of the class\"\"\"\n self.input_last_roll = ''\n self.last_roll = []\n self.result = 0\n self.error = ''\n self.number_of_dice = ''\n self.size_of_dice = ''\n self.modifier = ''\n self.modifier_number = ''\n self.sort = False\n self.adv = False\n self.hidden = False\n self.dropped_roll = ''\n self.d_stats = {}\n self.dropped_d_stats = {}\n self.result_d_stats = {}\n self.art_dict = {}\n self.meme_dict = {}\n self.hidden_rolls = {}\n\n def roll_input(self, user_input, optional_input):\n \"\"\"complicated as fuck function that handles input without breaking\"\"\"\n self.last_roll = []\n self.result = 0\n self.hidden = False\n self.error = ''\n self.number_of_dice = ''\n self.size_of_dice = ''\n self.modifier = ''\n self.modifier_number = ''\n self.dropped_roll = ''\n try:\n split_input = dice_handler.match(user_input)\n self.number_of_dice = split_input.group(1)\n self.size_of_dice = split_input.group(3)\n self.modifier = split_input.group(5)\n self.modifier_number = split_input.group(6)\n if self.number_of_dice == '':\n self.number_of_dice = 1\n if self.number_of_dice == '0':\n self.error = \"You can't roll 0 dice\"\n if int(self.number_of_dice) > 200:\n self.error = (\n 'No! Thats to many dice I do not have that many!!!')\n return\n if self.size_of_dice == '0' or self.size_of_dice is None:\n self.error = 'Please define the dice size.'\n if int(self.size_of_dice) > 50000:\n self.error = ('Dice too big!' +\n ' That has gotta be fake nothing goes this high')\n return\n if self.modifier is None and len(str(user_input)) > len(str(\n self.number_of_dice) + str(self.size_of_dice) + 'D'):\n self.error = ' Incorrect modifier. 
Please use + or -'\n return\n if self.modifier == '' or self.modifier is None:\n self.modifier = '+'\n self.modifier_number = '0'\n if self.modifier_number == '' or self.modifier_number is None:\n self.modifier = '+'\n self.modifier_number = '0'\n self.input_last_roll = ' `Rolled ' + str(self.number_of_dice\n ) + 'd' + str(self.size_of_dice) + str(self.modifier) + str(\n self.modifier_number) + ':` '\n if optional_input.lower() == 'adv':\n self.adv = 'adv'\n self.handle_adv()\n elif optional_input.lower() == 'dadv':\n self.adv = 'dadv'\n self.handle_adv()\n elif optional_input.lower() == 'sort':\n self.sort = True\n self.roll_dice(self.number_of_dice, self.size_of_dice)\n elif optional_input.lower() == 'hide':\n self.hidden = True\n self.roll_dice(self.number_of_dice, self.size_of_dice)\n elif optional_input.lower() != '':\n self.error = str(optional_input\n ) + ' is not a valid option. Please try (sort/adv/dadv/hide)'\n else:\n self.roll_dice(self.number_of_dice, self.size_of_dice)\n except AttributeError:\n self.error = (\n ' Invalid input please follow this format (1)d20(+/-(5))')\n except ValueError:\n self.error = (\n ' Invalid input, please Make sure dice size is bigger than 0')\n\n def roll_dice(self, number_of_dice, size_of_dice):\n \"\"\"Simple function that rolls dice\"\"\"\n dice = []\n for roll in range(int(number_of_dice)):\n roll = random.randint(1, int(size_of_dice))\n dice.append(roll)\n if self.sort is True:\n dice.sort()\n converted_dice = []\n for i in range(len(dice)):\n roll_to_convert = dice[i]\n roll_to_convert = str(roll_to_convert)\n converted_dice.append(roll_to_convert)\n self.last_roll = converted_dice\n self.sort = False\n else:\n converted_dice = []\n for i in range(len(dice)):\n roll_to_convert = dice[i]\n roll_to_convert = str(roll_to_convert)\n converted_dice.append(roll_to_convert)\n self.last_roll = converted_dice\n\n def calculate_roll(self):\n \"\"\"Function to calculate the sum of the roll\"\"\"\n for i in self.last_roll:\n 
self.result = int(self.result) + int(i)\n self.result = self.result + int(self.modifier + self.modifier_number)\n\n def handle_adv(self):\n \"\"\"Function that handles the optional advantage options\"\"\"\n if self.adv == 'adv':\n if str(self.number_of_dice) != '2' and str(self.number_of_dice\n ) != '1':\n self.error = 'Can only roll advantage with 2 dice, ya dummy!'\n if str(self.number_of_dice) != '2' and str(self.number_of_dice\n ) == '1':\n self.number_of_dice = 2\n if str(self.number_of_dice) == '2':\n self.sort = True\n self.roll_dice(self.number_of_dice, self.size_of_dice)\n self.dropped_roll = self.last_roll[0]\n del self.last_roll[0]\n self.adv = False\n if self.adv == 'dadv':\n if str(self.number_of_dice) != '2' and str(self.number_of_dice\n ) != '1':\n self.error = (\n 'Can only roll disadvantage with 2 dice, ya dummy!')\n if str(self.number_of_dice) != '2' and str(self.number_of_dice\n ) == '1':\n self.number_of_dice = 2\n if str(self.number_of_dice) == '2':\n self.sort = True\n self.roll_dice(self.number_of_dice, self.size_of_dice)\n self.dropped_roll = self.last_roll[1]\n del self.last_roll[1]\n self.adv = False\n\n def roll_stats(self):\n \"\"\"Lets you roll new stats for a char\"\"\"\n for stat in range(6):\n self.roll_input('4d6', 'sort')\n self.dropped_d_stats[stat] = self.last_roll[0]\n del self.last_roll[0]\n self.calculate_roll()\n self.result_d_stats[stat] = self.result\n self.d_stats[stat] = self.last_roll\n\n\ndice_handler = re.compile('(\\\\d*)([dD])(\\\\d*)(([+-])(\\\\d*))?')\nph = profile_handler.PHandler()\n",
"step-5": "import random\nimport profile_handler\nimport re\n\n\nclass RollBot():\n \"\"\"A class that handles the bulk of functionality\"\"\"\n\n def __init__(self):\n \"\"\"initializes the attributes of the class\"\"\"\n # this is where the procesed user input gets stored for easy readbacks\n self.input_last_roll = ''\n # an empty list to store the results of the roll in\n self.last_roll = []\n # The sum of all the roles inside the last_roll list\n self.result = 0\n self.error = ''\n self.number_of_dice = ''\n self.size_of_dice = ''\n self.modifier = ''\n self.modifier_number = ''\n self.sort = False\n self.adv = False\n self.hidden = False\n # a flag to save the number of the dropped roll on an adv roll\n self.dropped_roll = ''\n # flags for rolling stats for a char\n self.d_stats = {}\n self.dropped_d_stats = {}\n self.result_d_stats = {}\n # flag for the art/meme/hidden rolls dictionary\n self.art_dict = {}\n self.meme_dict = {}\n self.hidden_rolls = {}\n\n def roll_input(self, user_input, optional_input):\n \"\"\"complicated as fuck function that handles input without breaking\"\"\"\n\n # Resets the status of everything before a new roll\n self.last_roll = []\n self.result = 0\n self.hidden = False\n # An empty error flag to easily throw errors back through discord\n self.error = ''\n self.number_of_dice = ''\n self.size_of_dice = ''\n # The modifier is either a + or a -, stored for easy acces\n self.modifier = ''\n self.modifier_number = ''\n self.dropped_roll = ''\n\n # this code is run in try to catch atribute errors due to a wrong input\n try:\n # All parts filtered by regex get stored in an object,\n # that then gets split\n split_input = dice_handler.match(user_input)\n # sets the number of dice in the class for use in other functions\n self.number_of_dice = split_input.group(1)\n # sets the size of the dice in the class for use in other functions\n self.size_of_dice = split_input.group(3)\n # sets the +/- in the class for use in other functions\n 
self.modifier = split_input.group(5)\n # sets the number of the mod in the class to use in other functions\n self.modifier_number = split_input.group(6)\n\n # An if statements that alows typing 1 for rolling 1 dice to be\n # optional.\n if self.number_of_dice == '':\n self.number_of_dice = 1\n # Makes sure atleast 1 dice is rolled\n if self.number_of_dice == '0':\n self.error = \"You can't roll 0 dice\"\n # Sets a cap of 200 dice being rolled\n if int(self.number_of_dice) > 200:\n self.error = \\\n 'No! Thats to many dice I do not have that many!!!'\n return\n\n # Meant to catch errors where a none size dice managed to sneak\n # Through.\n if self.size_of_dice == '0' or self.size_of_dice is None:\n self.error = \"Please define the dice size.\"\n # Sets a cap on how large of a dice you can roll.\n if int(self.size_of_dice) > 50000:\n self.error = \"Dice too big!\" + \\\n \" That has gotta be fake nothing goes this high\"\n return\n\n # Checks wether no modifier was entered or if it was incorrectly\n # entered by checking the lenght of the input vs what came through.\n if self.modifier is None and \\\n len(str(user_input)) > \\\n len(str(self.number_of_dice) +\n str(self.size_of_dice) + 'D'):\n self.error = \" Incorrect modifier. 
Please use + or -\"\n return\n\n # Sets modifier to +0 if no +/- is entered.\n if self.modifier == '' or self.modifier is None:\n self.modifier = '+'\n self.modifier_number = '0'\n\n # Sets modifier to +0 if no number for it was entered\n if self.modifier_number == '' or self.modifier_number is None:\n self.modifier = '+'\n self.modifier_number = '0'\n\n # The full input of the user in 1 flag to print back to the user\n # at the end.\n self.input_last_roll = \\\n ' `Rolled ' + \\\n str(self.number_of_dice) + \\\n 'd' + \\\n str(self.size_of_dice) + \\\n str(self.modifier) + \\\n str(self.modifier_number) + \\\n ':` '\n\n if optional_input.lower() == 'adv':\n self.adv = 'adv'\n self.handle_adv()\n\n # Checks if user asked for disadvantage on a roll and hands it off\n elif optional_input.lower() == 'dadv':\n self.adv = 'dadv'\n self.handle_adv()\n\n # Checks if user asked for a sorted roll\n elif optional_input.lower() == 'sort':\n # Rolls the dice like normal but sorts the flag after.\n self.sort = True\n self.roll_dice(self.number_of_dice, self.size_of_dice)\n\n elif optional_input.lower() == 'hide':\n # Rolls the dice like normal but does not show the result in channel.\n self.hidden = True\n self.roll_dice(self.number_of_dice, self.size_of_dice)\n\n elif optional_input.lower() != '':\n self.error = str(optional_input) + \\\n \" is not a valid option. 
Please try (sort/adv/dadv/hide)\"\n\n else:\n # If everything passed the checks hand offs the proccesed input\n # to the randomizing and calculating functions.\n self.roll_dice(self.number_of_dice, self.size_of_dice)\n\n # Catches and attribute error on a wrong input and notifies the user.\n except AttributeError:\n self.error = \\\n \" Invalid input please follow this format (1)d20(+/-(5))\"\n except ValueError:\n self.error = \\\n \" Invalid input, please Make sure dice size is bigger than 0\"\n\n def roll_dice(self, number_of_dice, size_of_dice):\n \"\"\"Simple function that rolls dice\"\"\"\n # makes a list of random numbers based on the information\n # that was put in\n dice = []\n for roll in range(int(number_of_dice)):\n roll = random.randint(1, int(size_of_dice))\n dice.append(roll)\n\n # Checks wether the result needs to be sorted or not\n if self.sort is True:\n dice.sort()\n # Turns ints into strings after sorting\n converted_dice = []\n for i in range(len(dice)):\n roll_to_convert = dice[i]\n roll_to_convert = str(roll_to_convert)\n converted_dice.append(roll_to_convert)\n # Sets the last roll flag and returns to sort flag to false\n self.last_roll = converted_dice\n self.sort = False\n # Sets the last roll flag for easy cross function use.\n else:\n # Turns Ints into strings incase it had to be sorted\n converted_dice = []\n for i in range(len(dice)):\n roll_to_convert = dice[i]\n roll_to_convert = str(roll_to_convert)\n converted_dice.append(roll_to_convert)\n self.last_roll = converted_dice\n\n def calculate_roll(self):\n \"\"\"Function to calculate the sum of the roll\"\"\"\n # Takes all the numbers from the last roll and adds them up.\n for i in self.last_roll:\n self.result = int(self.result) + int(i)\n self.result = self.result + int(self.modifier + self.modifier_number)\n\n def handle_adv(self):\n \"\"\"Function that handles the optional advantage options\"\"\"\n\n # This part handles advantage so it takes the highest of the 2 numbers\n # and 
then drops the lowest number\n if self.adv == 'adv':\n # Checks wether the number that was input is not 1 or 2.\n if str(self.number_of_dice) != '2' and \\\n str(self.number_of_dice) != '1':\n self.error = 'Can only roll advantage with 2 dice, ya dummy!'\n\n # Checks if number of dice was left blank so automatically set to 1\n if str(self.number_of_dice) != '2' and \\\n str(self.number_of_dice) == '1':\n self.number_of_dice = 2\n\n # Checks if the number of dice is 2 before moving on\n if str(self.number_of_dice) == '2':\n self.sort = True\n self.roll_dice(self.number_of_dice, self.size_of_dice)\n # Stores the dropped roll before deleting it from last_roll\n self.dropped_roll = self.last_roll[0]\n del self.last_roll[0]\n # Returns flag to default state\n self.adv = False\n\n # This part handles disadvantage so it takes the lowest of the 2\n # numbers and then drops the highest number\n if self.adv == 'dadv':\n # Checks wether the number that was input is not 1 or 2.\n if str(self.number_of_dice) != '2' and \\\n str(self.number_of_dice) != '1':\n self.error = \\\n 'Can only roll disadvantage with 2 dice, ya dummy!'\n\n # Checks if number of dice was left blank so automatically set to 1\n if str(self.number_of_dice) != '2' and \\\n str(self.number_of_dice) == '1':\n self.number_of_dice = 2\n\n # Checks if the number of dice is 2 before moving on\n if str(self.number_of_dice) == '2':\n self.sort = True\n self.roll_dice(self.number_of_dice, self.size_of_dice)\n # Stores the dropped roll before deleting it from last_roll\n self.dropped_roll = self.last_roll[1]\n del self.last_roll[1]\n # Returns flag to default state\n self.adv = False\n\n def roll_stats(self):\n \"\"\"Lets you roll new stats for a char\"\"\"\n\n for stat in range(6):\n self.roll_input('4d6', 'sort')\n self.dropped_d_stats[stat] = self.last_roll[0]\n del self.last_roll[0]\n self.calculate_roll()\n self.result_d_stats[stat] = self.result\n self.d_stats[stat] = self.last_roll\n\n\n# The regex that looks 
through the input for key information.\ndice_handler = re.compile(r'(\\d*)([dD])(\\d*)(([+-])(\\d*))?')\nph = profile_handler.PHandler()\n",
"step-ids": [
4,
7,
8,
9,
11
]
}
|
[
4,
7,
8,
9,
11
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
# NOTE(review): `test_case` (the number of test cases) is read before this
# fragment begins — its initialisation is not visible here.
while test_case != 0:
    test_case -= 1
    # Grid dimensions: n rows, m columns.
    n, m = map(int, input().split())
    # Start the tally with one count per cell (n * m).
    ans = n * m
    # Read the grid, one whitespace-separated row per line.
    A = []
    for i in range(n):
        t = list(map(int, input().split()))
        A.append(t)
    # Interior cells only: border cells have no room for arms.
    for i in range(1, n - 1):
        for j in range(1, m - 1):
            k = 1
            # Grow the arm length k while all four arms stay in bounds.
            while j - k >= 0 and i - k >= 0 and j + k < m and i + k < n:
                l = A[i][j - k]
                r = A[i][j + k]
                u = A[i - k][j]
                d = A[i + k][j]
                # Count this arm length when the horizontal pair matches
                # and the vertical pair matches; stop at the first mismatch.
                if l == r and u == d:
                    ans += 1
                else:
                    break
                k += 1
    print(ans)
<|reserved_special_token_1|>
# Per test case: read an n x m grid and count, for every interior
# cell, the arm lengths at which its left/right values match and its
# up/down values match (plus one base count per cell).
test_case = int(input())
while test_case != 0:
    test_case -= 1
    rows, cols = map(int, input().split())
    grid = [list(map(int, input().split())) for _ in range(rows)]
    # Base tally: one count for every cell in the grid.
    total = rows * cols
    for r in range(1, rows - 1):
        for c in range(1, cols - 1):
            # Longest arm that still fits inside the grid from (r, c).
            reach = min(r, c, rows - 1 - r, cols - 1 - c)
            arm = 1
            while arm <= reach:
                left, right = grid[r][c - arm], grid[r][c + arm]
                up, down = grid[r - arm][c], grid[r + arm][c]
                # Stop at the first radius where either pair differs.
                if left == right and up == down:
                    total += 1
                    arm += 1
                else:
                    break
    print(total)
<|reserved_special_token_1|>
# Number of test cases to process.
test_case = int(input())
while test_case != 0:
    test_case -= 1
    # n rows by m columns.
    (n, m) = map(int, input().split())
    # Seed the answer with one count per cell.
    ans = n * m
    A = []
    for i in range(n):
        t = list(map(int, input().split()))
        A.append(t)

    # Only interior cells can have arms of length >= 1.
    for i in range(1, n - 1):
        for j in range(1, m - 1):
            k = 1
            while j - k >= 0 and i - k >= 0 and j + k < m and i + k < n:
                # The four cells at distance k in each direction.
                l = A[i][j - k]
                r = A[i][j + k]
                u = A[i - k][j]
                d = A[i + k][j]
                # Extend while left matches right and up matches down;
                # the first mismatch ends the expansion for this cell.
                if l == r and u == d:
                    ans += 1
                else:
                    break
                k += 1
    print(ans)
|
flexible
|
{
"blob_id": "dbc3e51fed63fe0fadea67d05c4b4efc693938a3",
"index": 1487,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwhile test_case != 0:\n test_case -= 1\n n, m = map(int, input().split())\n ans = n * m\n A = []\n for i in range(n):\n t = list(map(int, input().split()))\n A.append(t)\n for i in range(1, n - 1):\n for j in range(1, m - 1):\n k = 1\n while j - k >= 0 and i - k >= 0 and j + k < m and i + k < n:\n l = A[i][j - k]\n r = A[i][j + k]\n u = A[i - k][j]\n d = A[i + k][j]\n if l == r and u == d:\n ans += 1\n else:\n break\n k += 1\n print(ans)\n",
"step-3": "test_case = int(input())\nwhile test_case != 0:\n test_case -= 1\n n, m = map(int, input().split())\n ans = n * m\n A = []\n for i in range(n):\n t = list(map(int, input().split()))\n A.append(t)\n for i in range(1, n - 1):\n for j in range(1, m - 1):\n k = 1\n while j - k >= 0 and i - k >= 0 and j + k < m and i + k < n:\n l = A[i][j - k]\n r = A[i][j + k]\n u = A[i - k][j]\n d = A[i + k][j]\n if l == r and u == d:\n ans += 1\n else:\n break\n k += 1\n print(ans)\n",
"step-4": "test_case = int(input())\nwhile test_case != 0:\n test_case -= 1\n (n, m) = map(int, input().split())\n ans = n * m\n A = []\n for i in range(n):\n t = list(map(int, input().split()))\n A.append(t)\n\n for i in range(1, n - 1):\n for j in range(1, m - 1):\n k = 1\n while j - k >= 0 and i - k >= 0 and j + k < m and i + k < n:\n l = A[i][j - k]\n r = A[i][j + k]\n u = A[i - k][j]\n d = A[i + k][j]\n if l == r and u == d:\n ans += 1\n else:\n break\n k += 1\n print(ans)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from django.db import models
class Building(models.Model):
    """A building footprint imported from OpenStreetMap (OSM)."""

    Number = models.CharField(max_length=60)  # building number / label
    Description = models.CharField(max_length=120)
    OSMWAYID = models.DecimalField(decimal_places=0, max_digits=15)  # the osm way id
    Lat = models.CharField(max_length=20)  # latitude of the building's center
    Lon = models.CharField(max_length=20)  # longitude of the building's center

    def __str__(self):
        # Human-readable label for the admin site and shell.
        return '%s (%s)' % (self.Number, self.Description)
class BuildingPoint(models.Model):
    """One OSM node (vertex) on a Building's outline."""

    # Delete points together with their building; CASCADE matches the
    # implicit default before Django 2.0 made on_delete mandatory.
    parent = models.ForeignKey('Building', null=False, blank=False,
                               on_delete=models.CASCADE,
                               related_name='points')
    OSMNODEID = models.DecimalField(decimal_places=0, max_digits=15)  # the osm node id
    Lat = models.CharField(max_length=20)  # latitude of this node
    Lon = models.CharField(max_length=20)  # longitude of this node
class Facinet(models.Model):
    """A FaciNet collector node installed in a building."""

    # Delete collectors together with their building; CASCADE matches
    # the implicit default before Django 2.0 made on_delete mandatory.
    Building = models.ForeignKey('Building', null=False, blank=False,
                                 on_delete=models.CASCADE,
                                 related_name='FacinetNodes')
    location = models.IntegerField(unique=True, db_column='Location')
    name = models.TextField(db_column='Name')
    connectionstring = models.TextField(db_column='ConnectionString')
    tapidevice = models.TextField(db_column='TapiDevice', blank=True)
    synctime = models.CharField(max_length=3, db_column='SyncTime')
    online = models.CharField(max_length=3, db_column='Online')
    onlineall = models.CharField(max_length=3, db_column='OnlineAll')
    # Position of the collector, for display purposes.
    Lat = models.CharField(max_length=20)  # latitude of the facinet collector
    Lon = models.CharField(max_length=20)  # longitude of the facinet collector

    def __str__(self):
        # Human-readable label for the admin site and shell.
        return self.name
class Logger(models.Model):
    """A data logger attached to a Facinet collector."""

    # Delete loggers together with their collector; CASCADE matches
    # the implicit default before Django 2.0 made on_delete mandatory.
    Facinet = models.ForeignKey('Facinet', null=False, blank=False,
                                on_delete=models.CASCADE,
                                related_name='Loggers')
    loggerindex = models.IntegerField(unique=True, db_column='LoggerIndex')
    name = models.TextField(db_column='Name')
    online = models.IntegerField(db_column='Online')
    # Position of the logger, for display purposes.
    Lat = models.CharField(max_length=20)  # latitude of the logger
    Lon = models.CharField(max_length=20)  # longitude of the logger

    def __str__(self):
        # Human-readable label for the admin site and shell.
        return self.name
class LoggerMeasurement(models.Model):
Logger = models.ForeignKey('Logger', null=False, blank=False, related_name='Measurement')
timestamp = models.DateTimeField()
measurement = models.DecimalField(max_digits=12, decimal_places=4)
|
normal
|
{
"blob_id": "02ddf213cd3f455f8d8fbde8621fc4788124d5a9",
"index": 3714,
"step-1": "<mask token>\n\n\nclass Logger(models.Model):\n Facinet = models.ForeignKey('Facinet', null=False, blank=False,\n related_name='Loggers')\n loggerindex = models.IntegerField(unique=True, db_column='LoggerIndex')\n name = models.TextField(db_column='Name')\n online = models.IntegerField(db_column='Online')\n Lat = models.CharField(max_length=20)\n Lon = models.CharField(max_length=20)\n\n\nclass LoggerMeasurement(models.Model):\n Logger = models.ForeignKey('Logger', null=False, blank=False,\n related_name='Measurement')\n timestamp = models.DateTimeField()\n measurement = models.DecimalField(max_digits=12, decimal_places=4)\n",
"step-2": "<mask token>\n\n\nclass Facinet(models.Model):\n Building = models.ForeignKey('Building', null=False, blank=False,\n related_name='FacinetNodes')\n location = models.IntegerField(unique=True, db_column='Location')\n name = models.TextField(db_column='Name')\n connectionstring = models.TextField(db_column='ConnectionString')\n tapidevice = models.TextField(db_column='TapiDevice', blank=True)\n synctime = models.CharField(max_length=3, db_column='SyncTime')\n online = models.CharField(max_length=3, db_column='Online')\n onlineall = models.CharField(max_length=3, db_column='OnlineAll')\n Lat = models.CharField(max_length=20)\n Lon = models.CharField(max_length=20)\n\n\nclass Logger(models.Model):\n Facinet = models.ForeignKey('Facinet', null=False, blank=False,\n related_name='Loggers')\n loggerindex = models.IntegerField(unique=True, db_column='LoggerIndex')\n name = models.TextField(db_column='Name')\n online = models.IntegerField(db_column='Online')\n Lat = models.CharField(max_length=20)\n Lon = models.CharField(max_length=20)\n\n\nclass LoggerMeasurement(models.Model):\n Logger = models.ForeignKey('Logger', null=False, blank=False,\n related_name='Measurement')\n timestamp = models.DateTimeField()\n measurement = models.DecimalField(max_digits=12, decimal_places=4)\n",
"step-3": "<mask token>\n\n\nclass BuildingPoint(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass Facinet(models.Model):\n Building = models.ForeignKey('Building', null=False, blank=False,\n related_name='FacinetNodes')\n location = models.IntegerField(unique=True, db_column='Location')\n name = models.TextField(db_column='Name')\n connectionstring = models.TextField(db_column='ConnectionString')\n tapidevice = models.TextField(db_column='TapiDevice', blank=True)\n synctime = models.CharField(max_length=3, db_column='SyncTime')\n online = models.CharField(max_length=3, db_column='Online')\n onlineall = models.CharField(max_length=3, db_column='OnlineAll')\n Lat = models.CharField(max_length=20)\n Lon = models.CharField(max_length=20)\n\n\nclass Logger(models.Model):\n Facinet = models.ForeignKey('Facinet', null=False, blank=False,\n related_name='Loggers')\n loggerindex = models.IntegerField(unique=True, db_column='LoggerIndex')\n name = models.TextField(db_column='Name')\n online = models.IntegerField(db_column='Online')\n Lat = models.CharField(max_length=20)\n Lon = models.CharField(max_length=20)\n\n\nclass LoggerMeasurement(models.Model):\n Logger = models.ForeignKey('Logger', null=False, blank=False,\n related_name='Measurement')\n timestamp = models.DateTimeField()\n measurement = models.DecimalField(max_digits=12, decimal_places=4)\n",
"step-4": "from django.db import models\n\n\nclass Building(models.Model):\n Number = models.CharField(max_length=60)\n Description = models.CharField(max_length=120)\n OSMWAYID = models.DecimalField(decimal_places=0, max_digits=15)\n Lat = models.CharField(max_length=20)\n Lon = models.CharField(max_length=20)\n\n\nclass BuildingPoint(models.Model):\n parent = models.ForeignKey('Building', null=False, blank=False,\n related_name='points')\n OSMNODEID = models.DecimalField(decimal_places=0, max_digits=15)\n Lat = models.CharField(max_length=20)\n Lon = models.CharField(max_length=20)\n\n\nclass Facinet(models.Model):\n Building = models.ForeignKey('Building', null=False, blank=False,\n related_name='FacinetNodes')\n location = models.IntegerField(unique=True, db_column='Location')\n name = models.TextField(db_column='Name')\n connectionstring = models.TextField(db_column='ConnectionString')\n tapidevice = models.TextField(db_column='TapiDevice', blank=True)\n synctime = models.CharField(max_length=3, db_column='SyncTime')\n online = models.CharField(max_length=3, db_column='Online')\n onlineall = models.CharField(max_length=3, db_column='OnlineAll')\n Lat = models.CharField(max_length=20)\n Lon = models.CharField(max_length=20)\n\n\nclass Logger(models.Model):\n Facinet = models.ForeignKey('Facinet', null=False, blank=False,\n related_name='Loggers')\n loggerindex = models.IntegerField(unique=True, db_column='LoggerIndex')\n name = models.TextField(db_column='Name')\n online = models.IntegerField(db_column='Online')\n Lat = models.CharField(max_length=20)\n Lon = models.CharField(max_length=20)\n\n\nclass LoggerMeasurement(models.Model):\n Logger = models.ForeignKey('Logger', null=False, blank=False,\n related_name='Measurement')\n timestamp = models.DateTimeField()\n measurement = models.DecimalField(max_digits=12, decimal_places=4)\n",
"step-5": "from django.db import models\n\n\nclass Building(models.Model):\n Number = models.CharField(max_length=60)\n Description = models.CharField(max_length=120)\n OSMWAYID = models.DecimalField(decimal_places=0, max_digits=15) # the osm way id\n Lat = models.CharField(max_length=20) #lat/lon of then center\n Lon = models.CharField(max_length=20) # lat/lon of the center of the building\n\n\nclass BuildingPoint(models.Model):\n parent = models.ForeignKey('Building', null=False, blank=False, related_name='points')\n OSMNODEID = models.DecimalField(decimal_places=0, max_digits=15) # the osm id\n Lat = models.CharField(max_length=20) #lat/lon of then center\n Lon = models.CharField(max_length=20) # lat/lon of the center of the building\n\n\nclass Facinet(models.Model):\n ##\n Building = models.ForeignKey('Building', null=False, blank=False, related_name='FacinetNodes')\n location = models.IntegerField(unique=True, db_column='Location') # \n name = models.TextField(db_column='Name') # \n connectionstring = models.TextField(db_column='ConnectionString') # \n tapidevice = models.TextField(db_column='TapiDevice', blank=True) # \n synctime = models.CharField(max_length=3, db_column='SyncTime') # \n online = models.CharField(max_length=3, db_column='Online') # \n onlineall = models.CharField(max_length=3, db_column='OnlineAll') # \n ## location for display\n Lat = models.CharField(max_length=20) #lat/lon of facinet collector\n Lon = models.CharField(max_length=20) # lat/lon of facinet collector\n\n\nclass Logger(models.Model):\n Facinet = models.ForeignKey('Facinet', null=False, blank=False, related_name='Loggers')\n loggerindex = models.IntegerField(unique=True, db_column='LoggerIndex') # \n name = models.TextField(db_column='Name') # \n online = models.IntegerField(db_column='Online') # \n ## location for display\n Lat = models.CharField(max_length=20) #lat/lon of the logger\n Lon = models.CharField(max_length=20) # lat/lon of the logger\n\nclass 
LoggerMeasurement(models.Model):\n Logger = models.ForeignKey('Logger', null=False, blank=False, related_name='Measurement')\n timestamp = models.DateTimeField()\n measurement = models.DecimalField(max_digits=12, decimal_places=4)\n",
"step-ids": [
4,
6,
7,
11,
12
]
}
|
[
4,
6,
7,
11,
12
] |
from django.conf.urls.defaults import *
## reports view
urlpatterns = patterns('commtrack_reports.views',
(r'^commtrackreports$', 'reports'),
(r'^sampling_points$', 'sampling_points'),
(r'^commtrack_testers$', 'testers'),
(r'^date_range$', 'date_range'),
(r'^create_report$', 'create_report'),
(r'^export_csv$', 'export_csv'),
(r'^export_pdf$', 'pdf_view'),
# (r'^test$', 'test'),
)
|
normal
|
{
"blob_id": "6d244b719200ae2a9c1a738e746e8c401f8ba4e2",
"index": 3342,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nurlpatterns = patterns('commtrack_reports.views', ('^commtrackreports$',\n 'reports'), ('^sampling_points$', 'sampling_points'), (\n '^commtrack_testers$', 'testers'), ('^date_range$', 'date_range'), (\n '^create_report$', 'create_report'), ('^export_csv$', 'export_csv'), (\n '^export_pdf$', 'pdf_view'))\n",
"step-3": "from django.conf.urls.defaults import *\nurlpatterns = patterns('commtrack_reports.views', ('^commtrackreports$',\n 'reports'), ('^sampling_points$', 'sampling_points'), (\n '^commtrack_testers$', 'testers'), ('^date_range$', 'date_range'), (\n '^create_report$', 'create_report'), ('^export_csv$', 'export_csv'), (\n '^export_pdf$', 'pdf_view'))\n",
"step-4": "from django.conf.urls.defaults import *\n\n## reports view\nurlpatterns = patterns('commtrack_reports.views',\n (r'^commtrackreports$', 'reports'),\n (r'^sampling_points$', 'sampling_points'),\n (r'^commtrack_testers$', 'testers'),\n (r'^date_range$', 'date_range'),\n (r'^create_report$', 'create_report'),\n (r'^export_csv$', 'export_csv'),\n (r'^export_pdf$', 'pdf_view'),\n# (r'^test$', 'test'),\n\n)\n\n\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from django.conf.urls import patterns, url
urlpatterns = patterns(
'',
url(
r'^create_new/$',
'hx_lti_assignment.views.create_new_assignment',
name="create_new_assignment",
),
url(
r'^(?P<id>[0-9]+)/edit/',
'hx_lti_assignment.views.edit_assignment',
name="edit_assignment",
),
url(
r'^(?P<id>[0-9]+)/delete/',
'hx_lti_assignment.views.delete_assignment',
name="delete_assignment",
),
url(
r'^import_assignment/$',
'hx_lti_assignment.views.import_assignment',
name="import_assignment",
),
url(
r'^(?P<course_id>[0-9]+)/get_assignments',
'hx_lti_assignment.views.assignments_from_course',
name="assignments_from_course",
),
url(
r'^(?P<old_course_id>[0-9]+)/(?P<new_course_id>[0-9]+)/(?P<assignment_id>[0-9]+)/import',
'hx_lti_assignment.views.moving_assignment',
name="moving_assignment",
),
)
|
normal
|
{
"blob_id": "2194fb4f0b0618f1c8db39f659a4890457f45b1d",
"index": 3963,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nurlpatterns = patterns('', url('^create_new/$',\n 'hx_lti_assignment.views.create_new_assignment', name=\n 'create_new_assignment'), url('^(?P<id>[0-9]+)/edit/',\n 'hx_lti_assignment.views.edit_assignment', name='edit_assignment'), url\n ('^(?P<id>[0-9]+)/delete/', 'hx_lti_assignment.views.delete_assignment',\n name='delete_assignment'), url('^import_assignment/$',\n 'hx_lti_assignment.views.import_assignment', name='import_assignment'),\n url('^(?P<course_id>[0-9]+)/get_assignments',\n 'hx_lti_assignment.views.assignments_from_course', name=\n 'assignments_from_course'), url(\n '^(?P<old_course_id>[0-9]+)/(?P<new_course_id>[0-9]+)/(?P<assignment_id>[0-9]+)/import'\n , 'hx_lti_assignment.views.moving_assignment', name='moving_assignment'))\n",
"step-3": "from django.conf.urls import patterns, url\nurlpatterns = patterns('', url('^create_new/$',\n 'hx_lti_assignment.views.create_new_assignment', name=\n 'create_new_assignment'), url('^(?P<id>[0-9]+)/edit/',\n 'hx_lti_assignment.views.edit_assignment', name='edit_assignment'), url\n ('^(?P<id>[0-9]+)/delete/', 'hx_lti_assignment.views.delete_assignment',\n name='delete_assignment'), url('^import_assignment/$',\n 'hx_lti_assignment.views.import_assignment', name='import_assignment'),\n url('^(?P<course_id>[0-9]+)/get_assignments',\n 'hx_lti_assignment.views.assignments_from_course', name=\n 'assignments_from_course'), url(\n '^(?P<old_course_id>[0-9]+)/(?P<new_course_id>[0-9]+)/(?P<assignment_id>[0-9]+)/import'\n , 'hx_lti_assignment.views.moving_assignment', name='moving_assignment'))\n",
"step-4": "from django.conf.urls import patterns, url\n\nurlpatterns = patterns(\n '',\n url(\n r'^create_new/$',\n 'hx_lti_assignment.views.create_new_assignment',\n name=\"create_new_assignment\",\n ),\n url(\n r'^(?P<id>[0-9]+)/edit/',\n 'hx_lti_assignment.views.edit_assignment',\n name=\"edit_assignment\",\n ),\n url(\n r'^(?P<id>[0-9]+)/delete/',\n 'hx_lti_assignment.views.delete_assignment',\n name=\"delete_assignment\",\n ),\n url(\n r'^import_assignment/$',\n 'hx_lti_assignment.views.import_assignment',\n name=\"import_assignment\",\n ),\n url(\n r'^(?P<course_id>[0-9]+)/get_assignments',\n 'hx_lti_assignment.views.assignments_from_course',\n name=\"assignments_from_course\",\n ),\n url(\n r'^(?P<old_course_id>[0-9]+)/(?P<new_course_id>[0-9]+)/(?P<assignment_id>[0-9]+)/import',\n 'hx_lti_assignment.views.moving_assignment',\n name=\"moving_assignment\",\n ),\n)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# coding: UTF-8
import os
import sys
if len(sys.argv) == 3:
fname = sys.argv[1]
out_dir = sys.argv[2]
else:
print "usage: vcf_spliter <input file> <output dir>"
exit()
count = 0
if not os.path.exists(out_dir):
os.makedirs(out_dir)
with open(fname, 'r') as f:
for l in f:
if l.strip() == "BEGIN:VCARD":
count += 1
fw = open(os.path.join(out_dir, str(count)+'.vcf'), 'w')
fw.write(l)
elif l.strip() == "END:VCARD":
fw.write(l)
fw.close()
else:
fw.write(l)
|
normal
|
{
"blob_id": "f410a77d4041514383110d9fd16f896178924d59",
"index": 8871,
"step-1": "# coding: UTF-8\n\nimport os \nimport sys\n\nif len(sys.argv) == 3:\n fname = sys.argv[1]\n out_dir = sys.argv[2]\nelse:\n print \"usage: vcf_spliter <input file> <output dir>\"\n exit()\n\ncount = 0\nif not os.path.exists(out_dir):\n os.makedirs(out_dir)\n\nwith open(fname, 'r') as f:\n for l in f:\n if l.strip() == \"BEGIN:VCARD\":\n count += 1\n fw = open(os.path.join(out_dir, str(count)+'.vcf'), 'w')\n fw.write(l)\n elif l.strip() == \"END:VCARD\":\n fw.write(l)\n fw.close()\n else:\n fw.write(l)",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
class RPCStub(object):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class RPCStub(object):
def __init__(self):
pass
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class RPCStub(object):
def __init__(self):
pass
def SET(self, key, value):
self
print('{}: {}'.format(key, value))
<|reserved_special_token_1|>
from pydis.datastruct.sds import SdsImp
class RPCStub(object):
def __init__(self):
pass
def SET(self, key, value):
self
print('{}: {}'.format(key, value))
<|reserved_special_token_1|>
from pydis.datastruct.sds import SdsImp
class RPCStub(object):
def __init__(self):
pass
def SET(self, key, value):
self
print("{}: {}".format(key, value))
|
flexible
|
{
"blob_id": "74f85732b4e1f4ef2b82a48818cbaedb18a56083",
"index": 8122,
"step-1": "<mask token>\n\n\nclass RPCStub(object):\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass RPCStub(object):\n\n def __init__(self):\n pass\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass RPCStub(object):\n\n def __init__(self):\n pass\n\n def SET(self, key, value):\n self\n print('{}: {}'.format(key, value))\n",
"step-4": "from pydis.datastruct.sds import SdsImp\n\n\nclass RPCStub(object):\n\n def __init__(self):\n pass\n\n def SET(self, key, value):\n self\n print('{}: {}'.format(key, value))\n",
"step-5": "from pydis.datastruct.sds import SdsImp\n\n\nclass RPCStub(object):\n def __init__(self):\n pass\n\n def SET(self, key, value):\n self\n print(\"{}: {}\".format(key, value))\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
from field import print_field
from math_utilite import sign, col
def start_parameter_2(par):
global cell_king, castling_control, trans, take_on_aisle
cell_king = par[0]
castling_control = par[1]
trans = par[2]
take_on_aisle = par[3]
def det_cell_king(field):
global cell_king
cell_king = {sign(fig):(x, y) for x, row in enumerate(field) for y, fig in enumerate(row) if abs(fig)==6}
return cell_king
def det_castling_control(field):
global castling_control
for color in (1, -1):
hor = 0 if color == 1 else 7
dk = 0 if field[hor][4] == 6*color else 1
dlr = 0 if field[hor][0] == 2*color else 1
drr = 0 if field[hor][-1] == 2*color else 1
castling_control[color] = (dk, dlr, drr)
return castling_control
def king_and_castling(field, color, old, new, d):
global cell_king, castling_control
cell_king[color] = (new[0], new[1])
storlg=new[1]-old[1]
if abs(storlg) == 2:
storlg = sign(storlg)
rp = 7 if storlg*d == 1 else 0
field[new[0]][new[1]-storlg] = 2*color if d == 1 else 0
field[new[0]][rp] = 0 if d == 1 else 2*color
cont = castling_control[color]
castling_control[color] = (cont[0], cont[1]-storlg+d, cont[2]+storlg+d)
castling_control[color] = (castling_control[color][0]+d, castling_control[color][1], castling_control[color][2])
def rook(field, color, old, new, d):
global castling_control
hor = 0 if color == 1 else 7
cont = castling_control[color]
x, y = old if d == 1 else new
if x == hor and y % 7 == 0:
castling_control[color] = (cont[0], cont[1] + d*(-sign(y-3)+1), cont[2] + d*(sign(y-3)+1))
def trans_pawn(color, old):
return True if (old[0] * color) % 7 == 6 else False
def take_on_aisle_pawn(color, old, new):
global take_on_aisle
if abs(new[0]-old[0]) == 2:
take_on_aisle = (color, new[1])
else:
take_on_aisle = ('l', 8)
return take_on_aisle
def take_on_aisle_move(field, color, old, new, fig, d, main):
global take_on_aisle
if main == 1:
take_on_aisle_pawn(color, old, new)
if abs(old[1]-new[1]) == 1:
if field[new[0]][new[1]] == 0 and d == 1:
field[old[0]][new[1]] = 0
if fig == 0 and d == -1:
field[new[0]][old[1]] = -color
def move(field, old, new, fig=0, d=1, trans_fig=1, main=0):
global trans, take_on_aisle
color = sign(field[old[0]][old[1]])
figure = abs(field[old[0]][old[1]])
if figure == 2:
rook(field, color, old, new, d)
if figure == 6:
king_and_castling(field, color, old, new, d)
if trans == True:
figure = 1
trans = False
if figure == 1:
trans = trans_pawn(color, old) if d == 1 else False
if trans == True:
figure = trans_fig
take_on_aisle_move(field, color, old, new, fig, d, main)
if main == 1:
trans = False
field[new[0]][new[1]] = color*figure
field[old[0]][old[1]] = fig
|
normal
|
{
"blob_id": "90c9456bf22745d99fa76dbc752beae1a3835682",
"index": 7672,
"step-1": "<mask token>\n\n\ndef det_cell_king(field):\n global cell_king\n cell_king = {sign(fig): (x, y) for x, row in enumerate(field) for y,\n fig in enumerate(row) if abs(fig) == 6}\n return cell_king\n\n\n<mask token>\n\n\ndef rook(field, color, old, new, d):\n global castling_control\n hor = 0 if color == 1 else 7\n cont = castling_control[color]\n x, y = old if d == 1 else new\n if x == hor and y % 7 == 0:\n castling_control[color] = cont[0], cont[1] + d * (-sign(y - 3) + 1\n ), cont[2] + d * (sign(y - 3) + 1)\n\n\ndef trans_pawn(color, old):\n return True if old[0] * color % 7 == 6 else False\n\n\ndef take_on_aisle_pawn(color, old, new):\n global take_on_aisle\n if abs(new[0] - old[0]) == 2:\n take_on_aisle = color, new[1]\n else:\n take_on_aisle = 'l', 8\n return take_on_aisle\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef start_parameter_2(par):\n global cell_king, castling_control, trans, take_on_aisle\n cell_king = par[0]\n castling_control = par[1]\n trans = par[2]\n take_on_aisle = par[3]\n\n\ndef det_cell_king(field):\n global cell_king\n cell_king = {sign(fig): (x, y) for x, row in enumerate(field) for y,\n fig in enumerate(row) if abs(fig) == 6}\n return cell_king\n\n\ndef det_castling_control(field):\n global castling_control\n for color in (1, -1):\n hor = 0 if color == 1 else 7\n dk = 0 if field[hor][4] == 6 * color else 1\n dlr = 0 if field[hor][0] == 2 * color else 1\n drr = 0 if field[hor][-1] == 2 * color else 1\n castling_control[color] = dk, dlr, drr\n return castling_control\n\n\n<mask token>\n\n\ndef rook(field, color, old, new, d):\n global castling_control\n hor = 0 if color == 1 else 7\n cont = castling_control[color]\n x, y = old if d == 1 else new\n if x == hor and y % 7 == 0:\n castling_control[color] = cont[0], cont[1] + d * (-sign(y - 3) + 1\n ), cont[2] + d * (sign(y - 3) + 1)\n\n\ndef trans_pawn(color, old):\n return True if old[0] * color % 7 == 6 else False\n\n\ndef take_on_aisle_pawn(color, old, new):\n global take_on_aisle\n if abs(new[0] - old[0]) == 2:\n take_on_aisle = color, new[1]\n else:\n take_on_aisle = 'l', 8\n return take_on_aisle\n\n\ndef take_on_aisle_move(field, color, old, new, fig, d, main):\n global take_on_aisle\n if main == 1:\n take_on_aisle_pawn(color, old, new)\n if abs(old[1] - new[1]) == 1:\n if field[new[0]][new[1]] == 0 and d == 1:\n field[old[0]][new[1]] = 0\n if fig == 0 and d == -1:\n field[new[0]][old[1]] = -color\n\n\ndef move(field, old, new, fig=0, d=1, trans_fig=1, main=0):\n global trans, take_on_aisle\n color = sign(field[old[0]][old[1]])\n figure = abs(field[old[0]][old[1]])\n if figure == 2:\n rook(field, color, old, new, d)\n if figure == 6:\n king_and_castling(field, color, old, new, d)\n if trans == True:\n figure = 1\n trans = False\n if figure == 1:\n trans = trans_pawn(color, 
old) if d == 1 else False\n if trans == True:\n figure = trans_fig\n take_on_aisle_move(field, color, old, new, fig, d, main)\n if main == 1:\n trans = False\n field[new[0]][new[1]] = color * figure\n field[old[0]][old[1]] = fig\n",
"step-3": "<mask token>\n\n\ndef start_parameter_2(par):\n global cell_king, castling_control, trans, take_on_aisle\n cell_king = par[0]\n castling_control = par[1]\n trans = par[2]\n take_on_aisle = par[3]\n\n\ndef det_cell_king(field):\n global cell_king\n cell_king = {sign(fig): (x, y) for x, row in enumerate(field) for y,\n fig in enumerate(row) if abs(fig) == 6}\n return cell_king\n\n\ndef det_castling_control(field):\n global castling_control\n for color in (1, -1):\n hor = 0 if color == 1 else 7\n dk = 0 if field[hor][4] == 6 * color else 1\n dlr = 0 if field[hor][0] == 2 * color else 1\n drr = 0 if field[hor][-1] == 2 * color else 1\n castling_control[color] = dk, dlr, drr\n return castling_control\n\n\ndef king_and_castling(field, color, old, new, d):\n global cell_king, castling_control\n cell_king[color] = new[0], new[1]\n storlg = new[1] - old[1]\n if abs(storlg) == 2:\n storlg = sign(storlg)\n rp = 7 if storlg * d == 1 else 0\n field[new[0]][new[1] - storlg] = 2 * color if d == 1 else 0\n field[new[0]][rp] = 0 if d == 1 else 2 * color\n cont = castling_control[color]\n castling_control[color] = cont[0], cont[1] - storlg + d, cont[2\n ] + storlg + d\n castling_control[color] = castling_control[color][0] + d, castling_control[\n color][1], castling_control[color][2]\n\n\ndef rook(field, color, old, new, d):\n global castling_control\n hor = 0 if color == 1 else 7\n cont = castling_control[color]\n x, y = old if d == 1 else new\n if x == hor and y % 7 == 0:\n castling_control[color] = cont[0], cont[1] + d * (-sign(y - 3) + 1\n ), cont[2] + d * (sign(y - 3) + 1)\n\n\ndef trans_pawn(color, old):\n return True if old[0] * color % 7 == 6 else False\n\n\ndef take_on_aisle_pawn(color, old, new):\n global take_on_aisle\n if abs(new[0] - old[0]) == 2:\n take_on_aisle = color, new[1]\n else:\n take_on_aisle = 'l', 8\n return take_on_aisle\n\n\ndef take_on_aisle_move(field, color, old, new, fig, d, main):\n global take_on_aisle\n if main == 1:\n 
take_on_aisle_pawn(color, old, new)\n if abs(old[1] - new[1]) == 1:\n if field[new[0]][new[1]] == 0 and d == 1:\n field[old[0]][new[1]] = 0\n if fig == 0 and d == -1:\n field[new[0]][old[1]] = -color\n\n\ndef move(field, old, new, fig=0, d=1, trans_fig=1, main=0):\n global trans, take_on_aisle\n color = sign(field[old[0]][old[1]])\n figure = abs(field[old[0]][old[1]])\n if figure == 2:\n rook(field, color, old, new, d)\n if figure == 6:\n king_and_castling(field, color, old, new, d)\n if trans == True:\n figure = 1\n trans = False\n if figure == 1:\n trans = trans_pawn(color, old) if d == 1 else False\n if trans == True:\n figure = trans_fig\n take_on_aisle_move(field, color, old, new, fig, d, main)\n if main == 1:\n trans = False\n field[new[0]][new[1]] = color * figure\n field[old[0]][old[1]] = fig\n",
"step-4": "from field import print_field\nfrom math_utilite import sign, col\n\n\ndef start_parameter_2(par):\n global cell_king, castling_control, trans, take_on_aisle\n cell_king = par[0]\n castling_control = par[1]\n trans = par[2]\n take_on_aisle = par[3]\n\n\ndef det_cell_king(field):\n global cell_king\n cell_king = {sign(fig): (x, y) for x, row in enumerate(field) for y,\n fig in enumerate(row) if abs(fig) == 6}\n return cell_king\n\n\ndef det_castling_control(field):\n global castling_control\n for color in (1, -1):\n hor = 0 if color == 1 else 7\n dk = 0 if field[hor][4] == 6 * color else 1\n dlr = 0 if field[hor][0] == 2 * color else 1\n drr = 0 if field[hor][-1] == 2 * color else 1\n castling_control[color] = dk, dlr, drr\n return castling_control\n\n\ndef king_and_castling(field, color, old, new, d):\n global cell_king, castling_control\n cell_king[color] = new[0], new[1]\n storlg = new[1] - old[1]\n if abs(storlg) == 2:\n storlg = sign(storlg)\n rp = 7 if storlg * d == 1 else 0\n field[new[0]][new[1] - storlg] = 2 * color if d == 1 else 0\n field[new[0]][rp] = 0 if d == 1 else 2 * color\n cont = castling_control[color]\n castling_control[color] = cont[0], cont[1] - storlg + d, cont[2\n ] + storlg + d\n castling_control[color] = castling_control[color][0] + d, castling_control[\n color][1], castling_control[color][2]\n\n\ndef rook(field, color, old, new, d):\n global castling_control\n hor = 0 if color == 1 else 7\n cont = castling_control[color]\n x, y = old if d == 1 else new\n if x == hor and y % 7 == 0:\n castling_control[color] = cont[0], cont[1] + d * (-sign(y - 3) + 1\n ), cont[2] + d * (sign(y - 3) + 1)\n\n\ndef trans_pawn(color, old):\n return True if old[0] * color % 7 == 6 else False\n\n\ndef take_on_aisle_pawn(color, old, new):\n global take_on_aisle\n if abs(new[0] - old[0]) == 2:\n take_on_aisle = color, new[1]\n else:\n take_on_aisle = 'l', 8\n return take_on_aisle\n\n\ndef take_on_aisle_move(field, color, old, new, fig, d, main):\n 
global take_on_aisle\n if main == 1:\n take_on_aisle_pawn(color, old, new)\n if abs(old[1] - new[1]) == 1:\n if field[new[0]][new[1]] == 0 and d == 1:\n field[old[0]][new[1]] = 0\n if fig == 0 and d == -1:\n field[new[0]][old[1]] = -color\n\n\ndef move(field, old, new, fig=0, d=1, trans_fig=1, main=0):\n global trans, take_on_aisle\n color = sign(field[old[0]][old[1]])\n figure = abs(field[old[0]][old[1]])\n if figure == 2:\n rook(field, color, old, new, d)\n if figure == 6:\n king_and_castling(field, color, old, new, d)\n if trans == True:\n figure = 1\n trans = False\n if figure == 1:\n trans = trans_pawn(color, old) if d == 1 else False\n if trans == True:\n figure = trans_fig\n take_on_aisle_move(field, color, old, new, fig, d, main)\n if main == 1:\n trans = False\n field[new[0]][new[1]] = color * figure\n field[old[0]][old[1]] = fig\n",
"step-5": "from field import print_field\nfrom math_utilite import sign, col\n\n\ndef start_parameter_2(par):\n global cell_king, castling_control, trans, take_on_aisle\n cell_king = par[0]\n castling_control = par[1]\n trans = par[2]\n take_on_aisle = par[3]\n \ndef det_cell_king(field):\n global cell_king\n cell_king = {sign(fig):(x, y) for x, row in enumerate(field) for y, fig in enumerate(row) if abs(fig)==6}\n return cell_king\n\ndef det_castling_control(field):\n global castling_control\n for color in (1, -1):\n hor = 0 if color == 1 else 7\n dk = 0 if field[hor][4] == 6*color else 1\n dlr = 0 if field[hor][0] == 2*color else 1\n drr = 0 if field[hor][-1] == 2*color else 1\n castling_control[color] = (dk, dlr, drr)\n return castling_control\n \n \ndef king_and_castling(field, color, old, new, d):\n global cell_king, castling_control\n cell_king[color] = (new[0], new[1])\n storlg=new[1]-old[1]\n if abs(storlg) == 2:\n storlg = sign(storlg)\n rp = 7 if storlg*d == 1 else 0\n field[new[0]][new[1]-storlg] = 2*color if d == 1 else 0\n field[new[0]][rp] = 0 if d == 1 else 2*color\n cont = castling_control[color] \n castling_control[color] = (cont[0], cont[1]-storlg+d, cont[2]+storlg+d)\n castling_control[color] = (castling_control[color][0]+d, castling_control[color][1], castling_control[color][2])\n\ndef rook(field, color, old, new, d):\n global castling_control\n hor = 0 if color == 1 else 7\n cont = castling_control[color]\n x, y = old if d == 1 else new\n if x == hor and y % 7 == 0:\n castling_control[color] = (cont[0], cont[1] + d*(-sign(y-3)+1), cont[2] + d*(sign(y-3)+1))\n\ndef trans_pawn(color, old):\n return True if (old[0] * color) % 7 == 6 else False\n\ndef take_on_aisle_pawn(color, old, new):\n global take_on_aisle\n if abs(new[0]-old[0]) == 2:\n take_on_aisle = (color, new[1])\n else:\n take_on_aisle = ('l', 8)\n return take_on_aisle\n\ndef take_on_aisle_move(field, color, old, new, fig, d, main):\n global take_on_aisle\n if main == 1:\n 
take_on_aisle_pawn(color, old, new)\n if abs(old[1]-new[1]) == 1:\n if field[new[0]][new[1]] == 0 and d == 1:\n field[old[0]][new[1]] = 0\n if fig == 0 and d == -1:\n field[new[0]][old[1]] = -color\n\ndef move(field, old, new, fig=0, d=1, trans_fig=1, main=0):\n global trans, take_on_aisle\n color = sign(field[old[0]][old[1]])\n figure = abs(field[old[0]][old[1]])\n if figure == 2:\n rook(field, color, old, new, d)\n if figure == 6:\n king_and_castling(field, color, old, new, d)\n if trans == True:\n figure = 1\n trans = False\n if figure == 1:\n trans = trans_pawn(color, old) if d == 1 else False \n if trans == True: \n figure = trans_fig \n take_on_aisle_move(field, color, old, new, fig, d, main)\n if main == 1:\n trans = False\n field[new[0]][new[1]] = color*figure\n field[old[0]][old[1]] = fig\n\n\n\n",
"step-ids": [
4,
8,
9,
10,
11
]
}
|
[
4,
8,
9,
10,
11
] |
"""Convert a fixed-width-layout description CSV (stdin) to a
``column,start,length`` CSV (stdout).

Column positions within each input row come from the command line::

    script.py NAME_POS START_POS LENGTH_POS FIRST_NOTE_POS [SECOND_NOTE_POS]

Any other number of arguments falls back to the historical defaults
``5 3 4 2 1``.
"""
import csv
import sys

# Name cells matching these are layout filler, not real data columns.
BLACKLIST = ("Blank", "semicolon filler")

# Historical default positions: name, start, length, first note, second note.
DEFAULT_POSITIONS = (5, 3, 4, 2, 1)


def _parse_positions(args):
    """Map command-line *args* to the five column positions.

    Returns ``(name_pos, start_pos, length_pos, first_note_pos,
    second_note_pos)``; ``second_note_pos`` is ``None`` when only four
    positions were supplied.  Anything other than 4 or 5 arguments falls
    back to ``DEFAULT_POSITIONS``.  Raises ``ValueError`` on non-integer
    arguments, as the original did.
    """
    if len(args) == 5:
        return tuple(int(a) for a in args)
    if len(args) == 4:
        name_pos, start_pos, length_pos, first_note_pos = (int(a) for a in args)
        return name_pos, start_pos, length_pos, first_note_pos, None
    return DEFAULT_POSITIONS


def _note(row, pos):
    """Stripped note cell at *pos*, or '' when the row is too short.

    Bug fix: the original indexed note cells unguarded and crashed with
    IndexError on rows shorter than the note positions, even though the
    name lookup was guarded — short note cells now read as empty.
    """
    try:
        return row[pos].strip()
    except IndexError:
        return ""


def main(argv=None, infile=None, outfile=None):
    """Run the conversion.

    Args:
        argv: argument list (defaults to ``sys.argv[1:]``).
        infile: readable text stream with the input CSV (defaults to stdin).
        outfile: writable text stream for the output CSV (defaults to stdout).
    """
    args = sys.argv[1:] if argv is None else argv
    (name_pos, start_pos, length_pos,
     first_note_pos, second_note_pos) = _parse_positions(args)

    reader = csv.reader(sys.stdin if infile is None else infile)
    writer = csv.writer(sys.stdout if outfile is None else outfile)
    writer.writerow(('column', 'start', 'length'))

    for row in reader:
        # Skip rows with no name cell at all, empty names, and filler rows.
        try:
            name = row[name_pos].strip()
        except IndexError:
            continue
        if not name or name in BLACKLIST:
            continue

        # Append whichever note cells are non-empty, separated by '; '.
        second_note = (_note(row, second_note_pos)
                       if second_note_pos is not None else "")
        first_note = _note(row, first_note_pos)
        if second_note:
            col_name = '; '.join((name, first_note, second_note))
        elif first_note:
            col_name = '; '.join((name, first_note))
        else:
            col_name = name

        # Start may be a range like "10-12"; only the beginning matters.
        col_start = int(row[start_pos].split('-')[0].strip())
        # Length may be written as a float (e.g. "2.0"); emitted as length-1.
        col_length = int(float(row[length_pos])) - 1
        writer.writerow((col_name, col_start, col_length))


if __name__ == "__main__":
    main()
|
normal
|
{
"blob_id": "d7653a205fb8203fed4009846780c63dd1bcb505",
"index": 3603,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif len(sys.argv[1:]) == 5:\n name_pos, start_pos, length_pos, first_note_pos, second_note_pos = [int\n (pos) for pos in sys.argv[1:]]\nelif len(sys.argv[1:]) == 4:\n name_pos, start_pos, length_pos, first_note_pos = [int(pos) for pos in\n sys.argv[1:]]\n second_note_pos = None\nelse:\n name_pos, start_pos, length_pos, first_note_pos, second_note_pos = (5, \n 3, 4, 2, 1)\n<mask token>\nwriter.writerow(('column', 'start', 'length'))\nfor row in reader:\n try:\n if not row[name_pos].strip() or row[name_pos].strip() in blacklist:\n continue\n except IndexError:\n continue\n if second_note_pos is not None and row[second_note_pos].strip():\n col_name = '; '.join(name.strip() for name in (row[name_pos], row[\n first_note_pos], row[second_note_pos]))\n elif row[first_note_pos].strip():\n col_name = '; '.join(name.strip() for name in (row[name_pos], row[\n first_note_pos]))\n else:\n col_name = row[name_pos].strip()\n col_start = int(row[start_pos].split('-')[0].strip())\n col_length = int(float(row[length_pos])) - 1\n writer.writerow((col_name, col_start, col_length))\n",
"step-3": "<mask token>\nif len(sys.argv[1:]) == 5:\n name_pos, start_pos, length_pos, first_note_pos, second_note_pos = [int\n (pos) for pos in sys.argv[1:]]\nelif len(sys.argv[1:]) == 4:\n name_pos, start_pos, length_pos, first_note_pos = [int(pos) for pos in\n sys.argv[1:]]\n second_note_pos = None\nelse:\n name_pos, start_pos, length_pos, first_note_pos, second_note_pos = (5, \n 3, 4, 2, 1)\nblacklist = 'Blank', 'semicolon filler'\nreader = csv.reader(sys.stdin)\nwriter = csv.writer(sys.stdout)\nwriter.writerow(('column', 'start', 'length'))\nfor row in reader:\n try:\n if not row[name_pos].strip() or row[name_pos].strip() in blacklist:\n continue\n except IndexError:\n continue\n if second_note_pos is not None and row[second_note_pos].strip():\n col_name = '; '.join(name.strip() for name in (row[name_pos], row[\n first_note_pos], row[second_note_pos]))\n elif row[first_note_pos].strip():\n col_name = '; '.join(name.strip() for name in (row[name_pos], row[\n first_note_pos]))\n else:\n col_name = row[name_pos].strip()\n col_start = int(row[start_pos].split('-')[0].strip())\n col_length = int(float(row[length_pos])) - 1\n writer.writerow((col_name, col_start, col_length))\n",
"step-4": "import csv\nimport sys\nif len(sys.argv[1:]) == 5:\n name_pos, start_pos, length_pos, first_note_pos, second_note_pos = [int\n (pos) for pos in sys.argv[1:]]\nelif len(sys.argv[1:]) == 4:\n name_pos, start_pos, length_pos, first_note_pos = [int(pos) for pos in\n sys.argv[1:]]\n second_note_pos = None\nelse:\n name_pos, start_pos, length_pos, first_note_pos, second_note_pos = (5, \n 3, 4, 2, 1)\nblacklist = 'Blank', 'semicolon filler'\nreader = csv.reader(sys.stdin)\nwriter = csv.writer(sys.stdout)\nwriter.writerow(('column', 'start', 'length'))\nfor row in reader:\n try:\n if not row[name_pos].strip() or row[name_pos].strip() in blacklist:\n continue\n except IndexError:\n continue\n if second_note_pos is not None and row[second_note_pos].strip():\n col_name = '; '.join(name.strip() for name in (row[name_pos], row[\n first_note_pos], row[second_note_pos]))\n elif row[first_note_pos].strip():\n col_name = '; '.join(name.strip() for name in (row[name_pos], row[\n first_note_pos]))\n else:\n col_name = row[name_pos].strip()\n col_start = int(row[start_pos].split('-')[0].strip())\n col_length = int(float(row[length_pos])) - 1\n writer.writerow((col_name, col_start, col_length))\n",
"step-5": "import csv\nimport sys\n\nif len(sys.argv[1:]) == 5 :\n (name_pos, start_pos, length_pos, \n first_note_pos, second_note_pos) = [int(pos) for pos in sys.argv[1:]]\nelif len(sys.argv[1:]) == 4 :\n (name_pos, start_pos, length_pos, \n first_note_pos) = [int(pos) for pos in sys.argv[1:]]\n second_note_pos = None\nelse :\n name_pos, start_pos, length_pos, first_note_pos, second_note_pos = 5, 3, 4, 2, 1\n\nblacklist=(\"Blank\", \"semicolon filler\")\n\nreader = csv.reader(sys.stdin)\nwriter = csv.writer(sys.stdout)\nwriter.writerow(('column', 'start', 'length'))\n\nfor row in reader :\n try :\n if not row[name_pos].strip() or row[name_pos].strip() in blacklist :\n continue\n except IndexError :\n continue\n if second_note_pos is not None and row[second_note_pos].strip() :\n col_name = '; '.join(name.strip() for name in (row[name_pos], \n row[first_note_pos], \n row[second_note_pos]))\n elif row[first_note_pos].strip() :\n col_name = '; '.join(name.strip() for name in (row[name_pos], \n row[first_note_pos]))\n else :\n col_name = row[name_pos].strip()\n col_start = int(row[start_pos].split('-')[0].strip())\n col_length = int(float(row[length_pos])) - 1\n writer.writerow((col_name, col_start, col_length))\n\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.