Spaces:
Paused
Paused
Your Commit Message
Browse files- checkpoints/microsoft/phi-2/config.json +30 -0
- checkpoints/microsoft/phi-2/generation_config.json +6 -0
- checkpoints/microsoft/phi-2/lit_model.pth +3 -0
- checkpoints/microsoft/phi-2/model-00001-of-00002.bin +3 -0
- checkpoints/microsoft/phi-2/model-00002-of-00002.bin +3 -0
- checkpoints/microsoft/phi-2/model_config.yaml +28 -0
- checkpoints/microsoft/phi-2/tokenizer.json +0 -0
- checkpoints/microsoft/phi-2/tokenizer_config.json +323 -0
- data.py +12 -0
- output.json +1 -0
checkpoints/microsoft/phi-2/config.json
ADDED
|
@@ -0,0 +1,30 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"_name_or_path": "microsoft/phi-2",
|
| 3 |
+
"architectures": [
|
| 4 |
+
"PhiForCausalLM"
|
| 5 |
+
],
|
| 6 |
+
"attention_dropout": 0.0,
|
| 7 |
+
"bos_token_id": 50256,
|
| 8 |
+
"embd_pdrop": 0.0,
|
| 9 |
+
"eos_token_id": 50256,
|
| 10 |
+
"hidden_act": "gelu_new",
|
| 11 |
+
"hidden_size": 2560,
|
| 12 |
+
"initializer_range": 0.02,
|
| 13 |
+
"intermediate_size": 10240,
|
| 14 |
+
"layer_norm_eps": 1e-05,
|
| 15 |
+
"max_position_embeddings": 2048,
|
| 16 |
+
"model_type": "phi",
|
| 17 |
+
"num_attention_heads": 32,
|
| 18 |
+
"num_hidden_layers": 32,
|
| 19 |
+
"num_key_value_heads": 32,
|
| 20 |
+
"partial_rotary_factor": 0.4,
|
| 21 |
+
"qk_layernorm": false,
|
| 22 |
+
"resid_pdrop": 0.1,
|
| 23 |
+
"rope_scaling": null,
|
| 24 |
+
"rope_theta": 10000.0,
|
| 25 |
+
"tie_word_embeddings": false,
|
| 26 |
+
"torch_dtype": "float16",
|
| 27 |
+
"transformers_version": "4.37.0",
|
| 28 |
+
"use_cache": true,
|
| 29 |
+
"vocab_size": 51200
|
| 30 |
+
}
|
checkpoints/microsoft/phi-2/generation_config.json
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"_from_model_config": true,
|
| 3 |
+
"transformers_version": "4.37.0.dev0",
|
| 4 |
+
"eos_token_id": 50256,
|
| 5 |
+
"bos_token_id": 50256
|
| 6 |
+
}
|
checkpoints/microsoft/phi-2/lit_model.pth
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:e4b0dc84b6b8f2519a608092813ad37f30c11d663b4c1dafd0bf940f7704fe9e
|
| 3 |
+
size 5559456306
|
checkpoints/microsoft/phi-2/model-00001-of-00002.bin
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:637b705eb2051282e4f711d0ec56029dc6972a756ca812201f48de5157252c7d
|
| 3 |
+
size 4995676315
|
checkpoints/microsoft/phi-2/model-00002-of-00002.bin
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:f74b82f6b14f84c1d6bd162f0ea72cdc7839bff9b4929e8de7e849b0257bfb16
|
| 3 |
+
size 563839670
|
checkpoints/microsoft/phi-2/model_config.yaml
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
bias: true
|
| 2 |
+
block_size: 2048
|
| 3 |
+
gelu_approximate: tanh
|
| 4 |
+
head_size: 80
|
| 5 |
+
hf_config:
|
| 6 |
+
name: phi-2
|
| 7 |
+
org: microsoft
|
| 8 |
+
intermediate_size: 10240
|
| 9 |
+
lm_head_bias: true
|
| 10 |
+
mlp_class_name: GptNeoxMLP
|
| 11 |
+
n_embd: 2560
|
| 12 |
+
n_expert: 0
|
| 13 |
+
n_expert_per_token: 0
|
| 14 |
+
n_head: 32
|
| 15 |
+
n_layer: 32
|
| 16 |
+
n_query_groups: 32
|
| 17 |
+
name: phi-2
|
| 18 |
+
norm_class_name: LayerNorm
|
| 19 |
+
norm_eps: 1.0e-05
|
| 20 |
+
padded_vocab_size: 51200
|
| 21 |
+
padding_multiple: 512
|
| 22 |
+
parallel_residual: true
|
| 23 |
+
rope_base: 10000
|
| 24 |
+
rope_condense_ratio: 1
|
| 25 |
+
rotary_percentage: 0.4
|
| 26 |
+
scale_embeddings: false
|
| 27 |
+
shared_attention_norm: true
|
| 28 |
+
vocab_size: 50257
|
checkpoints/microsoft/phi-2/tokenizer.json
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
checkpoints/microsoft/phi-2/tokenizer_config.json
ADDED
|
@@ -0,0 +1,323 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"add_prefix_space": false,
|
| 3 |
+
"added_tokens_decoder": {
|
| 4 |
+
"50256": {
|
| 5 |
+
"content": "<|endoftext|>",
|
| 6 |
+
"lstrip": false,
|
| 7 |
+
"normalized": false,
|
| 8 |
+
"rstrip": false,
|
| 9 |
+
"single_word": false,
|
| 10 |
+
"special": true
|
| 11 |
+
},
|
| 12 |
+
"50257": {
|
| 13 |
+
"content": " ",
|
| 14 |
+
"lstrip": false,
|
| 15 |
+
"normalized": true,
|
| 16 |
+
"rstrip": false,
|
| 17 |
+
"single_word": false,
|
| 18 |
+
"special": false
|
| 19 |
+
},
|
| 20 |
+
"50258": {
|
| 21 |
+
"content": " ",
|
| 22 |
+
"lstrip": false,
|
| 23 |
+
"normalized": true,
|
| 24 |
+
"rstrip": false,
|
| 25 |
+
"single_word": false,
|
| 26 |
+
"special": false
|
| 27 |
+
},
|
| 28 |
+
"50259": {
|
| 29 |
+
"content": " ",
|
| 30 |
+
"lstrip": false,
|
| 31 |
+
"normalized": true,
|
| 32 |
+
"rstrip": false,
|
| 33 |
+
"single_word": false,
|
| 34 |
+
"special": false
|
| 35 |
+
},
|
| 36 |
+
"50260": {
|
| 37 |
+
"content": " ",
|
| 38 |
+
"lstrip": false,
|
| 39 |
+
"normalized": true,
|
| 40 |
+
"rstrip": false,
|
| 41 |
+
"single_word": false,
|
| 42 |
+
"special": false
|
| 43 |
+
},
|
| 44 |
+
"50261": {
|
| 45 |
+
"content": " ",
|
| 46 |
+
"lstrip": false,
|
| 47 |
+
"normalized": true,
|
| 48 |
+
"rstrip": false,
|
| 49 |
+
"single_word": false,
|
| 50 |
+
"special": false
|
| 51 |
+
},
|
| 52 |
+
"50262": {
|
| 53 |
+
"content": " ",
|
| 54 |
+
"lstrip": false,
|
| 55 |
+
"normalized": true,
|
| 56 |
+
"rstrip": false,
|
| 57 |
+
"single_word": false,
|
| 58 |
+
"special": false
|
| 59 |
+
},
|
| 60 |
+
"50263": {
|
| 61 |
+
"content": " ",
|
| 62 |
+
"lstrip": false,
|
| 63 |
+
"normalized": true,
|
| 64 |
+
"rstrip": false,
|
| 65 |
+
"single_word": false,
|
| 66 |
+
"special": false
|
| 67 |
+
},
|
| 68 |
+
"50264": {
|
| 69 |
+
"content": " ",
|
| 70 |
+
"lstrip": false,
|
| 71 |
+
"normalized": true,
|
| 72 |
+
"rstrip": false,
|
| 73 |
+
"single_word": false,
|
| 74 |
+
"special": false
|
| 75 |
+
},
|
| 76 |
+
"50265": {
|
| 77 |
+
"content": " ",
|
| 78 |
+
"lstrip": false,
|
| 79 |
+
"normalized": true,
|
| 80 |
+
"rstrip": false,
|
| 81 |
+
"single_word": false,
|
| 82 |
+
"special": false
|
| 83 |
+
},
|
| 84 |
+
"50266": {
|
| 85 |
+
"content": " ",
|
| 86 |
+
"lstrip": false,
|
| 87 |
+
"normalized": true,
|
| 88 |
+
"rstrip": false,
|
| 89 |
+
"single_word": false,
|
| 90 |
+
"special": false
|
| 91 |
+
},
|
| 92 |
+
"50267": {
|
| 93 |
+
"content": " ",
|
| 94 |
+
"lstrip": false,
|
| 95 |
+
"normalized": true,
|
| 96 |
+
"rstrip": false,
|
| 97 |
+
"single_word": false,
|
| 98 |
+
"special": false
|
| 99 |
+
},
|
| 100 |
+
"50268": {
|
| 101 |
+
"content": " ",
|
| 102 |
+
"lstrip": false,
|
| 103 |
+
"normalized": true,
|
| 104 |
+
"rstrip": false,
|
| 105 |
+
"single_word": false,
|
| 106 |
+
"special": false
|
| 107 |
+
},
|
| 108 |
+
"50269": {
|
| 109 |
+
"content": " ",
|
| 110 |
+
"lstrip": false,
|
| 111 |
+
"normalized": true,
|
| 112 |
+
"rstrip": false,
|
| 113 |
+
"single_word": false,
|
| 114 |
+
"special": false
|
| 115 |
+
},
|
| 116 |
+
"50270": {
|
| 117 |
+
"content": " ",
|
| 118 |
+
"lstrip": false,
|
| 119 |
+
"normalized": true,
|
| 120 |
+
"rstrip": false,
|
| 121 |
+
"single_word": false,
|
| 122 |
+
"special": false
|
| 123 |
+
},
|
| 124 |
+
"50271": {
|
| 125 |
+
"content": " ",
|
| 126 |
+
"lstrip": false,
|
| 127 |
+
"normalized": true,
|
| 128 |
+
"rstrip": false,
|
| 129 |
+
"single_word": false,
|
| 130 |
+
"special": false
|
| 131 |
+
},
|
| 132 |
+
"50272": {
|
| 133 |
+
"content": " ",
|
| 134 |
+
"lstrip": false,
|
| 135 |
+
"normalized": true,
|
| 136 |
+
"rstrip": false,
|
| 137 |
+
"single_word": false,
|
| 138 |
+
"special": false
|
| 139 |
+
},
|
| 140 |
+
"50273": {
|
| 141 |
+
"content": " ",
|
| 142 |
+
"lstrip": false,
|
| 143 |
+
"normalized": true,
|
| 144 |
+
"rstrip": false,
|
| 145 |
+
"single_word": false,
|
| 146 |
+
"special": false
|
| 147 |
+
},
|
| 148 |
+
"50274": {
|
| 149 |
+
"content": " ",
|
| 150 |
+
"lstrip": false,
|
| 151 |
+
"normalized": true,
|
| 152 |
+
"rstrip": false,
|
| 153 |
+
"single_word": false,
|
| 154 |
+
"special": false
|
| 155 |
+
},
|
| 156 |
+
"50275": {
|
| 157 |
+
"content": " ",
|
| 158 |
+
"lstrip": false,
|
| 159 |
+
"normalized": true,
|
| 160 |
+
"rstrip": false,
|
| 161 |
+
"single_word": false,
|
| 162 |
+
"special": false
|
| 163 |
+
},
|
| 164 |
+
"50276": {
|
| 165 |
+
"content": " ",
|
| 166 |
+
"lstrip": false,
|
| 167 |
+
"normalized": true,
|
| 168 |
+
"rstrip": false,
|
| 169 |
+
"single_word": false,
|
| 170 |
+
"special": false
|
| 171 |
+
},
|
| 172 |
+
"50277": {
|
| 173 |
+
"content": " ",
|
| 174 |
+
"lstrip": false,
|
| 175 |
+
"normalized": true,
|
| 176 |
+
"rstrip": false,
|
| 177 |
+
"single_word": false,
|
| 178 |
+
"special": false
|
| 179 |
+
},
|
| 180 |
+
"50278": {
|
| 181 |
+
"content": " ",
|
| 182 |
+
"lstrip": false,
|
| 183 |
+
"normalized": true,
|
| 184 |
+
"rstrip": false,
|
| 185 |
+
"single_word": false,
|
| 186 |
+
"special": false
|
| 187 |
+
},
|
| 188 |
+
"50279": {
|
| 189 |
+
"content": " ",
|
| 190 |
+
"lstrip": false,
|
| 191 |
+
"normalized": true,
|
| 192 |
+
"rstrip": false,
|
| 193 |
+
"single_word": false,
|
| 194 |
+
"special": false
|
| 195 |
+
},
|
| 196 |
+
"50280": {
|
| 197 |
+
"content": " ",
|
| 198 |
+
"lstrip": false,
|
| 199 |
+
"normalized": true,
|
| 200 |
+
"rstrip": false,
|
| 201 |
+
"single_word": false,
|
| 202 |
+
"special": false
|
| 203 |
+
},
|
| 204 |
+
"50281": {
|
| 205 |
+
"content": " ",
|
| 206 |
+
"lstrip": false,
|
| 207 |
+
"normalized": true,
|
| 208 |
+
"rstrip": false,
|
| 209 |
+
"single_word": false,
|
| 210 |
+
"special": false
|
| 211 |
+
},
|
| 212 |
+
"50282": {
|
| 213 |
+
"content": " ",
|
| 214 |
+
"lstrip": false,
|
| 215 |
+
"normalized": true,
|
| 216 |
+
"rstrip": false,
|
| 217 |
+
"single_word": false,
|
| 218 |
+
"special": false
|
| 219 |
+
},
|
| 220 |
+
"50283": {
|
| 221 |
+
"content": " ",
|
| 222 |
+
"lstrip": false,
|
| 223 |
+
"normalized": true,
|
| 224 |
+
"rstrip": false,
|
| 225 |
+
"single_word": false,
|
| 226 |
+
"special": false
|
| 227 |
+
},
|
| 228 |
+
"50284": {
|
| 229 |
+
"content": " ",
|
| 230 |
+
"lstrip": false,
|
| 231 |
+
"normalized": true,
|
| 232 |
+
"rstrip": false,
|
| 233 |
+
"single_word": false,
|
| 234 |
+
"special": false
|
| 235 |
+
},
|
| 236 |
+
"50285": {
|
| 237 |
+
"content": " ",
|
| 238 |
+
"lstrip": false,
|
| 239 |
+
"normalized": true,
|
| 240 |
+
"rstrip": false,
|
| 241 |
+
"single_word": false,
|
| 242 |
+
"special": false
|
| 243 |
+
},
|
| 244 |
+
"50286": {
|
| 245 |
+
"content": " ",
|
| 246 |
+
"lstrip": false,
|
| 247 |
+
"normalized": true,
|
| 248 |
+
"rstrip": false,
|
| 249 |
+
"single_word": false,
|
| 250 |
+
"special": false
|
| 251 |
+
},
|
| 252 |
+
"50287": {
|
| 253 |
+
"content": "\t\t\t\t\t\t\t\t\t",
|
| 254 |
+
"lstrip": false,
|
| 255 |
+
"normalized": true,
|
| 256 |
+
"rstrip": false,
|
| 257 |
+
"single_word": false,
|
| 258 |
+
"special": false
|
| 259 |
+
},
|
| 260 |
+
"50288": {
|
| 261 |
+
"content": "\t\t\t\t\t\t\t\t",
|
| 262 |
+
"lstrip": false,
|
| 263 |
+
"normalized": true,
|
| 264 |
+
"rstrip": false,
|
| 265 |
+
"single_word": false,
|
| 266 |
+
"special": false
|
| 267 |
+
},
|
| 268 |
+
"50289": {
|
| 269 |
+
"content": "\t\t\t\t\t\t\t",
|
| 270 |
+
"lstrip": false,
|
| 271 |
+
"normalized": true,
|
| 272 |
+
"rstrip": false,
|
| 273 |
+
"single_word": false,
|
| 274 |
+
"special": false
|
| 275 |
+
},
|
| 276 |
+
"50290": {
|
| 277 |
+
"content": "\t\t\t\t\t\t",
|
| 278 |
+
"lstrip": false,
|
| 279 |
+
"normalized": true,
|
| 280 |
+
"rstrip": false,
|
| 281 |
+
"single_word": false,
|
| 282 |
+
"special": false
|
| 283 |
+
},
|
| 284 |
+
"50291": {
|
| 285 |
+
"content": "\t\t\t\t\t",
|
| 286 |
+
"lstrip": false,
|
| 287 |
+
"normalized": true,
|
| 288 |
+
"rstrip": false,
|
| 289 |
+
"single_word": false,
|
| 290 |
+
"special": false
|
| 291 |
+
},
|
| 292 |
+
"50292": {
|
| 293 |
+
"content": "\t\t\t\t",
|
| 294 |
+
"lstrip": false,
|
| 295 |
+
"normalized": true,
|
| 296 |
+
"rstrip": false,
|
| 297 |
+
"single_word": false,
|
| 298 |
+
"special": false
|
| 299 |
+
},
|
| 300 |
+
"50293": {
|
| 301 |
+
"content": "\t\t\t",
|
| 302 |
+
"lstrip": false,
|
| 303 |
+
"normalized": true,
|
| 304 |
+
"rstrip": false,
|
| 305 |
+
"single_word": false,
|
| 306 |
+
"special": false
|
| 307 |
+
},
|
| 308 |
+
"50294": {
|
| 309 |
+
"content": "\t\t",
|
| 310 |
+
"lstrip": false,
|
| 311 |
+
"normalized": true,
|
| 312 |
+
"rstrip": false,
|
| 313 |
+
"single_word": false,
|
| 314 |
+
"special": false
|
| 315 |
+
}
|
| 316 |
+
},
|
| 317 |
+
"bos_token": "<|endoftext|>",
|
| 318 |
+
"clean_up_tokenization_spaces": true,
|
| 319 |
+
"eos_token": "<|endoftext|>",
|
| 320 |
+
"model_max_length": 2048,
|
| 321 |
+
"tokenizer_class": "CodeGenTokenizer",
|
| 322 |
+
"unk_token": "<|endoftext|>"
|
| 323 |
+
}
|
data.py
ADDED
|
@@ -0,0 +1,12 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import json
|
| 2 |
+
from datasets import load_dataset
|
| 3 |
+
|
| 4 |
+
dataset1 = load_dataset("Kaballas/100")
|
| 5 |
+
|
| 6 |
+
# Assuming that 'instruction', 'input', and 'output' are keys in your dataset
|
| 7 |
+
# If not, you'll need to modify this part to extract the correct data
|
| 8 |
+
data = [{'instruction': item['instruction'], 'input': item['input'], 'output': item['output']} for item in dataset1['train']]
|
| 9 |
+
|
| 10 |
+
# Write the data to a JSON file
|
| 11 |
+
with open('output.json', 'w') as f:
|
| 12 |
+
json.dump(data, f)
|
output.json
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
[{"instruction": "What is the main purpose of the Development environment during the Build stage?", "input": "", "output": "The Development environment is used for all build activities of the solution and performing Iteration Playbacks during the Build stage."}, {"instruction": "What data is used for Playbacks 1 and 2 in the Data environment?", "input": "", "output": "Scrambled data is used for Playbacks 1 and 2 in the Data environment."}, {"instruction": "What information does the Data environment contain and who has access to it?", "input": "", "output": "The Data environment contains unscrambled, sensitive information. Access is limited to the team performing data migration activities and administrators who have been cleared to access sensitive information."}, {"instruction": "What activities are performed in the Test environment during Iteration 3 and later stages?", "input": "", "output": "During the later stages of Iteration 3, the Test environment is manually configured, including moving all build configuration into Test and enabling integration with downstream applications such as AD. 
Once configuration has been migrated to the Test environment, scrambled data is loaded for SIT activities."}, {"instruction": "What is the role of the Parallel Payroll environment during Iteration 3 and Dress Rehearsal?", "input": "", "output": "During Iteration 3 and the Dress Rehearsal, the Test environment is copied to the Parallel Payroll environment to provide the configuration required for Parallel Payroll Test 1 without copying the scrambled data."}, {"instruction": "What happens to the Playpen environment at the end of the Build stage?", "input": "", "output": "At the end of the Build stage, the Playpen environment transitions to become the Training environment, supporting development and delivery of training activities."}, {"instruction": "What is the purpose of the Development environment during the Build stage?", "input": "", "output": "The Development environment is used to perform all build activities of the solution and for all Iteration Playbacks."}, {"instruction": "What type of data is used for Iteration Playbacks 1 and 2 in the Development environment?", "input": "", "output": "Scrambled data is used for Iteration Playbacks 1 and 2 because sensitive data is not required."}, {"instruction": "Why is access to the Data environment restricted?", "input": "", "output": "Access to the Data environment is restricted because it contains unscrambled, sensitive information."}, {"instruction": "When is the Test environment first used during the Build stage?", "input": "", "output": "The Test environment is not used until Iteration 3 commences."}, {"instruction": "What is the purpose of copying the Test environment to the Parallel Payroll environment?", "input": "", "output": "The Test environment is copied to the Parallel Payroll environment to provide the necessary configuration for Parallel Payroll Test 1 without copying scrambled data, as Parallel Payroll activities require unscrambled data."}, {"instruction": "What is the primary purpose of the Playpen 
environment during the Build stage?", "input": "", "output": "The Playpen environment is primarily used for business familiarization activities during the Build stage."}, {"instruction": "When will the Playpen environment transition to become the Training environment?", "input": "", "output": "The Playpen environment will transition to become the Training environment at the end of the Build stage."}, {"instruction": "What is the main purpose of the WIT QA Tool?", "input": "", "output": "The WIT QA Tool is used to validate the compliance of historic payroll solutions with Award Agreements and, during the Testing stage, the compliance of the HRplus solution with those Awards."}, {"instruction": "What happens in the Development environment during the Testing stage?", "input": "", "output": "In the Development environment, defects identified in any test activities are resolved and unit tested. Once resolved and unit tested, the resolutions are migrated to a downstream environment for further testing and creation of training material."}, {"instruction": "What is the role of the Data environment during the Testing stage?", "input": "", "output": "The Data environment is used for unit testing ETL activities and resolving any defects in ETL rules. It is also used for validating the data migrated during the pilot release and interim processes in place during deployments."}, {"instruction": "What activities are carried out in the Test environment during the Testing stage?", "input": "", "output": "The Test environment is used for all testing activities except User Acceptance Testing (UAT), Parallel Payroll (PPR) Testing, Interim Solution/Process Testing, and Unit Testing which take place in other environments."}, {"instruction": "What happens in the Parallel Payroll Environment during the Testing stage?", "input": "", "output": "The Parallel Payroll Environment is used for PPR activities. 
Changes resulting from defect resolution in the Development environment are migrated to this environment during the Testing stage."}, {"instruction": "What happens at the start of the Testing stage in the Data and Test environments?", "input": "", "output": "At the start of the Testing stage, the Data environment is refreshed from the Test environment, while the Test environment is fully configured with the latest scrambled data. These form the basis for SIT activities in their respective environments."}, {"instruction": "What happens to the Test environment after SIT?", "input": "", "output": "After SIT, the Test environment continues to support validating any changes to the solution before these changes are progressed to the UAT environment, but downstream applications are no longer connected to it."}, {"instruction": "What is the main purpose of the WIT QA Tool?", "input": "", "output": "The WIT QA Tool is used to ensure that both past payroll solutions and the new HRplus solution comply with Award Agreements and Industrial Instruments."}, {"instruction": "What is the function of the Development Environment during the Testing stage?", "input": "", "output": "The Development Environment is used to fix and unit test defects found during testing activities. Once resolved, these fixes are then moved to other environments for further testing and training material creation."}, {"instruction": "What kind of data is used in the Test Environment during the Testing stage?", "input": "", "output": "The Test Environment utilizes the latest scrambled data along with the full system configuration for SIT (System Integration Testing) activities."}, {"instruction": "How does the Parallel Payroll Environment change throughout the Testing stage?", "input": "", "output": "Initially, the Parallel Payroll Environment is fully configured but contains no data. 
As defects are resolved in the Development Environment, the fixes are migrated to the Parallel Payroll Environment after approval during the daily defect triage."}, {"instruction": "Which testing activities are not conducted in the Test Environment?", "input": "", "output": "The Test Environment is not used for User Acceptance Testing (UAT), Parallel Payroll (PPR) Testing, Interim Solution/Process Testing, or Unit Testing."}, {"instruction": "What happens during the start of the Testing stage to the data migration environment?", "input": "", "output": "At the start of the Testing stage, a data migration with unscrambled data is performed into the Parallel Payroll environment to support the first PPR test."}, {"instruction": "What is the purpose of the UAT Environment commissioning during the Testing Stage?", "input": "", "output": "The UAT Environment is commissioned during the Testing Stage to perform a Dress Rehearsal (DR3), where all configuration will be manually migrated to the UAT Environment, and an ETL (ETL4) will be performed to migrate data into the UAT environment for usage in UAT."}, {"instruction": "What is the role of the Training environment during the Testing stage?", "input": "", "output": "The Training environment is used during the Testing stage for preparation and development of training material, delivery of training activities, showcases of functionality, creation of Roster Patterns for migration to production during cutover, and training activities."}, {"instruction": "What is the Production environment used for during the Testing stage?", "input": "", "output": "The Production environment is not used during the Testing stage."}, {"instruction": "What is the purpose of the WIT QA Tool during the Testing stage?", "input": "", "output": "The WIT QA Tool is used during the Testing stage to validate both compliance of historic payroll solutions with the Award Agreements and compliance of the HRplus solution with those Awards."}, {"instruction": 
"What is the purpose of the data migration between PPR2 and PPR3?", "input": "", "output": "The data migration between PPR2 and PPR3 is a delta load to include new hires, terminations, movements, and retro changes to timecards that occurred in the legacy production environments."}, {"instruction": "When will the UAT environment be commissioned?", "input": "", "output": "The UAT environment will be commissioned on 1 June 2023 during the Testing Stage."}, {"instruction": "What is the purpose of DR3?", "input": "", "output": "DR3 is a Dress Rehearsal where all configuration will be manually migrated to the UAT Environment."}, {"instruction": "What will the Training environment be used for during the Testing stage?", "input": "", "output": "The Training environment will be used for preparation and development of training material, delivery of training activities, showcases of functionality, and creation of roster patterns."}, {"instruction": "Why are two dummy HSPs created in the Training environment?", "input": "", "output": "The two dummy HSPs, copied from Royal Perth Hospital's structure, will be used for training activities and showcases."}, {"instruction": "How is the data in the Training environment managed during training activities?", "input": "", "output": "The data under the Training dummy HSP will be regularly refreshed following each training activity to reset it for the next session."}, {"instruction": "What is the role of the WIT QA Tool during the Testing stage?", "input": "", "output": "The WIT QA Tool is used to validate the compliance of both historic payroll solutions and the HRplus solution with Award Agreements."}, {"instruction": "What is the purpose of the Development environment during the Pilot and Deployment stage?", "input": "", "output": "The Development environment is used to resolve defects identified in production or during testing activities, and to support unit testing activities. 
It is regularly refreshed from production to provide up-to-date data."}, {"instruction": "What is the role of the Data Environment in the Pilot and Deployment stage?", "input": "", "output": "The Data Environment is used to validate Deployments before migration to production. It is refreshed from Production at the start of the Pilot and Deployment stage and before each Deployment for accurate validation. Pay Simulation(s) are performed for each Deployment managed in this environment."}, {"instruction": "What activities are performed in the Test Environment during the Pilot and Deployment stage?", "input": "", "output": "The Test Environment is used for all BAU testing activities except unit testing. It is refreshed from Production at a suitable point to ensure accurate validation. Defect resolutions from the Development Environment are migrated to the Test Environment subject to approval."}, {"instruction": "What is the function of the Parallel Payroll Environment in the Pilot and Deployment stage?", "input": "", "output": "The Parallel Payroll Environment is used to validate Deployments before each Deployment is migrated to production. It is refreshed from Production at the start of the Pilot and Deployment stage and before each Deployment. Pay Simulation(s) are performed for each Deployment managed in this environment."}, {"instruction": "What is the purpose of the Development Environment during the Pilot and Deployment stage?", "input": "", "output": "The Development Environment is used to resolve defects found in production or during testing activities."}, {"instruction": "How often is the Development environment refreshed from production?", "input": "", "output": "The Development environment (excluding SAP ECP) is refreshed regularly. 
SAP ECP is refreshed on an as-needed basis."}, {"instruction": "What is the purpose of refreshing the Data Environment from the Production environment at the start of each Deployment?", "input": "", "output": "Refreshing the Data Environment ensures the latest configuration and data are used, making validation as accurate as possible."}, {"instruction": "Which deployments are NOT validated in the Parallel Payroll Environment?", "input": "", "output": "Peel Health Campus, CAHS Services, and PCH deployments are not validated in the Parallel Payroll Environment."}, {"instruction": "What is the purpose of the Test Environment during the Pilot and Deployment stage?", "input": "", "output": "The Test Environment is used for all BAU testing activities, except unit testing."}, {"instruction": "What factors determine the timing of refreshing the Test Environment from the Production environment?", "input": "", "output": "The timing considers outstanding testing activities, the need to retain a copy of tested data, and support pack release schedules."}, {"instruction": "What is the purpose of the Training environment during the Pilot and Deployment stage?", "input": "", "output": "The Training environment is used for ongoing business user training during the Pilot and Deployment stage."}, {"instruction": "Who is responsible for migrating changes from the Development environment to the Training environment?", "input": "", "output": "The developer responsible for resolving the defect is responsible for migrating changes from the Development environment to the Training environment."}, {"instruction": "What activities occur in the Development environment during the BAU stage?", "input": "", "output": "During the BAU stage, the Development environment is used to resolve defects identified in production or during testing activities."}, {"instruction": "What happens to the Data environment after the final Deployment and before the Training environment is released?", "input": "", 
"output": "The Data environment will be refreshed from the Training environment in order to remove unscrambled data from the environment."}, {"instruction": "What is the role of the Test environment during the BAU stage?", "input": "", "output": "The Test environment is used during the BAU stage for all BAU testing activities except unit testing."}, {"instruction": "What components make up the Development environment?", "input": "", "output": "The Development environment is made up of components such as the HRPlus Application, Reporting Portal, Active Directory, Deloitte PeopleForms, Snowflake, Data Remedy, Schedule Planning, SF EC, UKG Dimensions, SAP CPI, WidgetBrain, SF ECP, 3rd party/legacy Apps, and Agency Management."}, {"instruction": "What is the purpose of the Training environment during the Pilot and Deployment stage?", "input": "", "output": "The Training environment is used for ongoing business user training during the Pilot and Deployment stage."}, {"instruction": "When will the Data environment no longer be available?", "input": "", "output": "The Data environment will no longer be available from 2027 onward."}, {"instruction": "What type of testing is conducted in the Development environment during the BAU stage?", "input": "", "output": "Unit testing is conducted in the Development environment during the BAU stage."}, {"instruction": "Why is the Test environment refreshed from production regularly?", "input": "", "output": "The Test environment is refreshed from production on a regular basis to provide up-to-date data to support BAU testing activities."}, {"instruction": "Which SuccessFactors component is used in the Development environment?", "input": "", "output": "The Development environment uses the SuccessFactors EC component."}, {"instruction": "What is the name of the client in the Development ECP instance?", "input": "", "output": "The name of the client in the Development ECP instance is Y7H 100."}, {"instruction": "Which shared environment 
does SAP CPI utilize?", "input": "", "output": "SAP CPI uses the shared non-prod environment wah-ci-test."}, {"instruction": "How does the HRplus solution manage and store its data?", "input": "", "output": "The HRplus solution utilizes a shared Snowflake account to manage and store its data via databases and schemas."}, {"instruction": "What tool is used for data ingestion into HRplus Snowflake databases?", "input": "", "output": "Talend, hosted on AWS, is used for data ingestion into HRplus Snowflake databases."}, {"instruction": "What is the purpose of the Reporting Portal?", "input": "", "output": "The Reporting Portal acts as a centralized serving layer to host custom Power BI reports developed for HRplus solutions. It allows users to access various HRplus reports in one place."}, {"instruction": "What are the HRplus Snowflake databases?", "input": "", "output": "The HRplus Snowflake databases are DEV_HRMIS_CDS, DEV_HRMIS_LDS, DEV_HRMIS_PDS, DEV_HRMIS_IDS, and DEV_HRMIS_RDS."}, {"instruction": "What is the role of the Test OU in the Production Active Directory?", "input": "", "output": "The Test OU in the Production Active Directory is used for building and unit testing the integration with the WA Health Active Directory. 
It contains a Test OU and replicates the structure of existing OUs, with test user identities added within the Test OUs."}, {"instruction": "What is the impact of not having development or testing instances for downstream applications?", "input": "", "output": "If development or testing instances are not available for downstream applications, unit testing will complete at HRplus application boundaries, or an analysis will be performed to connect the Development environment to the production instances of those applications if it can be ensured that those unit testing activities will not impact productive usage, production performance or data."}, {"instruction": "How is access to the Snowflake account and its databases/schemas managed?", "input": "", "output": "Access is managed through HSS defined roles assigned to users."}, {"instruction": "What is the purpose of Talend in this environment?", "input": "", "output": "Talend, hosted on AWS, enables ingestion of data into HRplus Snowflake databases."}, {"instruction": "What is the function of the Reporting Portal?", "input": "", "output": "The Reporting Portal serves as a centralized platform hosting custom Power BI reports for HRplus solutions, allowing users to access various reports in one place."}, {"instruction": "What is the primary use of the Production instance?", "input": "", "output": "The Production instance is primarily used for ETLs (Extract, Transform, Load) processes."}, {"instruction": "What is the recommended approach for connecting to downstream applications during the development stage?", "input": "", "output": "The environment should ideally be connected to development instances of downstream applications to facilitate unit testing of integrations. 
If unavailable, test instances should be used initially and later connected to the Test environment for comprehensive testing."}, {"instruction": "Under what circumstances is connecting the Development environment to production instances of integrated applications considered?", "input": "", "output": "Connecting to production instances is permissible only when development or test instances are unavailable and if it can be guaranteed that unit testing activities won't negatively impact production usage, performance, or data."}, {"instruction": "Why is a Test OU being created in the Production Active Directory instance?", "input": "", "output": "A Test OU is created to facilitate the building and unit testing of the integration with the WA Health Active Directory without impacting production identities."}, {"instruction": "How is the risk of impacting production identities mitigated when integrating with the Active Directory?", "input": "", "output": "The integration utilizes a service user with restricted permissions, allowing only write and read access to the Test OU, thereby minimizing the risk to production identities."}, {"instruction": "How will developers authenticate in the development environment?", "input": "", "output": "Developers will authenticate using their HE number and password as Single Sign On will not be implemented in the development environment."}, {"instruction": "What is the Data Environment made up of in terms of HRplus Application components?", "input": "", "output": "The Data Environment consists of SuccessFactors EC, EC Payroll, UKG Dimensions (including Dell Boomi instance), SAP CPI, WidgetBrain, SF ECP, and Data Remedy (for data migration)."}, {"instruction": "What is the purpose of the Test instance of Data Remedy in the Data Environment?", "input": "", "output": "The Test instance of Data Remedy is used to manage and migrate data to various environments in the Data Environment."}, {"instruction": "Which Active Directory and Single Sign 
On policies are implemented in the Data Environment?", "input": "", "output": "Single Sign on will not be implemented in the Data environment and the environment will not be integrated with Active Directory."}, {"instruction": "What components are removed during the UAT stage of the project in the Test Environment?", "input": "", "output": "Deloitte PeopleForms, wah-ci-test, wah-uat, AWS account for UAT, and Agency Management are removed during the UAT stage of the project in the Test Environment."}, {"instruction": "How is access control to the Snowflake account and its databases/schemas achieved?", "input": "", "output": "Access control to the Snowflake account and its databases/schemas is achieved through HSS defined roles that are assigned to users."}, {"instruction": "What is the role of the Reporting Portal in the Test Environment?", "input": "", "output": "The Reporting Portal acts as a centralised serving layer to host custom Power BI reports developed for HRplus solutions."}, {"instruction": "What is the Data Remedy's role for Data migration in the Test Environment?", "input": "", "output": "The Production instance of Data Remedy is used for ETLs (Extract, Transform, Load) in the Test Environment."}, {"instruction": "What is the purpose of moving the Test OU to the Test Active Directory?", "input": "", "output": "To provide further protection for the Production AD."}, {"instruction": "Which component is used for data migration in the Data environment?", "input": "", "output": "Data Remedy."}, {"instruction": "Is Single Sign On implemented in the Data environment?", "input": "", "output": "No, Single Sign On will not be implemented in the Data environment."}, {"instruction": "What happens to the components highlighted in blue in Figure 8 during the UAT stage?", "input": "", "output": "They will be removed and re-connected to the UAT environment for the duration of UAT Testing."}, {"instruction": "Which client is used for EC Payroll in the Test 
environment?", "input": "", "output": "Y7I 100."}, {"instruction": "What is the purpose of the Snowflake account in the Test environment?", "input": "", "output": "It is used by the HRplus solutions (Schedule Planning and Reporting) to store and manage data via databases and schemas."}, {"instruction": "How is access control managed for the Snowflake account?", "input": "", "output": "Through HSS defined roles that are assigned to users."}, {"instruction": "What is the role of Talend in the Test environment?", "input": "", "output": "Talend, hosted on AWS, enables ingestion of data into HRplus Snowflake databases."}, {"instruction": "What is the function of the Reporting Portal?", "input": "", "output": "It acts as a centralized serving layer to host custom Power BI reports developed for HRplus solutions."}, {"instruction": "Which instance of Data Remedy is used in the Test environment for data migration?", "input": "", "output": "The Production instance."}, {"instruction": "What are the main applications that require testing instances?", "input": "", "output": "The main applications that require testing instances are Oracle FMIS, Oracle EDW, IMC, Snowflake, EDRMS, cGov Credentialling solution, CMGS, and FMIS."}, {"instruction": "What is the purpose of implementing Single Sign On in the Test environment?", "input": "", "output": "The purpose of implementing Single Sign On in the Test environment is to allow users to identify using Single Sign On with the Test Active Directory instance."}, {"instruction": "What is the role of the Test Active Directory instance in the Test environment?", "input": "", "output": "The Test Active Directory instance is used for both testing Single Sign On as well as testing the integration with Active Directory."}, {"instruction": "What is the composition of the HRplus solutions in the UAT environment?", "input": "", "output": "The HRplus solutions in the UAT environment include SuccessFactors EC, EC Payroll, UKG Dimensions, SAP CPI, 
Deloitte PeopleForms, WidgetBrain, Schedule Planning, and Agency Management."}, {"instruction": "How is access control to the Snowflake account and its databases/schemas achieved?", "input": "", "output": "Access control to the Snowflake account and its databases/schemas is achieved through HSS defined roles that are assigned to users."}, {"instruction": "What is the function of the Reporting Portal in the UAT environment?", "input": "", "output": "The Reporting Portal acts as a centralised serving layer to host custom Power BI reports developed for HRplus solutions."}, {"instruction": "What is the main purpose of the document?", "input": "", "output": "This document excerpt describes the environment plan for testing integrations within a system, likely related to HR and payroll."}, {"instruction": "What is the impact of not having dedicated test instances for applications like Oracle FMIS and Snowflake?", "input": "", "output": "It would significantly impact the quality of testing, increase the risk of issues when integrations move into production, and potentially necessitate investment in a test instance unless the impact of issues is deemed negligible."}, {"instruction": "What happens to the integration with certain applications during the project UAT stage?", "input": "", "output": "Integration to solutions like Oracle FMIS, EDW, IMC, Snowflake, EDRMS, cGov, and CMGS will be removed as these applications will be connected directly to the UAT environment for testing."}, {"instruction": "Why is integration with FMIS required during the later stages of PPR testing?", "input": "", "output": "It's required to validate the GL Posting data, which might involve temporarily disconnecting FMIS from the HRplus Test Environment and connecting it to the Parallel Payroll Environment."}, {"instruction": "How is access control managed for the shared Snowflake account used by HRplus solutions?", "input": "", "output": "Access is controlled through HSS-defined roles assigned to 
users."}, {"instruction": "What is the function of the Reporting Portal?", "input": "", "output": "It serves as a centralized platform hosting custom Power BI reports for HRplus solutions, allowing users to access various reports from a single location."}, {"instruction": "What is the recommended approach when it is not possible to connect the test environment to downstream applications?", "input": "", "output": "When connecting the test environment to downstream applications is not possible, an analysis is performed to determine if the test environment can be connected to production instances. However, this should not impact productive usage or data. If neither option is available, it is encouraged to invest in a test instance. This is to maintain the quality of testing and mitigate risks when the integrations move into production."}, {"instruction": "What are the minimum required testing instances for the Build stage?", "input": "", "output": "The minimum required testing instances for the Build stage include Oracle FMIS, Oracle EDW, IMC, Snowflake (including WebPAS Data Scheme), EDRMS, cGov Credentialling solution (once selected), and CMGS (Dynamics 365)."}, {"instruction": "What is the approach for integrating FMIS during the later stages of PPR testing for GL Posting data validation?", "input": "", "output": "The approach involves either disconnecting the FMIS solution from the HRplus SIT or UAT Environment and connecting it to the Parallel Payroll Environment for a limited period, or generating files from the Parallel Payroll environment and sharing them with the Finance Test to import into the test FMIS environment for validation."}, {"instruction": "Will Single Sign On be implemented in the Parallel Payroll environment?", "input": "", "output": "No, Single Sign On will not be implemented in the Parallel Payroll environment."}, {"instruction": "What is the role of the Production instance of Data Remedy in the Parallel Payroll environment?", "input": "", 
"output": "The Production instance of Data Remedy is used to manage and migrate data to the Parallel Payroll environments."}, {"instruction": "What is the main purpose of connecting the test environment to downstream applications?", "input": "", "output": "Connecting the test environment to downstream applications supports testing activities."}, {"instruction": "When is connecting the test environment to production instances of applications considered acceptable?", "input": "", "output": "Connecting to production instances is acceptable only when it can be ensured that testing activities will not impact productive usage or data."}, {"instruction": "What is the minimum requirement for testing instances in the Build stage?", "input": "", "output": "Testing instances are required for applications like Oracle FMIS, Oracle EDW, IMC, Snowflake, EDRMS, cGov Credentialling solution, and CMGS."}, {"instruction": "What happens to the components after UAT is completed?", "input": "", "output": "After UAT, the components are re-connected back to the Test environment to support BAU issue resolution."}, {"instruction": "Why is integration with FMIS required during PPR testing?", "input": "", "output": "Integration with FMIS is needed to validate the GL Posting data."}, {"instruction": "How is Single Sign On (SSO) expected to be used in the Test environment?", "input": "", "output": "All users are expected to identify using SSO with the Test Active Directory instance."}, {"instruction": "Which Active Directory instance will be used for the UAT environment?", "input": "", "output": "The UAT environment will connect to the Test Active Directory instance."}, {"instruction": "What is the purpose of the Parallel Payroll environment?", "input": "", "output": "The Parallel Payroll environment is used for testing HRplus application components."}, {"instruction": "Which tool is used for managing and migrating data to the Parallel Payroll environment?", "input": "", "output": "The 
Production instance of Data Remedy is used to manage and migrate data."}, {"instruction": "Will Single Sign On be implemented in the Parallel Payroll environment?", "input": "", "output": "No, Single Sign On will not be implemented in the Parallel Payroll environment."}, {"instruction": "What are the components of the Training environment?", "input": "", "output": "The Training environment includes SuccessFactors EC, EC Payroll, UKG Dimensions, SAP CPI, Deloitte PeopleForms, WidgetBrain, Agency Management, Data Remedy (for data migration), and a Reporting Portal."}, {"instruction": "What is the role of the Reporting Portal?", "input": "", "output": "The Reporting Portal acts as a centralized serving layer to host custom Power BI reports developed for HRplus solutions."}, {"instruction": "What is the implementation status of Single Sign On in the Training environment?", "input": "", "output": "Single Sign On will be implemented in the Training environment."}, {"instruction": "What is the purpose of Data Remedy in the Training environment?", "input": "", "output": "Data Remedy is used for data migration in the Training environment."}, {"instruction": "What is the shared component between the Training and Production environments?", "input": "", "output": "The shared component between the Training and Production environments is the use of Snowflake for data management and the Reporting Portal for hosting custom Power BI reports."}, {"instruction": "What is the purpose of the Training environment?", "input": "", "output": "The Training environment is designed for training purposes and may require connectivity to Snowflake to support training activities."}, {"instruction": "Which component in the Training environment includes a Dell Boomi instance?", "input": "", "output": "The UKG Dimensions component in the Training environment includes a Dell Boomi instance."}, {"instruction": "What is the name of the SuccessFactors EC instance in the Training environment?", "input": 
"", "output": "The SuccessFactors EC instance in the Training environment is named 'healthsuppT3'."}, {"instruction": "How is access control managed for the Snowflake account in the HRplus solutions?", "input": "", "output": "Access control to the Snowflake account is managed through HSS defined roles that are assigned to users."}, {"instruction": "What is the purpose of the Reporting Portal?", "input": "", "output": "The Reporting Portal serves as a centralized platform hosting custom Power BI reports for HRplus solutions, providing users with a single point of access to various HRplus reports."}, {"instruction": "How is Single Sign On (SSO) implemented in the Training environment?", "input": "", "output": "Single Sign On in the Training environment will be implemented through the Production Active Directory instance."}, {"instruction": "What is the name of the SuccessFactors EC instance in the Production environment?", "input": "", "output": "The SuccessFactors EC instance in the Production environment is named 'Healthsupp'."}, {"instruction": "What is the purpose of Talend in the Production environment?", "input": "", "output": "Talend, hosted on AWS, facilitates the ingestion of data into HRplus Snowflake databases within the Production environment."}, {"instruction": "What is the purpose of the Reporting Portal?", "input": "", "output": "The Reporting Portal acts as a centralized serving layer to host custom Power BI reports developed for HRplus solutions."}, {"instruction": "How are changes migrated in SuccessFactors Employee Central?", "input": "", "output": "For SuccessFactors Employee Central, changes are typically manually applied in the target environment. 
However, there are limited cases where features are provided to export and import configurations."}, {"instruction": "What application is used to migrate changes in UKG Dimensions?", "input": "", "output": "In UKG Dimensions, the 'Setup Data Manager' application is used to migrate changes between two environments."}, {"instruction": "What potential issue may occur when migrating changes in UKG Dimensions?", "input": "", "output": "When migrating changes in UKG Dimensions, there is a potential for the system to re-totalize some or all of the timecards, which may cause system performance impacts."}, {"instruction": "What is the recommended time to move changes into productive environments?", "input": "", "output": "It is recommended to move changes into productive environments at agreed times and, where possible, at periods of low usage to minimize risk of impact."}, {"instruction": "What is the main purpose of the Reporting Portal?", "input": "", "output": "The Reporting Portal serves as a centralized platform to host custom Power BI reports for HRplus solutions, allowing users to access various reports in one place."}, {"instruction": "Which Snowflake Databases are used for HRMIS?", "input": "", "output": "The Snowflake Databases used for HRMIS are PRD_HRMIS_CDS, PRD_HRMIS_LDS, PRD_HRMIS_PDS, PRD_HRMIS_IDS, and PRD_HRMIS_RDS."}, {"instruction": "What is the function of the Data Remedy production instance?", "input": "", "output": "The Data Remedy production instance is used for ETLs (Extract, Transform, Load) processes related to data migration."}, {"instruction": "How is Single Sign On (SSO) implemented in the Production environment?", "input": "", "output": "Single Sign On in the Production environment is expected to be used by all users and will be provided by the Production Active Directory instance."}, {"instruction": "How is configuration migrated in SuccessFactors Employee Central for most configuration items?", "input": "", "output": "For most configuration 
items in SuccessFactors Employee Central, changes are manually applied in the target environment by developers."}, {"instruction": "When are configuration import features used in SuccessFactors Employee Central?", "input": "", "output": "Configuration import features in SuccessFactors Employee Central are primarily used when there's a significant level of change, such as during the initial configuration cutover."}, {"instruction": "How are configuration changes migrated in UKG Dimensions?", "input": "", "output": "In UKG Dimensions, configuration changes are migrated using the UKG Dimensions 'Setup Data Manager' application, where developers manually select and transport changes to the target environment."}, {"instruction": "What is a potential impact of applying configuration changes in UKG Dimensions?", "input": "", "output": "Applying configuration changes in UKG Dimensions can potentially cause the system to re-totalise timecards and might impact system performance."}, {"instruction": "What recommendation is given for moving changes into productive environments?", "input": "", "output": "It is recommended to move the changes into productive environments at agreed times and, where possible, at periods of low usage to minimize risk of impact."}, {"instruction": "What is the process for migrating an integration flow in Dell Boomi?", "input": "", "output": "The integration flow can be exported from the source environment and imported into the target, overwriting the integration flow in the target environment. Connectivity must be re-configured to re-connect to the relevant instances for the target environment."}, {"instruction": "What is the role of the Transport Management System in SuccessFactors ECP?", "input": "", "output": "The Transport Management System in SuccessFactors ECP captures configuration changes in a container named a transport. 
This configuration container can hold multiple related configuration changes and can be imported into other environments."}, {"instruction": "How are changes migrated in WidgetBrain?", "input": "", "output": "In WidgetBrain, the revised configuration must be manually applied in the target environment. The developer is required to re-apply the changes they applied in the previous environment."}, {"instruction": "What is unique about the SAP CPI instance setup?", "input": "", "output": "SAP CPI only has two instances, a production and non-production instance. In the non-production instance, there will be multiple copies of each integration flow, each copy relating to the environment the copy is used in."}, {"instruction": "When is it recommended to apply changes to a productive environment?", "input": "", "output": "It is recommended to move changes into productive environments at agreed times and, where possible, at periods of low usage to minimize risk of impact."}, {"instruction": "Why should changes not be applied during payroll extract?", "input": "", "output": "Applying changes during payroll extract could result in incorrect payroll results being sent."}, {"instruction": "How are integration flows migrated in UKG Dimensions?", "input": "", "output": "Integration flows in UKG Dimensions can be migrated by exporting the flow from the source environment and importing it into the target environment, overwriting the existing flow."}, {"instruction": "What must be done with integration tables when migrating an integration flow in UKG Dimensions?", "input": "", "output": "Any integrations tables used by the integration flow must be manually migrated as required."}, {"instruction": "What is the name of the tool used in SuccessFactors ECP for managing configuration changes?", "input": "", "output": "SuccessFactors ECP utilizes a tool called the Transport Management System for managing configuration changes."}, {"instruction": "How are configuration changes grouped and 
moved in SuccessFactors ECP?", "input": "", "output": "Configuration changes in SuccessFactors ECP are grouped into configuration containers called 'transports,' which are then moved to subsequent environments as a single unit."}, {"instruction": "Is manual configuration possible in SuccessFactors ECP outside of the Development environment?", "input": "", "output": "No, manual configuration is only allowed in the Development environment. All other environments require configuration changes to be migrated using the transport mechanism."}, {"instruction": "How are configuration changes migrated in WidgetBrain?", "input": "", "output": "In WidgetBrain, configuration changes are migrated by manually applying the revised configuration in the target environment."}, {"instruction": "How many instances does SAP CPI have?", "input": "", "output": "SAP CPI has two instances: a production instance and a non-production instance."}, {"instruction": "How are different environments managed within the non-production instance of SAP CPI?", "input": "", "output": "The non-production instance of SAP CPI utilizes multiple copies of each integration flow, with each copy corresponding to a specific environment (e.g., Development, Test, Data). 
Each copy is then connected to the relevant instances of HRplus components and downstream applications."}, {"instruction": "What are the different copies of 'Integration Flow 1' in the non-production environment?", "input": "", "output": "In the non-production environment, there will be copies such as 'Integration Flow 1 - Development', 'Integration Flow 1 - Test', 'Integration Flow 1 - Data'."}, {"instruction": "How is an integration flow migrated from non-production to production?", "input": "", "output": "The integration flow is exported to a file from the non-production environment and then imported into production, overwriting the version in production."}, {"instruction": "What is the recommended time for migrating changes into production for Deloitte PeopleForms?", "input": "", "output": "It is recommended to move the changes into productive environments at agreed times and, where possible, at periods of low usage to minimise risk of impact."}, {"instruction": "What approach does DataRemedy use for development and deployment?", "input": "", "output": "DataRemedy uses a DevOps approach, including Continuous Integration/Continuous Delivery (CI/CD), allowing developers to make changes to code that are then automatically tested and pushed out for delivery and deployment."}, {"instruction": "Which components of the solution can be copied from one environment to another?", "input": "", "output": "SuccessFactors Employee Central, UKG Dimensions, and SuccessFactors EC Payroll can be copied from one environment to another."}, {"instruction": "What precautions should be taken when copying SuccessFactors EC environments?", "input": "", "output": "Care must be taken when copying SuccessFactors EC environments, ensuring that integrations are re-pointed, theming is updated, and user records are updated to ensure only authorized users can access the environment."}, {"instruction": "How are integration flows migrated in a non-production environment?", "input": "", "output": 
"In a non-production environment, an integration flow is migrated by copying the source integration flow to the target integration flow, overwriting the existing version in the target."}, {"instruction": "What is the process for migrating an integration flow from a non-production environment to production?", "input": "", "output": "The integration flow is first exported to a file and then imported into the production environment. This overwrites the existing version in production."}, {"instruction": "Is there any downtime associated with migrating integration flows?", "input": "", "output": "While migration itself doesn't require downtime, it's recommended to perform migrations during periods of low usage to minimize potential impact. The target integration must be stopped before importing the new version."}, {"instruction": "How many environments does Deloitte PeopleForms have?", "input": "", "output": "Deloitte PeopleForms has two environments: production and non-production."}, {"instruction": "What needs to be manually mapped when migrating a form in Deloitte PeopleForms?", "input": "", "output": "Integrations and value help need to be mapped manually to match the configuration of each environment."}, {"instruction": "What is the purpose of the DataRemedy platform mentioned in the document?", "input": "", "output": "DataRemedy is used for task management, defect and enhancement tracking, and code version control within the Data Migration Development team. It utilizes a DevOps approach with CI/CD to facilitate rapid deployment and iterative development."}, {"instruction": "Which HRplus components allow for environment copying?", "input": "", "output": "SuccessFactors Employee Central, UKG Dimensions, and SuccessFactors EC Payroll allow for environment copying."}, {"instruction": "What precaution should be taken when copying environments for HRplus components?", "input": "", "output": "Care should be taken to copy all related components together. 
Copying only one component without the others can lead to data synchronization issues and errors during testing."}, {"instruction": "What happens to the data and configuration in the target environment when copying a SuccessFactors Employee Central environment?", "input": "", "output": "Both the data and configuration in the target environment are overwritten with the contents of the source environment."}, {"instruction": "Who is responsible for copying SuccessFactors EC environments?", "input": "", "output": "Certified developers are responsible for copying SuccessFactors EC environments, but a time slot must be booked with SAP in advance."}, {"instruction": "What actions are required after copying a SuccessFactors EC environment?", "input": "", "output": "After copying, integrations need to be re-pointed, instance-specific configurations like theming need updating, and user records need to be updated to ensure only authorized users have access."}, {"instruction": "What happens when copying one environment of UKG Dimensions to another?", "input": "", "output": "Both the data and configuration in the target environment are overwritten with the contents of the source environment. 
It is not allowed to copy any environment to production."}, {"instruction": "Who performs the copying of UKG Dimensions environments?", "input": "", "output": "UKG performs the copying of UKG Dimensions environments upon request of a responsible party."}, {"instruction": "What steps are required after copying a UKG Dimensions environment?", "input": "", "output": "Integrations must be re-pointed, user records updated, and data scrambled if necessary."}, {"instruction": "What is the role of the Test Manager during the Testing stage of the project?", "input": "", "output": "The Test Manager chairs a daily triage, reviews the progress of all defects, and authorises the migration of a defect correction from development to subsequent environments."}, {"instruction": "What is Employee Central?", "input": "", "output": "Employee Central is the commercial name of the HR records component of the HRplus solution. It holds records on Organization Structure, Personal Information, Employment Information, Compensation Information, Long Term Absences, and Payroll Information."}, {"instruction": "What is the commercial name of the payroll component of the HRplus solution?", "input": "", "output": "Employee Central Payroll"}, {"instruction": "What is the role of Active Directory in the Health network?", "input": "", "output": "Active Directory is an application used to manage the user identities on the network and grant access to the Health network."}, {"instruction": "What happens to the data and configuration in the target environment during the copying process of UKG Dimensions?", "input": "", "output": "During the copying process of UKG Dimensions, the data and configuration in the target environment are overwritten with the contents of the source environment."}, {"instruction": "Why is it important to update user records after copying a UKG Dimensions environment?", "input": "", "output": "It is crucial to update user records after copying a UKG Dimensions environment to 
ensure only authorized users have access to the specific environment, preventing access from users authorized for the source environment."}, {"instruction": "What precaution should be taken when copying an environment with unscrambled data to one with scrambled data in both UKG Dimensions and SuccessFactors EC Payroll?", "input": "", "output": "When copying an environment with unscrambled data to one with scrambled data, the data in the target environment should be scrambled before granting user access to maintain data security and privacy."}, {"instruction": "Who is responsible for approving the migration of defect corrections during the Testing stage of the project?", "input": "", "output": "The Test Manager is responsible for approving the migration of defect corrections during the Testing stage, with additional approval from the Training Manager for the training environment."}, {"instruction": "What is the purpose of the daily triage held during the Testing stage?", "input": "", "output": "The purpose of the daily triage is to review the progress of all defects and authorize the migration of corrections from development to subsequent testing environments."}, {"instruction": "What happens to the change management process from the Cutover and Deployment stage onwards?", "input": "", "output": "From the Cutover and Deployment stage, change management aligns with HSS's Change Management processes, including approval via a CAB process, requiring double handling of manual changes in production for validation."}, {"instruction": "What is the role of Active Directory as defined in the glossary?", "input": "", "output": "Active Directory manages user identities on the network and grants access to the Health network."}, {"instruction": "What kind of information is stored within Employee Central?", "input": "", "output": "Employee Central stores Organization Structure, Personal Information, Employment Information, Compensation Information, and Long Term Absences."}, 
{"instruction": "What is the function of Employee Central Payroll?", "input": "", "output": "Employee Central Payroll manages Payroll Processing with Payroll Control Centre, Salary Payments, Pay Statements, Legal Reporting, Single Touch Payroll, Super Stream, and Bank Files."}, {"instruction": "What is UKG Dimensions primarily used for?", "input": "", "output": "UKG Dimensions is the commercial name for the rostering component of the HRplus solution."}, {"instruction": "What is the commercial name of the auto-scheduling component of the HRplus solution?", "input": "", "output": "WidgetBrain by Quinyx"}, {"instruction": "What is Deloitte DNA Solution?", "input": "", "output": "Deloitte's Best Practice solution."}, {"instruction": "What is the name of the program to replace the legacy HR solutions?", "input": "", "output": "HRMIS"}, {"instruction": "What is the name of the solution being implemented by the HRMIS program?", "input": "", "output": "HRplus"}, {"instruction": "What is the purpose of SAP CPI?", "input": "", "output": "SAP CPI is an Integration Middleware tool provided by SAP and forms part of the SAP Business Transformation Platform (BTP) suite of applications provided by SAP."}, {"instruction": "What is the function of Deloitte PeopleForms?", "input": "", "output": "Deloitte PeopleForms is a solution provided to create custom forms for data entry and workflow approval."}, {"instruction": "What does ECP stand for and what is its purpose?", "input": "", "output": "ECP stands for Employee Central Payroll. Its purpose is not explicitly stated in the passage, but it is mentioned as part of the SAP Business Transformation Platform (BTP) suite of applications provided by SAP."}, {"instruction": "What is cGov Credentialing?", "input": "", "output": "cGov Credentialing is a solution by cGov used to manage the credentials of medical professionals and others across WA Health. 
It is a replacement of the legacy Equifax solution."}, {"instruction": "What is CMGS and what does it do?", "input": "", "output": "CMGS stands for Contract Management and Generation System. It is a Custom Solution implemented by WACHS ICT to manage Contract Medical Professionals and generate contracts for all employees. It is a replacement of the legacy solution named WACHSView."}, {"instruction": "What is the purpose of the provided text?", "input": "", "output": "This text is a glossary of terms from a document about HR solutions."}, {"instruction": "What does the acronym 'CMGS' stand for and what is its function?", "input": "", "output": "CMGS stands for Contract Management and Generation System. It's a custom solution by WACHS ICT to manage contracts for medical professionals and all employees, replacing the legacy system WACHSView."}, {"instruction": "Which solution is being replaced by cGov Credentialing?", "input": "", "output": "cGov Credentialing is replacing the legacy solution, Equifax."}, {"instruction": "What does 'PPR' stand for in the context of this glossary?", "input": "", "output": "PPR stands for Parallel Payroll Testing."}, {"instruction": "What is SAP CPI?", "input": "", "output": "SAP CPI is an Integration Middleware tool provided by SAP and is part of the SAP Business Transformation Platform (BTP) suite of applications."}, {"instruction": "What is the name of the program replacing the legacy HR solutions?", "input": "", "output": "The program replacing the legacy HR solutions is called HRMIS (HR Management Information Systems)."}]
|