Juan Palomino commited on
Commit
6a58fd5
·
1 Parent(s): 08d5359

Deploy Document Format Converter

Browse files
.gitattributes CHANGED
@@ -1,35 +1,2 @@
1
- *.7z filter=lfs diff=lfs merge=lfs -text
2
- *.arrow filter=lfs diff=lfs merge=lfs -text
3
- *.bin filter=lfs diff=lfs merge=lfs -text
4
- *.bz2 filter=lfs diff=lfs merge=lfs -text
5
- *.ckpt filter=lfs diff=lfs merge=lfs -text
6
- *.ftz filter=lfs diff=lfs merge=lfs -text
7
- *.gz filter=lfs diff=lfs merge=lfs -text
8
- *.h5 filter=lfs diff=lfs merge=lfs -text
9
- *.joblib filter=lfs diff=lfs merge=lfs -text
10
- *.lfs.* filter=lfs diff=lfs merge=lfs -text
11
- *.mlmodel filter=lfs diff=lfs merge=lfs -text
12
- *.model filter=lfs diff=lfs merge=lfs -text
13
- *.msgpack filter=lfs diff=lfs merge=lfs -text
14
- *.npy filter=lfs diff=lfs merge=lfs -text
15
- *.npz filter=lfs diff=lfs merge=lfs -text
16
- *.onnx filter=lfs diff=lfs merge=lfs -text
17
- *.ot filter=lfs diff=lfs merge=lfs -text
18
- *.parquet filter=lfs diff=lfs merge=lfs -text
19
- *.pb filter=lfs diff=lfs merge=lfs -text
20
- *.pickle filter=lfs diff=lfs merge=lfs -text
21
- *.pkl filter=lfs diff=lfs merge=lfs -text
22
- *.pt filter=lfs diff=lfs merge=lfs -text
23
- *.pth filter=lfs diff=lfs merge=lfs -text
24
- *.rar filter=lfs diff=lfs merge=lfs -text
25
- *.safetensors filter=lfs diff=lfs merge=lfs -text
26
- saved_model/**/* filter=lfs diff=lfs merge=lfs -text
27
- *.tar.* filter=lfs diff=lfs merge=lfs -text
28
- *.tar filter=lfs diff=lfs merge=lfs -text
29
- *.tflite filter=lfs diff=lfs merge=lfs -text
30
- *.tgz filter=lfs diff=lfs merge=lfs -text
31
- *.wasm filter=lfs diff=lfs merge=lfs -text
32
- *.xz filter=lfs diff=lfs merge=lfs -text
33
- *.zip filter=lfs diff=lfs merge=lfs -text
34
- *.zst filter=lfs diff=lfs merge=lfs -text
35
- *tfevents* filter=lfs diff=lfs merge=lfs -text
 
1
+ *.docx filter=lfs diff=lfs merge=lfs -text
2
+ *.pdf filter=lfs diff=lfs merge=lfs -text
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
README.md CHANGED
@@ -1,12 +1,61 @@
1
- ---
2
- title: Docgen
3
- emoji:
4
- colorFrom: purple
5
- colorTo: gray
6
- sdk: gradio
7
- sdk_version: 5.27.0
8
- app_file: app.py
9
- pinned: false
10
- ---
11
-
12
- Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Document Format Converter
2
+
3
+ Upload document and select target format for conversion.
4
+
5
+ ## Web Interface
6
+
7
+ This application provides a web interface for document conversion. Simply upload your document and select the target format.
8
+
9
+ ## API Endpoints
10
+
11
+ The application also provides API endpoints for programmatic access:
12
+
13
+ ### 1. DOCX to JSON Conversion
14
+
15
+ ```python
16
+ import requests
17
+
18
+ # URL of the API endpoint
19
+ url = "https://huggingface.co/spaces/ObiJuanCodenobi/docgen/api/docx-to-json"
20
+
21
+ # Prepare the file for upload
22
+ files = {
23
+ 'file': ('document.docx', open('path/to/your/document.docx', 'rb'), 'application/vnd.openxmlformats-officedocument.wordprocessingml.document')
24
+ }
25
+
26
+ # Send the request
27
+ response = requests.post(url, files=files)
28
+
29
+ # Get the JSON result
30
+ if response.status_code == 200:
31
+ json_data = response.json()
32
+ print(json_data)
33
+ else:
34
+ print(f"Error: {response.status_code}, {response.text}")
35
+ ```
36
+
37
+ ### 2. JSON to DOCX Conversion
38
+
39
+ ```python
40
+ import requests
41
+
42
+ # URL of the API endpoint
43
+ url = "https://huggingface.co/spaces/ObiJuanCodenobi/docgen/api/json-to-docx"
44
+
45
+ # Your JSON document data
46
+ json_data = {
47
+ # Your document structure here
48
+ }
49
+
50
+ # Send the request
51
+ headers = {'Content-Type': 'application/json'}
52
+ response = requests.post(url, json=json_data, headers=headers)
53
+
54
+ # Save the DOCX file
55
+ if response.status_code == 200:
56
+ with open('converted.docx', 'wb') as f:
57
+ f.write(response.content)
58
+ print("DOCX file saved as 'converted.docx'")
59
+ else:
60
+ print(f"Error: {response.status_code}, {response.text}")
61
+ ```
app.py ADDED
@@ -0,0 +1,592 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import gradio as gr
2
+ import pypandoc
3
+ import os
4
+ from pdf2docx import Converter
5
+ from docx import Document
6
+ from docx.table import _Cell
7
+ from docx.shared import Inches, Pt, RGBColor
8
+ from docx.enum.text import WD_ALIGN_PARAGRAPH, WD_COLOR_INDEX
9
+ from docx.oxml.ns import qn
10
+ import json
11
+ import base64
12
+ import hashlib
13
+ import sys
14
+ import tempfile
15
+ from flask import Flask, request, jsonify, send_file
16
+ import threading
17
+
18
+ os.system('sudo apt-get install texlive')
19
+
20
def ensure_pandoc_installed():
    """Ensure a pandoc binary is available, downloading one via pypandoc if not."""
    try:
        # Probing the version raises OSError when no pandoc binary is reachable.
        pypandoc.get_pandoc_version()
    except OSError:
        # No usable pandoc on PATH: fetch the binary bundled by pypandoc.
        print("Pandoc not found, downloading...")
        pypandoc.download_pandoc()
        print("Pandoc downloaded successfully.")
    else:
        print("Pandoc is already installed and accessible.")
30
+
31
# Make sure pandoc is present before the format lists below query it.
ensure_pandoc_installed()
33
+
34
# Input formats offered in the UI (uppercase names).
#
# BUG FIX: the original expression was
#     sorted(list(pypandoc.get_pandoc_formats()[0]).append('PDF') or [...fallback...])
# list.append() returns None, so `None or [...]` ALWAYS discarded the
# pandoc-derived list and used the hard-coded fallback. Build the list
# explicitly instead, keeping the fallback for when pandoc is unavailable.
_FALLBACK_INPUT_FORMATS = [
    'BIBLATEX', 'BIBTEX', 'BITS', 'COMMONMARK', 'COMMONMARK_X', 'CREOLE', 'CSLJSON', 'CSV',
    'DJOT', 'DOCBOOK', 'DOCX', 'DOKUWIKI', 'ENDNOTEXML', 'EPUB', 'FB2', 'GFM', 'HADDOCK',
    'HTML', 'IPYNB', 'JATS', 'JIRA', 'JSON', 'LATEX', 'MAN', 'MARKDOWN', 'MARKDOWN_GITHUB',
    'MARKDOWN_MMD', 'MARKDOWN_PHPEXTRA', 'MARKDOWN_STRICT', 'MDOC', 'MEDIAWIKI', 'MUSE',
    'NATIVE', 'ODT', 'OPML', 'ORG', 'PDF', 'POD', 'RIS', 'RST', 'RTF', 'T2T', 'TEXTILE',
    'TIKIWIKI', 'TSV', 'TWIKI', 'TYPST', 'VIMWIKI'
]

try:
    _pandoc_inputs = [fmt.upper() for fmt in pypandoc.get_pandoc_formats()[0]]
except OSError:
    _pandoc_inputs = []
if not _pandoc_inputs:
    _pandoc_inputs = list(_FALLBACK_INPUT_FORMATS)

# 'PDF' is excluded here, matching the original filter.
# NOTE(review): convert_document() CAN ingest PDFs (via pdf2docx), so
# excluding PDF from the upload whitelist may itself be unintended — confirm.
input_supported_formats = sorted({fmt for fmt in _pandoc_inputs if fmt != 'PDF'})
43
+
44
# Output formats offered in the "Download As..." dropdown, alphabetical.
# PDF is deliberately excluded (pandoc's PDF output needs a LaTeX toolchain).
_OUTPUT_FORMAT_NAMES = [
    "ANSI", "ASCIIDOC", "ASCIIDOC_LEGACY", "ASCIIDOCTOR", "BEAMER", "BIBLATEX", "BIBTEX", "CHUNKEDHTML",
    "COMMONMARK", "COMMONMARK_X", "CONTEXT", "CSLJSON", "DJOT", "DOCBOOK", "DOCBOOK4", "DOCBOOK5",
    "DOCX", "DOKUWIKI", "DZSLIDES", "EPUB", "EPUB2", "EPUB3", "FB2", "GFM", "HADDOCK", "HTML",
    "HTML4", "HTML5", "ICML", "IPYNB", "JATS", "JATS_ARCHIVING", "JATS_ARTICLEAUTHORING",
    "JATS_PUBLISHING", "JIRA", "JSON", "LATEX", "MAN", "MARKDOWN", "MARKDOWN_GITHUB",
    "MARKDOWN_MMD", "MARKDOWN_PHPEXTRA", "MARKDOWN_STRICT", "MARKUA", "MEDIAWIKI", "MS",
    "MUSE", "NATIVE", "ODT", "OPENDOCUMENT", "OPML", "ORG", "PDF", "PLAIN", "PPTX", "REVEALJS",
    "RST", "RTF", "S5", "SLIDEOUS", "SLIDY", "TEI", "TEXINFO", "TEXTILE", "TYPST", "XWIKI", "ZIMWIKI"
]

output_supported_formats = sorted(
    name.upper() for name in _OUTPUT_FORMAT_NAMES if name != "PDF"
)
54
+
55
def convert_pdf_to_docx(pdf_file):
    """Convert *pdf_file* to a DOCX file next to it and return the new path.

    FIX: the pdf2docx Converter was never closed, leaking the underlying PDF
    file handle; close it in a finally block so errors don't leak it either.
    """
    output_docx = f"{os.path.splitext(pdf_file)[0]}.docx"
    cv = Converter(pdf_file)
    try:
        cv.convert(output_docx, start=0, end=None)  # all pages
    finally:
        cv.close()
    return output_docx
60
+
61
def get_preview(file_path):
    """Return an HTML fragment previewing *file_path* for the Gradio panes.

    Text-like files show their first 2000 characters, PDFs are embedded,
    DOCX files are rendered as simple HTML; other extensions (and any error)
    yield a short message instead of raising.

    FIX: file/document content is now passed through html.escape() before
    being interpolated into markup — previously any '<', '>' or '&' in the
    document broke the preview and allowed markup/script injection.
    """
    import html as _html  # stdlib; local import keeps module-level imports unchanged
    ext = os.path.splitext(file_path)[1].lower()
    try:
        if ext in ['.txt', '.md', '.csv', '.json']:
            with open(file_path, 'r', encoding='utf-8', errors='ignore') as f:
                content = f.read(2000)  # Preview first 2000 chars
            return f"<pre style='max-height:300px;overflow:auto'>{_html.escape(content)}</pre>"
        elif ext == '.pdf':
            # Show PDF inline using HTML embed.
            # NOTE(review): a server-local path in src= is unlikely to resolve
            # in the user's browser — confirm this preview actually renders.
            return f"<embed src='{file_path}' type='application/pdf' width='100%' height='400px' />"
        elif ext == '.docx':
            try:
                doc = Document(file_path)
                html = ""
                # Collect section header text (paragraphs and tables) first.
                headers = []
                for section in doc.sections:
                    header_texts = []
                    # Paragraphs
                    for p in section.header.paragraphs:
                        if p.text.strip():
                            header_texts.append(_html.escape(p.text.strip()))
                    # Tables
                    for table in section.header.tables:
                        for row in table.rows:
                            row_text = " ".join(_html.escape(cell.text.strip()) for cell in row.cells if cell.text.strip())
                            if row_text:
                                header_texts.append(row_text)
                    if header_texts:
                        headers.append(" | ".join(header_texts))
                if headers:
                    html += f"<div style='font-weight:bold;font-size:1.2em;margin-bottom:8px;'>{' | '.join(headers)}</div>"
                # Body paragraphs, truncated after 30 to keep the pane light.
                para_count = 0
                for para in doc.paragraphs:
                    text = para.text.strip()
                    if text:
                        html += f"<p>{_html.escape(text)}</p>"
                        para_count += 1
                        if para_count > 30:
                            html += "<p><i>Preview truncated...</i></p>"
                            break
                return f"<div style='max-height:300px;overflow:auto'>{html}</div>"
            except Exception as e:
                return f"<b>Error reading DOCX:</b> {_html.escape(str(e))}"
        elif ext == '.doc':
            return f"<b>DOC file:</b> {_html.escape(os.path.basename(file_path))} (Preview not supported)"
        else:
            return f"<b>File:</b> {_html.escape(os.path.basename(file_path))} (Preview not supported)"
    except Exception as e:
        return f"<b>Error generating preview:</b> {_html.escape(str(e))}"
111
+
112
def extract_runs(paragraph):
    """Serialize each run of *paragraph* to a dict.

    Every dict has a "text" key; formatting keys (bold/italic/underline,
    font_size, font_name, color or color_theme, highlight) are added only
    when the run actually carries that formatting.
    """
    serialized = []
    for run in paragraph.runs:
        info = {"text": run.text}
        for flag in ("bold", "italic", "underline"):
            if getattr(run, flag):
                info[flag] = True
        font = run.font
        if font:
            if font.size:
                info["font_size"] = font.size.pt
            if font.name:
                info["font_name"] = font.name
            # Color: explicit RGB wins over a theme reference.
            if font.color:
                if font.color.rgb:
                    info["color"] = str(font.color.rgb)
                elif font.color.theme_color:
                    info["color_theme"] = str(font.color.theme_color)
            # Highlight color (guarded: attribute may be absent on some objects).
            if getattr(font, "highlight_color", None):
                info["highlight"] = str(font.highlight_color)
        serialized.append(info)
    return serialized
139
+
140
# Classify a paragraph as heading / list item / plain paragraph and serialize it.
def extract_paragraph_block(paragraph):
    """Serialize *paragraph* into a typed block dict.

    The "type" key is "heading" (with a numeric "level"), "list_item" (with
    "list_type" of "number" or "bullet"), or "paragraph"; all variants carry
    "runs", "alignment" and the source "style" name.
    """
    style_name = paragraph.style.name if paragraph.style else "Normal"
    alignment = str(paragraph.alignment) if paragraph.alignment else "left"
    runs = extract_runs(paragraph)

    if style_name.startswith("Heading"):
        # "Heading 3" -> level 3; fall back to 1 for oddly named styles.
        try:
            level = int(style_name.split()[-1])
        except Exception:
            level = 1
        return {
            "type": "heading",
            "level": level,
            "runs": runs,
            "alignment": alignment,
            "style": style_name,
        }

    if "List" in style_name:
        return {
            "type": "list_item",
            "list_type": "number" if "Number" in style_name else "bullet",
            "runs": runs,
            "alignment": alignment,
            "style": style_name,
        }

    return {
        "type": "paragraph",
        "runs": runs,
        "alignment": alignment,
        "style": style_name,
    }
173
+
174
# Serialize an element's paragraphs (with spacing info) and tables.
def extract_blocks(element, output_dir, image_prefix):
    """Walk *element*'s paragraphs and tables into a list of block dicts.

    Empty paragraphs are skipped. Paragraph spacing (space_before/space_after
    in points, line_spacing) is recorded when set. output_dir/image_prefix are
    accepted for interface parity with the image extractor but unused here.
    """
    blocks = []
    for para in getattr(element, 'paragraphs', []):
        if not para.text.strip():
            continue
        serialized = extract_paragraph_block(para)
        fmt = para.paragraph_format
        if fmt:
            if fmt.space_before:
                serialized["space_before"] = fmt.space_before.pt
            if fmt.space_after:
                serialized["space_after"] = fmt.space_after.pt
            if fmt.line_spacing:
                serialized["line_spacing"] = fmt.line_spacing
        blocks.append(serialized)
    for table in getattr(element, 'tables', []):
        blocks.append(extract_table_block(table))
    return blocks
195
+
196
def extract_table_block(table):
    """Serialize *table* into {"type": "table", "rows": [...]} where each row
    is a list of cells and each cell a list of paragraph blocks."""
    serialized_rows = []
    for row in table.rows:
        serialized_cells = []
        for cell in row.cells:
            # Deduplicate paragraph objects by identity.
            # NOTE(review): python-docx builds fresh Paragraph wrappers on each
            # access, so this id()-based dedup likely never filters anything —
            # kept for behavioral parity; confirm before relying on it.
            seen_ids = set()
            distinct = []
            for para in cell.paragraphs:
                if id(para) in seen_ids:
                    continue
                seen_ids.add(id(para))
                distinct.append(para)
            serialized_cells.append(
                [extract_paragraph_block(p) for p in distinct if p.text.strip()]
            )
        serialized_rows.append(serialized_cells)
    return {"type": "table", "rows": serialized_rows}
212
+
213
def extract_images_from_doc(doc, output_dir, image_prefix):
    """Save every image relationship of *doc* into *output_dir* and return
    one "image" block per saved file.

    Files are named "<image_prefix>_<sha1-prefix>.<ext>" so identical blobs
    map to the same name.
    """
    image_blocks = []
    image_reltype = 'http://schemas.openxmlformats.org/officeDocument/2006/relationships/image'
    for rel in doc.part.rels.values():
        if rel.reltype != image_reltype:
            continue
        blob = rel.target_part.blob
        digest = hashlib.sha1(blob).hexdigest()[:8]
        extension = rel.target_part.content_type.split('/')[-1]
        image_id = f"{image_prefix}_{digest}"
        filename = f"{image_id}.{extension}"
        with open(os.path.join(output_dir, filename), 'wb') as out:
            out.write(blob)
        image_blocks.append({
            "type": "image",
            "image_id": image_id,
            "image_format": extension,
            "path": filename,
        })
    return image_blocks
233
+
234
def add_runs_to_paragraph(paragraph, runs):
    """Append serialized runs (the shape produced by extract_runs) to
    *paragraph*, restoring bold/italic/underline, font size and name, color
    and highlight.

    Formatting that cannot be restored (malformed color strings, unknown
    highlight names) is silently skipped so one bad run never aborts the
    document rebuild.
    """
    for run_info in runs:
        run = paragraph.add_run(run_info.get("text", ""))
        if run_info.get("bold"): run.bold = True
        if run_info.get("italic"): run.italic = True
        if run_info.get("underline"): run.underline = True
        if run_info.get("font_size"): run.font.size = Pt(run_info["font_size"])
        if run_info.get("font_name"): run.font.name = run_info["font_name"]
        # Set color: explicit RGB takes precedence over a theme reference.
        if run_info.get("color"):
            try:
                run.font.color.rgb = RGBColor.from_string(run_info["color"].replace("#", ""))
            except Exception:
                pass
        elif run_info.get("color_theme"):
            try:
                run.font.color.theme_color = int(run_info["color_theme"])
            except Exception:
                pass
        if run_info.get("highlight"):
            try:
                if run_info["highlight"].isdigit():
                    run.font.highlight_color = int(run_info["highlight"])
                else:
                    # NOTE(review): extract_runs stores str(highlight_color),
                    # e.g. "YELLOW (7)", which is not a valid WD_COLOR_INDEX
                    # member name — this lookup probably always raises and is
                    # swallowed below. Confirm highlights survive a round-trip.
                    run.font.highlight_color = WD_COLOR_INDEX[run_info["highlight"]]
            except Exception:
                pass
261
+
262
# Render one serialized block back into a python-docx container.
def _apply_paragraph_format(para, block):
    """Apply serialized alignment and spacing from *block* to *para*."""
    align = block.get("alignment", "left")
    if align == "center":
        para.alignment = WD_ALIGN_PARAGRAPH.CENTER
    elif align == "right":
        para.alignment = WD_ALIGN_PARAGRAPH.RIGHT
    else:
        para.alignment = WD_ALIGN_PARAGRAPH.LEFT
    if "space_before" in block:
        para.paragraph_format.space_before = Pt(block["space_before"])
    if "space_after" in block:
        para.paragraph_format.space_after = Pt(block["space_after"])
    if "line_spacing" in block:
        para.paragraph_format.line_spacing = block["line_spacing"]


def add_block_to_doc(doc, block, image_dir):
    """Render one serialized *block* (heading / list_item / paragraph / table /
    image) into *doc*, which may be a Document, a table cell, or a
    header/footer part.

    FIXES vs. original:
    - headings: the old code passed the joined run text to add_heading() AND
      re-added the same runs, duplicating the heading text; the heading is now
      created empty and populated only via add_runs_to_paragraph.
    - tables: Document.add_table() accepts no `width` keyword (only
      _Cell.add_table does), so top-level table blocks raised TypeError; the
      call now falls back to the width-less signature.
    """
    block_type = block["type"]
    if block_type == "heading":
        para = doc.add_heading("", level=block.get("level", 1))
        add_runs_to_paragraph(para, block.get("runs", []))
        _apply_paragraph_format(para, block)
    elif block_type == "list_item":
        style = "List Number" if block.get("list_type") == "number" else "List Bullet"
        para = doc.add_paragraph(style=style)
        add_runs_to_paragraph(para, block.get("runs", []))
        _apply_paragraph_format(para, block)
    elif block_type == "paragraph":
        para = doc.add_paragraph()
        add_runs_to_paragraph(para, block.get("runs", []))
        _apply_paragraph_format(para, block)
    elif block_type == "table":
        rows = block.get("rows", [])
        if rows:
            try:
                table_width = doc.sections[0].page_width
            except Exception:
                table_width = Inches(6)
            try:
                # _Cell.add_table requires a width argument...
                table = doc.add_table(rows=len(rows), cols=len(rows[0]), width=table_width)
            except TypeError:
                # ...but Document.add_table does not accept one.
                table = doc.add_table(rows=len(rows), cols=len(rows[0]))
            for i, row in enumerate(rows):
                for j, cell_blocks in enumerate(row):
                    cell = table.cell(i, j)
                    for para_block in cell_blocks:
                        # NOTE(review): recursing with the cell as *doc* relies
                        # on _Cell exposing add_paragraph/add_table; a "heading"
                        # block inside a cell would still fail (no
                        # _Cell.add_heading) — confirm with real documents.
                        add_block_to_doc(cell, para_block, image_dir)
    elif block_type == "image":
        img_path = os.path.join(image_dir, block["path"])
        if os.path.exists(img_path):
            width = block.get("width")
            height = block.get("height")
            if width and height:
                # Stored sizes are assumed to be pixels at 96 dpi — TODO confirm.
                doc.add_picture(img_path, width=Inches(width / 96), height=Inches(height / 96))
            else:
                doc.add_picture(img_path)
321
+
322
def add_blocks_to_doc(doc, blocks, image_dir):
    """Render every serialized block in *blocks* into *doc*.

    Containers that look like a normal document/header/footer (they expose
    add_paragraph or header/footer markers) get blocks added directly; for
    anything else the add is attempted best-effort with failures swallowed.
    """
    # The container's shape doesn't change mid-loop, so decide once up front.
    direct = (
        hasattr(doc, 'is_header')
        or hasattr(doc, 'is_footer')
        or hasattr(doc, 'add_paragraph')
    )
    for block in blocks:
        if direct:
            add_block_to_doc(doc, block, image_dir)
        else:
            try:
                add_block_to_doc(doc, block, image_dir)
            except Exception:
                pass
333
+
334
def extract_all_sections(doc, output_dir, image_prefix):
    """Serialize the header/footer parts of every section of *doc*.

    Returns one dict per section, keyed by part name ("header", "footer",
    first/even-page variants) with the part's extracted blocks as values;
    parts that are absent on a section are simply omitted.
    """
    part_names = (
        "header", "first_page_header", "even_page_header",
        "footer", "first_page_footer", "even_page_footer",
    )
    sections = []
    for idx, section in enumerate(doc.sections):
        serialized = {}
        for name in part_names:
            part = getattr(section, name, None)
            if part:
                serialized[name] = extract_blocks(part, output_dir, f"{image_prefix}_sec{idx}_{name}")
        sections.append(serialized)
    return sections
345
+
346
def convert_document(doc_file, target_format):
    """Convert *doc_file* (a path string or an object with .name) to
    *target_format* and return (input_preview_html, output_preview_html,
    output_path).

    Routing: PDFs are first converted to DOCX via pdf2docx; DOCX→JSON and
    JSON→DOCX use the custom block (de)serializers in this module; everything
    else is delegated to pandoc.

    NOTE(review): error returns are inconsistent — the unsupported-file path
    returns (None, message, None) while the exception handler returns
    (message, None, None). Callers that unpack `_, _, path` are unaffected,
    but anything reading the previews should be checked.
    """
    import json
    from docx import Document as DocxDocument
    try:
        target_format = target_format.lower()
        orig_file_path = None
        # Handle Gradio NamedString or file-like object
        if hasattr(doc_file, 'name'):
            orig_file_path = doc_file.name
        elif isinstance(doc_file, str):
            orig_file_path = doc_file
        else:
            return None, "Error: Unsupported file type.", None
        # If the file is a PDF, convert it to DOCX first
        if orig_file_path.lower().endswith('.pdf'):
            print("Converting PDF to DOCX...")
            doc_file = convert_pdf_to_docx(orig_file_path)
            print("PDF converted to DOCX.")
            orig_file_path = doc_file
        # Output lands in the current working directory, prefixed "docgen_".
        base_name = os.path.splitext(os.path.basename(orig_file_path))[0]
        output_file = f"docgen_{base_name}.{target_format.lower()}"
        # Custom DOCX to JSON extraction
        if orig_file_path.lower().endswith('.docx') and target_format == 'json':
            doc = Document(orig_file_path)
            # dirname of a bare filename is "" → images are written to CWD.
            output_dir = os.path.dirname(output_file)
            image_prefix = base_name
            image_blocks = extract_images_from_doc(doc, output_dir, image_prefix)
            sections = extract_all_sections(doc, output_dir, image_prefix)
            body_blocks = extract_blocks(doc, output_dir, image_prefix)
            doc_json = {
                "sections": sections,
                "body": body_blocks + image_blocks,
                "metadata": {
                    "title": getattr(doc.core_properties, 'title', ''),
                    "author": getattr(doc.core_properties, 'author', ''),
                }
            }
            with open(output_file, 'w', encoding='utf-8') as f:
                json.dump(doc_json, f, ensure_ascii=False, indent=2)
        elif orig_file_path.lower().endswith('.json') and target_format == 'docx':
            # JSON to DOCX
            with open(orig_file_path, 'r', encoding='utf-8') as f:
                doc_json = json.load(f)
            doc = DocxDocument()
            # Image blocks store paths relative to the JSON file's directory.
            image_dir = os.path.dirname(orig_file_path)
            # Sections (headers/footers)
            if "sections" in doc_json:
                # Ensure doc has enough sections
                while len(doc.sections) < len(doc_json["sections"]):
                    doc.add_section()
                for idx, sec in enumerate(doc_json["sections"]):
                    section = doc.sections[idx]
                    for htype, attr in [("header", "header"), ("first_page_header", "first_page_header"), ("even_page_header", "even_page_header"),
                                        ("footer", "footer"), ("first_page_footer", "first_page_footer"), ("even_page_footer", "even_page_footer")]:
                        if htype in sec:
                            part = getattr(section, attr, None)
                            if part:
                                # Remove the default empty paragraphs so the
                                # restored blocks are the only content.
                                for p in list(part.paragraphs):
                                    p._element.getparent().remove(p._element)
                                add_blocks_to_doc(part, sec[htype], image_dir)
            # Body
            if "body" in doc_json:
                add_blocks_to_doc(doc, doc_json["body"], image_dir)
            # Metadata
            if "metadata" in doc_json:
                meta = doc_json["metadata"]
                if "title" in meta:
                    doc.core_properties.title = meta["title"]
                if "author" in meta:
                    doc.core_properties.author = meta["author"]
            doc.save(output_file)
        else:
            # Use Pandoc for other conversions
            pypandoc.convert_file(
                orig_file_path,
                target_format.lower(),
                outputfile=output_file,
            )
        input_preview = get_preview(orig_file_path)
        output_preview = get_preview(output_file)
        return input_preview, output_preview, output_file
    except Exception as e:
        return f"Error: {e}", None, None
430
+
431
def parity_check(docx_path):
    """Round-trip *docx_path* through DOCX→JSON→DOCX and compare the extracted
    blocks of both documents.

    CLI helper (see `--parity-check` in __main__). Prints a unified diff for
    each mismatching section and for the body, and returns True only on a
    byte-level-equivalent block structure.
    """
    import tempfile  # NOTE(review): unused here — candidate for removal.
    print(f"[Parity Check] Testing round-trip for: {docx_path}")
    class FileLike: # Fake file-like for CLI — convert_document expects .name
        def __init__(self, name): self.name = name
    _, _, json_out = convert_document(FileLike(docx_path), 'json')
    if not json_out or not os.path.exists(json_out):
        print("Failed to produce JSON from DOCX.")
        return False
    _, _, docx_out = convert_document(FileLike(json_out), 'docx')
    if not docx_out or not os.path.exists(docx_out):
        print("Failed to produce DOCX from JSON.")
        return False
    def extract_all_sections_for_parity(docx_path):
        # Re-extract header/footer and body blocks for comparison only; image
        # side effects land next to the input file.
        doc = Document(docx_path)
        sections = []
        for idx, section in enumerate(doc.sections):
            sec = {}
            for htype, attr in [("header", "header"), ("first_page_header", "first_page_header"), ("even_page_header", "even_page_header"),
                                ("footer", "footer"), ("first_page_footer", "first_page_footer"), ("even_page_footer", "even_page_footer")]:
                part = getattr(section, attr, None)
                if part:
                    sec[htype] = extract_blocks(part, os.path.dirname(docx_path), f"sec{idx}_{htype}")
            sections.append(sec)
        body = extract_blocks(doc, os.path.dirname(docx_path), os.path.splitext(os.path.basename(docx_path))[0])
        return {"sections": sections, "body": body}
    orig = extract_all_sections_for_parity(docx_path)
    roundtrip = extract_all_sections_for_parity(docx_out)
    import difflib, pprint
    def blocks_to_str(blocks):
        # Stable pretty-printed form so difflib produces a readable diff.
        return pprint.pformat(blocks, width=120)
    if orig == roundtrip:
        print("[Parity Check] PASS: Round-trip blocks are identical!")
        return True
    else:
        print("[Parity Check] FAIL: Differences found.")
        # Compare per section
        for idx, (orig_sec, round_sec) in enumerate(zip(orig["sections"], roundtrip["sections"])):
            if orig_sec != round_sec:
                print(f"Section {idx} header/footer mismatch:")
                diff = difflib.unified_diff(blocks_to_str(orig_sec).splitlines(), blocks_to_str(round_sec).splitlines(), fromfile='original', tofile='roundtrip', lineterm='')
                print('\n'.join(diff))
        if orig["body"] != roundtrip["body"]:
            print("Body mismatch:")
            diff = difflib.unified_diff(blocks_to_str(orig["body"]).splitlines(), blocks_to_str(roundtrip["body"]).splitlines(), fromfile='original', tofile='roundtrip', lineterm='')
            print('\n'.join(diff))
        return False
478
+
479
# Gradio UI: upload on the left (JSON preview), converted output on the right.
with gr.Blocks(css="footer {visibility: hidden}") as demo:
    gr.Markdown("# Document Format Converter\nUpload a document and preview as JSON. Select a format to download in another format.")
    with gr.Row():
        with gr.Column():
            # Accepted extensions are derived from the input format list.
            input_file = gr.File(label="Upload Document", file_types=[f'.{ext.lower()}' for ext in input_supported_formats])
            input_preview = gr.HTML(label="JSON Preview")
        with gr.Column():
            output_format = gr.Dropdown(label="Download As...", choices=output_supported_formats, value="DOCX")
            format_label = gr.Markdown("Previewing as: DOCX")
            output_preview = gr.HTML(label="Output Preview")
            output_file = gr.File(label="Download Converted Document", visible=True)
    # Cross-callback state: the extracted JSON text and the uploaded path.
    json_state = gr.State()
    orig_file_state = gr.State()

    def upload_and_preview(doc_file):
        """Convert the upload to JSON and preview its first 4000 characters.

        Returns (preview_html, json_text, original_path).
        NOTE(review): the error paths access doc_file.name — a plain string
        input would raise AttributeError here; confirm Gradio always passes a
        file-like object with .name.
        """
        _, _, json_path = convert_document(doc_file, "json")
        # Handle conversion failure
        if not json_path or not os.path.exists(json_path):
            error_msg = "Error converting document to JSON."
            return f"<pre style='max-height:300px;overflow:auto'>{error_msg}</pre>", "", doc_file.name
        # Read and preview JSON content
        try:
            with open(json_path, "r", encoding="utf-8") as f:
                json_content = f.read()
        except Exception as e:
            error_msg = f"Error reading JSON: {e}"
            return f"<pre style='max-height:300px;overflow:auto'>{error_msg}</pre>", "", doc_file.name
        preview_html = f"<pre style='max-height:300px;overflow:auto'>{json_content[:4000]}</pre>"
        return preview_html, json_content, doc_file.name

    def convert_and_preview(orig_file_path, output_format):
        """Convert the stored upload to the chosen format and preview it."""
        # Minimal stand-in for the file-like object convert_document expects.
        class F:
            name = orig_file_path
        _, _, out_path = convert_document(F(), output_format.lower())
        preview = get_preview(out_path)
        return f"Previewing as: {output_format}", preview, out_path

    # Wire the callbacks: upload refreshes the JSON pane; changing the format
    # re-converts and refreshes the output pane + download link.
    input_file.upload(upload_and_preview, inputs=input_file, outputs=[input_preview, json_state, orig_file_state])
    output_format.change(convert_and_preview, inputs=[orig_file_state, output_format], outputs=[format_label, output_preview, output_file])
518
+
519
if __name__ == "__main__":
    # CLI mode: `python app.py --parity-check file.docx` runs the round-trip
    # check and exits without starting any server.
    if len(sys.argv) == 3 and sys.argv[1] == "--parity-check":
        parity_check(sys.argv[2])
        sys.exit(0)

    # Create Flask app for the REST endpoints documented in README.md.
    app = Flask(__name__)

    @app.route('/api/docx-to-json', methods=['POST'])
    def api_docx_to_json():
        """POST a .docx as multipart field 'file'; responds with its JSON form."""
        if 'file' not in request.files:
            return jsonify({"error": "No file part"}), 400

        file = request.files['file']
        if file.filename == '':
            return jsonify({"error": "No selected file"}), 400

        if not file.filename.lower().endswith('.docx'):
            return jsonify({"error": "File must be a DOCX document"}), 400

        # Save the uploaded file.
        # NOTE(review): file.filename is client-controlled and used in a path
        # join — consider werkzeug's secure_filename. mkdtemp dirs are also
        # never cleaned up, so they accumulate for the process lifetime.
        temp_dir = tempfile.mkdtemp()
        file_path = os.path.join(temp_dir, file.filename)
        file.save(file_path)

        try:
            # Convert to JSON via the same path the UI uses; the throwaway
            # type() call fabricates the .name attribute convert_document wants.
            _, _, json_path = convert_document(type('obj', (object,), {'name': file_path}), "json")

            if not json_path or not os.path.exists(json_path):
                return jsonify({"error": "Error converting document to JSON"}), 500

            # Read JSON content
            with open(json_path, "r", encoding="utf-8") as f:
                json_content = json.load(f)

            return jsonify(json_content)
        except Exception as e:
            return jsonify({"error": str(e)}), 500

    @app.route('/api/json-to-docx', methods=['POST'])
    def api_json_to_docx():
        """POST a JSON document body; responds with the rebuilt DOCX file."""
        if not request.is_json:
            return jsonify({"error": "Request must be JSON"}), 400

        try:
            # Save the JSON to a temporary file (never cleaned up — see above).
            temp_dir = tempfile.mkdtemp()
            json_path = os.path.join(temp_dir, "document.json")

            with open(json_path, "w", encoding="utf-8") as f:
                json.dump(request.json, f)

            # Convert to DOCX
            _, _, docx_path = convert_document(type('obj', (object,), {'name': json_path}), "docx")

            if not docx_path or not os.path.exists(docx_path):
                return jsonify({"error": "Error converting JSON to DOCX"}), 500

            return send_file(docx_path, as_attachment=True, download_name="converted.docx")
        except Exception as e:
            return jsonify({"error": str(e)}), 500

    # Run both Gradio and Flask.
    # NOTE(review): Flask listens on port 5000 while Gradio owns the main
    # port — on hosted Spaces only one port is exposed, so the README's API
    # URLs may not be reachable; confirm the deployment topology.
    def run_flask():
        app.run(host='0.0.0.0', port=5000)

    # Start Flask in a separate daemon thread so it dies with the process.
    flask_thread = threading.Thread(target=run_flask)
    flask_thread.daemon = True
    flask_thread.start()

    # Start Gradio (blocking); share=True additionally opens a public tunnel.
    demo.launch(share=True)
default.docx ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:2094b5bddffe9cf973d61fe03388413804f034160718494a65db7e98da40d35d
3
+ size 38116
distutils.schema.json ADDED
@@ -0,0 +1,26 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "$schema": "http://json-schema.org/draft-07/schema#",
3
+
4
+ "$id": "https://setuptools.pypa.io/en/latest/deprecated/distutils/configfile.html",
5
+ "title": "``tool.distutils`` table",
6
+ "$$description": [
7
+ "**EXPERIMENTAL** (NOT OFFICIALLY SUPPORTED): Use ``tool.distutils``",
8
+ "subtables to configure arguments for ``distutils`` commands.",
9
+ "Originally, ``distutils`` allowed developers to configure arguments for",
10
+ "``setup.py`` commands via `distutils configuration files",
11
+ "<https://setuptools.pypa.io/en/latest/deprecated/distutils/configfile.html>`_.",
12
+ "See also `the old Python docs <https://docs.python.org/3.11/install/>_`."
13
+ ],
14
+
15
+ "type": "object",
16
+ "properties": {
17
+ "global": {
18
+ "type": "object",
19
+ "description": "Global options applied to all ``distutils`` commands"
20
+ }
21
+ },
22
+ "patternProperties": {
23
+ ".+": {"type": "object"}
24
+ },
25
+ "$comment": "TODO: Is there a practical way of making this schema more specific?"
26
+ }
docgen_000000cgen_3_12-25 Steve Hepner Tail N15Y Jackson Jet 3.docx ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:897a8e9fc721f7b6ada35683aa2911d0a0b905c4910805be3dea2dd2dc0e1146
3
+ size 41520
docgen_00000_3_12-25 Steve Hepner Tail N15Y Jackson Jet 1.docx ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f92619c545b24a112256821a872bcd01076e84458fdd82483ad12961d14ecbee
3
+ size 11792
docgen_010103_12-25 Steve Hepner Tail N15Y Jackson Jet 2.docx ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4f46daf14cdb435f36746bdacfc9bbe2b1ab02134cfc8eeefec6c83f89568377
3
+ size 39598
docgen_3_12-25 Steve Hepner Tail N15Y Jackson Jet.csljson ADDED
@@ -0,0 +1 @@
 
 
1
+ []
docgen_3_12-25 Steve Hepner Tail N15Y Jackson Jet.docx ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:b95e6ce2fc012c96a02521ef4db76259001ccf6b8238eaac4244cfcfdef924f1
3
+ size 11791
docgen_3_12-25 Steve Hepner Tail N15Y Jackson Jet.json ADDED
@@ -0,0 +1,1337 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "sections": [
3
+ {
4
+ "header": [
5
+ {
6
+ "type": "table",
7
+ "rows": [
8
+ [
9
+ [],
10
+ [
11
+ {
12
+ "type": "paragraph",
13
+ "runs": [
14
+ {
15
+ "text": "Steve Hepner / Wednesday March 12 @ 7:30am / Jackson Jet-",
16
+ "bold": true,
17
+ "font_size": 16.0,
18
+ "highlight": "TURQUOISE (3)"
19
+ },
20
+ {
21
+ "text": " ",
22
+ "bold": true,
23
+ "font_size": 16.0
24
+ }
25
+ ],
26
+ "alignment": "CENTER (1)",
27
+ "style": "Normal"
28
+ },
29
+ {
30
+ "type": "paragraph",
31
+ "runs": [
32
+ {
33
+ "text": "TAIL # N15Y",
34
+ "bold": true,
35
+ "font_size": 16.0,
36
+ "highlight": "TURQUOISE (3)"
37
+ },
38
+ {
39
+ "text": "/ ",
40
+ "bold": true,
41
+ "font_size": 16.0,
42
+ "color": "000000",
43
+ "highlight": "TURQUOISE (3)"
44
+ },
45
+ {
46
+ "text": "801-558-2321",
47
+ "bold": true,
48
+ "font_size": 14.0,
49
+ "font_name": "Arial",
50
+ "color": "000000",
51
+ "highlight": "TURQUOISE (3)"
52
+ }
53
+ ],
54
+ "alignment": "CENTER (1)",
55
+ "style": "Normal"
56
+ }
57
+ ],
58
+ []
59
+ ]
60
+ ]
61
+ }
62
+ ],
63
+ "first_page_header": [],
64
+ "even_page_header": [],
65
+ "footer": [
66
+ {
67
+ "type": "paragraph",
68
+ "runs": [
69
+ {
70
+ "text": ""
71
+ },
72
+ {
73
+ "text": ""
74
+ },
75
+ {
76
+ "text": ""
77
+ },
78
+ {
79
+ "text": "1"
80
+ },
81
+ {
82
+ "text": ""
83
+ }
84
+ ],
85
+ "alignment": "RIGHT (2)",
86
+ "style": "Footer"
87
+ }
88
+ ],
89
+ "first_page_footer": [],
90
+ "even_page_footer": []
91
+ }
92
+ ],
93
+ "body": [
94
+ {
95
+ "type": "paragraph",
96
+ "runs": [
97
+ {
98
+ "text": "Thank you for letting us know there WILL be a FA on Board",
99
+ "font_name": "Arial",
100
+ "color": "000000"
101
+ },
102
+ {
103
+ "text": "…. ",
104
+ "font_name": "Arial",
105
+ "color": "FF0000"
106
+ }
107
+ ],
108
+ "alignment": "left",
109
+ "style": "Normal"
110
+ },
111
+ {
112
+ "type": "paragraph",
113
+ "runs": [
114
+ {
115
+ "text": "S",
116
+ "bold": true,
117
+ "font_size": 18.0,
118
+ "font_name": "Arial",
119
+ "color": "222222"
120
+ },
121
+ {
122
+ "text": "andwiches:",
123
+ "bold": true,
124
+ "font_size": 18.0,
125
+ "font_name": "Arial",
126
+ "color": "222222"
127
+ }
128
+ ],
129
+ "alignment": "left",
130
+ "style": "Normal"
131
+ },
132
+ {
133
+ "type": "paragraph",
134
+ "runs": [
135
+ {
136
+ "text": "______",
137
+ "font_name": "Arial",
138
+ "color": "222222"
139
+ },
140
+ {
141
+ "text": "6",
142
+ "font_name": "Arial",
143
+ "color": "222222"
144
+ },
145
+ {
146
+ "text": " pax - ",
147
+ "font_name": "Arial",
148
+ "color": "222222"
149
+ },
150
+ {
151
+ "text": "Build your own ",
152
+ "font_name": "Arial",
153
+ "color": "222222"
154
+ },
155
+ {
156
+ "text": "Deli Platter ",
157
+ "font_name": "Arial",
158
+ "color": "222222"
159
+ },
160
+ {
161
+ "text": "(various breads, meats, cheeses, condiments) ",
162
+ "font_name": "Arial",
163
+ "color": "222222"
164
+ }
165
+ ],
166
+ "alignment": "left",
167
+ "style": "Normal"
168
+ },
169
+ {
170
+ "type": "paragraph",
171
+ "runs": [
172
+ {
173
+ "text": " ",
174
+ "font_name": "Arial",
175
+ "color": "FF0000"
176
+ },
177
+ {
178
+ "text": "on MELAMINE ",
179
+ "bold": true,
180
+ "font_name": "Arial"
181
+ }
182
+ ],
183
+ "alignment": "left",
184
+ "style": "Normal"
185
+ },
186
+ {
187
+ "type": "list_item",
188
+ "list_type": "bullet",
189
+ "runs": [
190
+ {
191
+ "text": "Assorted Sandwich Breads for ",
192
+ "font_name": "Arial",
193
+ "color": "222222"
194
+ },
195
+ {
196
+ "text": "6",
197
+ "font_name": "Arial",
198
+ "color": "222222"
199
+ },
200
+ {
201
+ "text": " pax ",
202
+ "font_name": "Arial",
203
+ "color": "222222"
204
+ }
205
+ ],
206
+ "alignment": "left",
207
+ "style": "List Paragraph"
208
+ },
209
+ {
210
+ "type": "list_item",
211
+ "list_type": "bullet",
212
+ "runs": [
213
+ {
214
+ "text": "Meats and cheese for ",
215
+ "font_name": "Arial",
216
+ "color": "222222"
217
+ },
218
+ {
219
+ "text": "6",
220
+ "font_name": "Arial",
221
+ "color": "222222"
222
+ },
223
+ {
224
+ "text": " pax sandwiches- melamine",
225
+ "font_name": "Arial",
226
+ "color": "222222"
227
+ }
228
+ ],
229
+ "alignment": "left",
230
+ "style": "List Paragraph"
231
+ },
232
+ {
233
+ "type": "list_item",
234
+ "list_type": "bullet",
235
+ "runs": [
236
+ {
237
+ "text": "Sandwich garnishes ",
238
+ "font_name": "Arial",
239
+ "color": "222222"
240
+ },
241
+ {
242
+ "text": "for 6 ",
243
+ "font_name": "Arial",
244
+ "color": "222222"
245
+ },
246
+ {
247
+ "text": "on melamine",
248
+ "font_name": "Arial",
249
+ "color": "222222"
250
+ }
251
+ ],
252
+ "alignment": "left",
253
+ "style": "List Paragraph"
254
+ },
255
+ {
256
+ "type": "list_item",
257
+ "list_type": "bullet",
258
+ "runs": [
259
+ {
260
+ "text": "Mustard and Mayo on side",
261
+ "font_name": "Arial",
262
+ "color": "222222"
263
+ }
264
+ ],
265
+ "alignment": "left",
266
+ "style": "List Paragraph"
267
+ },
268
+ {
269
+ "type": "paragraph",
270
+ "runs": [
271
+ {
272
+ "text": "Salads:",
273
+ "bold": true,
274
+ "font_size": 18.0,
275
+ "font_name": "Arial",
276
+ "color": "222222"
277
+ },
278
+ {
279
+ "text": " ",
280
+ "font_size": 18.0,
281
+ "font_name": "Arial",
282
+ "color": "222222"
283
+ },
284
+ {
285
+ "text": "BULK for ",
286
+ "bold": true,
287
+ "italic": true,
288
+ "font_name": "Arial"
289
+ },
290
+ {
291
+ "text": "6",
292
+ "bold": true,
293
+ "italic": true,
294
+ "font_name": "Arial"
295
+ },
296
+ {
297
+ "text": " pax each",
298
+ "bold": true,
299
+ "italic": true,
300
+ "font_name": "Arial"
301
+ }
302
+ ],
303
+ "alignment": "left",
304
+ "style": "Normal",
305
+ "line_spacing": 1.0791666666666666
306
+ },
307
+ {
308
+ "type": "paragraph",
309
+ "runs": [
310
+ {
311
+ "text": "______",
312
+ "bold": true,
313
+ "font_size": 14.0,
314
+ "font_name": "Arial",
315
+ "color": "222222"
316
+ },
317
+ {
318
+ "text": "6",
319
+ "bold": true,
320
+ "font_size": 14.0,
321
+ "font_name": "Arial",
322
+ "color": "222222"
323
+ },
324
+ {
325
+ "text": " pax – Chicken Caesar Salads - BULK-",
326
+ "font_size": 14.0,
327
+ "font_name": "Arial",
328
+ "color": "222222"
329
+ },
330
+ {
331
+ "text": " ",
332
+ "font_name": "Arial",
333
+ "color": "222222"
334
+ },
335
+ {
336
+ "text": "and then together in large zip",
337
+ "font_name": "Arial",
338
+ "color": "222222"
339
+ }
340
+ ],
341
+ "alignment": "left",
342
+ "style": "Normal"
343
+ },
344
+ {
345
+ "type": "list_item",
346
+ "list_type": "bullet",
347
+ "runs": [
348
+ {
349
+ "text": "Romaine for "
350
+ },
351
+ {
352
+ "text": "6"
353
+ },
354
+ {
355
+ "text": ", "
356
+ }
357
+ ],
358
+ "alignment": "left",
359
+ "style": "List Paragraph"
360
+ },
361
+ {
362
+ "type": "list_item",
363
+ "list_type": "bullet",
364
+ "runs": [
365
+ {
366
+ "text": "Shaved Parmesan"
367
+ },
368
+ {
369
+ "text": " for 6 salads"
370
+ },
371
+ {
372
+ "text": ", "
373
+ }
374
+ ],
375
+ "alignment": "left",
376
+ "style": "List Paragraph"
377
+ },
378
+ {
379
+ "type": "list_item",
380
+ "list_type": "bullet",
381
+ "runs": [
382
+ {
383
+ "text": "Croutons"
384
+ },
385
+ {
386
+ "text": " – sandwich zip"
387
+ }
388
+ ],
389
+ "alignment": "left",
390
+ "style": "List Paragraph"
391
+ },
392
+ {
393
+ "type": "list_item",
394
+ "list_type": "bullet",
395
+ "runs": [
396
+ {
397
+ "text": "1 Pint"
398
+ },
399
+ {
400
+ "text": " - Caesar Dressing"
401
+ }
402
+ ],
403
+ "alignment": "left",
404
+ "style": "List Paragraph"
405
+ },
406
+ {
407
+ "type": "list_item",
408
+ "list_type": "bullet",
409
+ "runs": [
410
+ {
411
+ "text": "Grilled Chicken Breast - sliced for "
412
+ },
413
+ {
414
+ "text": "6"
415
+ }
416
+ ],
417
+ "alignment": "left",
418
+ "style": "List Paragraph"
419
+ },
420
+ {
421
+ "type": "paragraph",
422
+ "runs": [
423
+ {
424
+ "text": "______",
425
+ "bold": true,
426
+ "font_size": 14.0,
427
+ "font_name": "Arial",
428
+ "color": "222222"
429
+ },
430
+ {
431
+ "text": "6",
432
+ "bold": true,
433
+ "font_size": 14.0,
434
+ "font_name": "Arial",
435
+ "color": "222222"
436
+ },
437
+ {
438
+ "text": " pax - Oriental Chicken Salads – BULK ",
439
+ "font_size": 14.0,
440
+ "font_name": "Arial",
441
+ "color": "222222"
442
+ },
443
+ {
444
+ "text": "and then together in large zip",
445
+ "font_name": "Arial",
446
+ "color": "222222"
447
+ },
448
+ {
449
+ "text": " ",
450
+ "font_name": "Arial",
451
+ "color": "FF0000"
452
+ }
453
+ ],
454
+ "alignment": "left",
455
+ "style": "Normal"
456
+ },
457
+ {
458
+ "type": "list_item",
459
+ "list_type": "bullet",
460
+ "runs": [
461
+ {
462
+ "text": "Romaine for 4"
463
+ }
464
+ ],
465
+ "alignment": "left",
466
+ "style": "List Paragraph"
467
+ },
468
+ {
469
+ "type": "list_item",
470
+ "list_type": "bullet",
471
+ "runs": [
472
+ {
473
+ "text": "Chopped Cabbage"
474
+ }
475
+ ],
476
+ "alignment": "left",
477
+ "style": "List Paragraph"
478
+ },
479
+ {
480
+ "type": "list_item",
481
+ "list_type": "bullet",
482
+ "runs": [
483
+ {
484
+ "text": "Shredded Carrots- "
485
+ }
486
+ ],
487
+ "alignment": "left",
488
+ "style": "List Paragraph"
489
+ },
490
+ {
491
+ "type": "list_item",
492
+ "list_type": "bullet",
493
+ "runs": [
494
+ {
495
+ "text": "Colorful Peppers- "
496
+ }
497
+ ],
498
+ "alignment": "left",
499
+ "style": "List Paragraph"
500
+ },
501
+ {
502
+ "type": "list_item",
503
+ "list_type": "bullet",
504
+ "runs": [
505
+ {
506
+ "text": "Scallions- "
507
+ }
508
+ ],
509
+ "alignment": "left",
510
+ "style": "List Paragraph"
511
+ },
512
+ {
513
+ "type": "list_item",
514
+ "list_type": "bullet",
515
+ "runs": [
516
+ {
517
+ "text": "Mandarin Oranges- 8oz, "
518
+ }
519
+ ],
520
+ "alignment": "left",
521
+ "style": "List Paragraph"
522
+ },
523
+ {
524
+ "type": "list_item",
525
+ "list_type": "bullet",
526
+ "runs": [
527
+ {
528
+ "text": "Cucumber- 8oz, "
529
+ }
530
+ ],
531
+ "alignment": "left",
532
+ "style": "List Paragraph"
533
+ },
534
+ {
535
+ "type": "list_item",
536
+ "list_type": "bullet",
537
+ "runs": [
538
+ {
539
+ "text": "Chopped Peanuts – snack bag "
540
+ }
541
+ ],
542
+ "alignment": "left",
543
+ "style": "List Paragraph"
544
+ },
545
+ {
546
+ "type": "list_item",
547
+ "list_type": "bullet",
548
+ "runs": [
549
+ {
550
+ "text": "White and Black Sesame Seed Garnish - "
551
+ },
552
+ {
553
+ "text": "3"
554
+ },
555
+ {
556
+ "text": "oz"
557
+ }
558
+ ],
559
+ "alignment": "left",
560
+ "style": "List Paragraph"
561
+ },
562
+ {
563
+ "type": "list_item",
564
+ "list_type": "bullet",
565
+ "runs": [
566
+ {
567
+ "text": "12"
568
+ },
569
+ {
570
+ "text": "oz "
571
+ },
572
+ {
573
+ "text": "- "
574
+ },
575
+ {
576
+ "text": "Sesame Vinaigrette"
577
+ }
578
+ ],
579
+ "alignment": "left",
580
+ "style": "List Paragraph"
581
+ },
582
+ {
583
+ "type": "list_item",
584
+ "list_type": "bullet",
585
+ "runs": [
586
+ {
587
+ "text": "6 pax - Grilled Chicken Breast – sliced for salads"
588
+ }
589
+ ],
590
+ "alignment": "left",
591
+ "style": "List Paragraph"
592
+ },
593
+ {
594
+ "type": "paragraph",
595
+ "runs": [
596
+ {
597
+ "text": "______",
598
+ "bold": true,
599
+ "font_size": 14.0,
600
+ "font_name": "Arial",
601
+ "color": "222222"
602
+ },
603
+ {
604
+ "text": "6",
605
+ "bold": true,
606
+ "font_size": 14.0,
607
+ "font_name": "Arial",
608
+ "color": "222222"
609
+ },
610
+ {
611
+ "text": " pax - Nuts and Berries Salad-",
612
+ "font_size": 14.0,
613
+ "font_name": "Arial",
614
+ "color": "222222"
615
+ },
616
+ {
617
+ "text": " ",
618
+ "font_size": 14.0,
619
+ "font_name": "Arial",
620
+ "color": "FF0000"
621
+ },
622
+ {
623
+ "text": "BULK - ",
624
+ "font_size": 14.0,
625
+ "font_name": "Arial"
626
+ },
627
+ {
628
+ "text": "and then together in large zip",
629
+ "font_name": "Arial",
630
+ "color": "222222"
631
+ }
632
+ ],
633
+ "alignment": "left",
634
+ "style": "Normal",
635
+ "line_spacing": 1.0791666666666666
636
+ },
637
+ {
638
+ "type": "list_item",
639
+ "list_type": "bullet",
640
+ "runs": [
641
+ {
642
+ "text": "Mixed Greens for "
643
+ },
644
+ {
645
+ "text": "6"
646
+ },
647
+ {
648
+ "text": ", "
649
+ }
650
+ ],
651
+ "alignment": "left",
652
+ "style": "List Paragraph",
653
+ "line_spacing": 1.0791666666666666
654
+ },
655
+ {
656
+ "type": "list_item",
657
+ "list_type": "bullet",
658
+ "runs": [
659
+ {
660
+ "text": "Assorted Dried Berries("
661
+ },
662
+ {
663
+ "text": "12"
664
+ },
665
+ {
666
+ "text": "oz), "
667
+ }
668
+ ],
669
+ "alignment": "left",
670
+ "style": "List Paragraph",
671
+ "line_spacing": 1.0791666666666666
672
+ },
673
+ {
674
+ "type": "list_item",
675
+ "list_type": "bullet",
676
+ "runs": [
677
+ {
678
+ "text": "Fresh Berries- pint, "
679
+ }
680
+ ],
681
+ "alignment": "left",
682
+ "style": "List Paragraph",
683
+ "line_spacing": 1.0791666666666666
684
+ },
685
+ {
686
+ "type": "list_item",
687
+ "list_type": "bullet",
688
+ "runs": [
689
+ {
690
+ "text": "Sunflower Seeds - 4oz "
691
+ }
692
+ ],
693
+ "alignment": "left",
694
+ "style": "List Paragraph",
695
+ "line_spacing": 1.0791666666666666
696
+ },
697
+ {
698
+ "type": "list_item",
699
+ "list_type": "bullet",
700
+ "runs": [
701
+ {
702
+ "text": "Nuts - 8oz"
703
+ }
704
+ ],
705
+ "alignment": "left",
706
+ "style": "List Paragraph",
707
+ "line_spacing": 1.0791666666666666
708
+ },
709
+ {
710
+ "type": "list_item",
711
+ "list_type": "bullet",
712
+ "runs": [
713
+ {
714
+ "text": "1 pint"
715
+ },
716
+ {
717
+ "text": " "
718
+ },
719
+ {
720
+ "text": "- "
721
+ },
722
+ {
723
+ "text": "Pomegranate Vinaigrette"
724
+ }
725
+ ],
726
+ "alignment": "left",
727
+ "style": "List Paragraph",
728
+ "line_spacing": 1.0791666666666666
729
+ },
730
+ {
731
+ "type": "paragraph",
732
+ "runs": [
733
+ {
734
+ "text": "______ ",
735
+ "bold": true,
736
+ "font_name": "Arial",
737
+ "color": "222222"
738
+ },
739
+ {
740
+ "text": "6",
741
+ "bold": true,
742
+ "font_name": "Arial",
743
+ "color": "222222"
744
+ },
745
+ {
746
+ "text": " pax = ",
747
+ "bold": true,
748
+ "font_name": "Arial",
749
+ "color": "222222"
750
+ },
751
+ {
752
+ "text": "2",
753
+ "bold": true,
754
+ "font_name": "Arial",
755
+ "color": "222222"
756
+ },
757
+ {
758
+ "text": ".5 Pints - ",
759
+ "bold": true,
760
+ "font_name": "Arial",
761
+ "color": "222222"
762
+ },
763
+ {
764
+ "text": "Pasta",
765
+ "font_name": "Arial",
766
+ "color": "222222"
767
+ },
768
+ {
769
+ "text": " Salad",
770
+ "font_name": "Arial",
771
+ "color": "222222"
772
+ },
773
+ {
774
+ "text": "-",
775
+ "font_name": "Arial",
776
+ "color": "222222"
777
+ },
778
+ {
779
+ "text": " ",
780
+ "font_name": "Arial",
781
+ "color": "222222"
782
+ },
783
+ {
784
+ "text": "Chef Choice",
785
+ "font_name": "Arial"
786
+ }
787
+ ],
788
+ "alignment": "left",
789
+ "style": "Normal"
790
+ },
791
+ {
792
+ "type": "paragraph",
793
+ "runs": [
794
+ {
795
+ "text": "Fruits and Veggies",
796
+ "bold": true,
797
+ "font_size": 18.0,
798
+ "font_name": "Arial",
799
+ "color": "222222"
800
+ },
801
+ {
802
+ "text": ":",
803
+ "bold": true,
804
+ "font_size": 18.0,
805
+ "font_name": "Arial",
806
+ "color": "222222"
807
+ }
808
+ ],
809
+ "alignment": "left",
810
+ "style": "Normal"
811
+ },
812
+ {
813
+ "type": "paragraph",
814
+ "runs": [
815
+ {
816
+ "text": "______",
817
+ "bold": true,
818
+ "font_size": 14.0,
819
+ "font_name": "Arial",
820
+ "color": "222222"
821
+ },
822
+ {
823
+ "text": "6",
824
+ "bold": true,
825
+ "font_size": 14.0,
826
+ "font_name": "Arial",
827
+ "color": "222222"
828
+ },
829
+ {
830
+ "text": " pax - ",
831
+ "bold": true,
832
+ "font_size": 14.0,
833
+ "font_name": "Arial",
834
+ "color": "222222"
835
+ },
836
+ {
837
+ "text": "Assorted Fruit",
838
+ "bold": true,
839
+ "font_size": 14.0,
840
+ "font_name": "Arial",
841
+ "color": "222222"
842
+ },
843
+ {
844
+ "text": " Platter-",
845
+ "bold": true,
846
+ "font_size": 14.0,
847
+ "font_name": "Arial",
848
+ "color": "222222"
849
+ },
850
+ {
851
+ "text": " ",
852
+ "bold": true,
853
+ "font_size": 14.0,
854
+ "font_name": "Arial",
855
+ "color": "FF0000"
856
+ },
857
+ {
858
+ "text": "Melamine",
859
+ "bold": true,
860
+ "font_size": 14.0,
861
+ "font_name": "Arial"
862
+ }
863
+ ],
864
+ "alignment": "left",
865
+ "style": "Normal"
866
+ },
867
+ {
868
+ "type": "paragraph",
869
+ "runs": [
870
+ {
871
+ "text": "______",
872
+ "bold": true,
873
+ "font_size": 14.0,
874
+ "font_name": "Arial",
875
+ "color": "222222"
876
+ },
877
+ {
878
+ "text": "6",
879
+ "bold": true,
880
+ "font_size": 14.0,
881
+ "font_name": "Arial",
882
+ "color": "222222"
883
+ },
884
+ {
885
+ "text": " pax – VVIP Crudites with Hummus and Assorted Dips ",
886
+ "bold": true,
887
+ "font_size": 14.0,
888
+ "font_name": "Arial",
889
+ "color": "222222"
890
+ },
891
+ {
892
+ "text": "–",
893
+ "font_name": "Arial",
894
+ "color": "222222"
895
+ },
896
+ {
897
+ "text": " ",
898
+ "font_name": "Arial",
899
+ "color": "FF0000"
900
+ },
901
+ {
902
+ "text": "Garnished ",
903
+ "font_name": "Arial"
904
+ },
905
+ {
906
+ "text": "M",
907
+ "font_name": "Arial"
908
+ },
909
+ {
910
+ "text": "elamine",
911
+ "font_name": "Arial"
912
+ }
913
+ ],
914
+ "alignment": "left",
915
+ "style": "Normal"
916
+ },
917
+ {
918
+ "type": "paragraph",
919
+ "runs": [
920
+ {
921
+ "text": "______An Elegant Selection of Crunchy Vegetables - Melamine Platter",
922
+ "font_size": 14.0
923
+ },
924
+ {
925
+ "text": " for 6",
926
+ "font_size": 14.0
927
+ }
928
+ ],
929
+ "alignment": "left",
930
+ "style": "Normal"
931
+ },
932
+ {
933
+ "type": "paragraph",
934
+ "runs": [
935
+ {
936
+ "text": "______Traditional Creamy Hummus",
937
+ "font_size": 14.0
938
+ },
939
+ {
940
+ "text": " -",
941
+ "font_size": 14.0
942
+ },
943
+ {
944
+ "text": " Garnished - melamine bowl",
945
+ "font_size": 14.0
946
+ },
947
+ {
948
+ "text": " for 6 ",
949
+ "font_size": 14.0
950
+ }
951
+ ],
952
+ "alignment": "left",
953
+ "style": "Normal"
954
+ },
955
+ {
956
+ "type": "paragraph",
957
+ "runs": [
958
+ {
959
+ "text": "______ Creamy Dill Ranch - melamine bowl for ",
960
+ "font_size": 14.0
961
+ },
962
+ {
963
+ "text": "6",
964
+ "font_size": 14.0
965
+ }
966
+ ],
967
+ "alignment": "left",
968
+ "style": "Normal"
969
+ },
970
+ {
971
+ "type": "paragraph",
972
+ "runs": [
973
+ {
974
+ "text": "______ ",
975
+ "font_size": 14.0
976
+ },
977
+ {
978
+ "text": "Cusabi",
979
+ "font_size": 14.0
980
+ },
981
+ {
982
+ "text": " Dipping Sauce – melamine bowl",
983
+ "font_size": 14.0
984
+ },
985
+ {
986
+ "text": " for 6",
987
+ "font_size": 14.0
988
+ }
989
+ ],
990
+ "alignment": "left",
991
+ "style": "Normal"
992
+ },
993
+ {
994
+ "type": "paragraph",
995
+ "runs": [
996
+ {
997
+ "text": "_____ ",
998
+ "bold": true,
999
+ "font_size": 14.0,
1000
+ "font_name": "Arial",
1001
+ "color": "222222"
1002
+ },
1003
+ {
1004
+ "text": "6",
1005
+ "bold": true,
1006
+ "font_size": 14.0,
1007
+ "font_name": "Arial",
1008
+ "color": "222222"
1009
+ },
1010
+ {
1011
+ "text": " pax – Assorted Mini Desserts and Homemade Brownies",
1012
+ "bold": true,
1013
+ "font_size": 14.0,
1014
+ "font_name": "Arial",
1015
+ "color": "222222"
1016
+ },
1017
+ {
1018
+ "text": " – 3pp = 1",
1019
+ "font_name": "Arial",
1020
+ "color": "222222"
1021
+ },
1022
+ {
1023
+ "text": "8",
1024
+ "font_name": "Arial",
1025
+ "color": "222222"
1026
+ },
1027
+ {
1028
+ "text": " minis on garnished melamine ",
1029
+ "font_name": "Arial",
1030
+ "color": "222222"
1031
+ }
1032
+ ],
1033
+ "alignment": "left",
1034
+ "style": "Normal"
1035
+ },
1036
+ {
1037
+ "type": "paragraph",
1038
+ "runs": [
1039
+ {
1040
+ "text": "Kid Meals",
1041
+ "bold": true,
1042
+ "font_size": 18.0,
1043
+ "font_name": "Arial",
1044
+ "color": "222222"
1045
+ },
1046
+ {
1047
+ "text": ":",
1048
+ "bold": true,
1049
+ "font_size": 18.0,
1050
+ "font_name": "Arial",
1051
+ "color": "222222"
1052
+ }
1053
+ ],
1054
+ "alignment": "left",
1055
+ "style": "Normal"
1056
+ },
1057
+ {
1058
+ "type": "paragraph",
1059
+ "runs": [
1060
+ {
1061
+ "text": "______",
1062
+ "font_name": "Arial",
1063
+ "color": "222222"
1064
+ },
1065
+ {
1066
+ "text": "3 ",
1067
+ "font_name": "Arial",
1068
+ "color": "222222"
1069
+ },
1070
+ {
1071
+ "text": "pax - ",
1072
+ "font_name": "Arial",
1073
+ "color": "222222"
1074
+ },
1075
+ {
1076
+ "text": "Kids Mac & ",
1077
+ "font_name": "Arial",
1078
+ "color": "222222"
1079
+ },
1080
+ {
1081
+ "text": "Cheese = ",
1082
+ "font_name": "Arial",
1083
+ "color": "222222"
1084
+ },
1085
+ {
1086
+ "text": "Micro",
1087
+ "font_name": "Arial"
1088
+ }
1089
+ ],
1090
+ "alignment": "left",
1091
+ "style": "Normal"
1092
+ },
1093
+ {
1094
+ "type": "paragraph",
1095
+ "runs": [
1096
+ {
1097
+ "text": "______",
1098
+ "font_name": "Arial",
1099
+ "color": "222222"
1100
+ },
1101
+ {
1102
+ "text": "9",
1103
+ "font_name": "Arial",
1104
+ "color": "222222"
1105
+ },
1106
+ {
1107
+ "text": " - ",
1108
+ "font_name": "Arial",
1109
+ "color": "222222"
1110
+ },
1111
+ {
1112
+ "text": "Kids Chicken Fingers",
1113
+ "font_name": "Arial",
1114
+ "color": "222222"
1115
+ },
1116
+ {
1117
+ "text": " ",
1118
+ "font_name": "Arial",
1119
+ "color": "222222"
1120
+ },
1121
+ {
1122
+ "text": "=",
1123
+ "font_name": "Arial",
1124
+ "color": "222222"
1125
+ },
1126
+ {
1127
+ "text": " ",
1128
+ "font_name": "Arial",
1129
+ "color": "222222"
1130
+ },
1131
+ {
1132
+ "text": "3 Chicken Tenders",
1133
+ "font_name": "Arial",
1134
+ "color": "222222"
1135
+ },
1136
+ {
1137
+ "text": " each for 3 kids ",
1138
+ "font_name": "Arial",
1139
+ "color": "222222"
1140
+ },
1141
+ {
1142
+ "text": "- ",
1143
+ "font_name": "Arial",
1144
+ "color": "222222"
1145
+ },
1146
+ {
1147
+ "text": "OVEN TIN",
1148
+ "font_name": "Arial"
1149
+ }
1150
+ ],
1151
+ "alignment": "left",
1152
+ "style": "Normal"
1153
+ },
1154
+ {
1155
+ "type": "paragraph",
1156
+ "runs": [
1157
+ {
1158
+ "text": "\t______BBQ- and please let us know if you would prefer honey mustard",
1159
+ "font_name": "Arial",
1160
+ "color": "222222"
1161
+ }
1162
+ ],
1163
+ "alignment": "left",
1164
+ "style": "Normal"
1165
+ },
1166
+ {
1167
+ "type": "paragraph",
1168
+ "runs": [
1169
+ {
1170
+ "text": "\t",
1171
+ "font_name": "Arial",
1172
+ "color": "222222"
1173
+ },
1174
+ {
1175
+ "text": "______",
1176
+ "bold": true,
1177
+ "font_name": "Arial",
1178
+ "color": "222222"
1179
+ },
1180
+ {
1181
+ "text": "Ranch",
1182
+ "font_name": "Arial",
1183
+ "color": "222222"
1184
+ }
1185
+ ],
1186
+ "alignment": "left",
1187
+ "style": "Normal"
1188
+ },
1189
+ {
1190
+ "type": "paragraph",
1191
+ "runs": [
1192
+ {
1193
+ "text": "______",
1194
+ "bold": true,
1195
+ "font_name": "Arial",
1196
+ "color": "222222"
1197
+ },
1198
+ {
1199
+ "text": "3 ",
1200
+ "font_name": "Arial",
1201
+ "color": "222222"
1202
+ },
1203
+ {
1204
+ "text": "pax",
1205
+ "font_name": "Arial",
1206
+ "color": "222222"
1207
+ },
1208
+ {
1209
+ "text": " -",
1210
+ "font_name": "Arial",
1211
+ "color": "222222"
1212
+ },
1213
+ {
1214
+ "text": " ",
1215
+ "font_name": "Arial",
1216
+ "color": "222222"
1217
+ },
1218
+ {
1219
+ "text": "Kids fruit",
1220
+ "font_name": "Arial",
1221
+ "color": "222222"
1222
+ },
1223
+ {
1224
+ "text": " ",
1225
+ "font_name": "Arial",
1226
+ "color": "222222"
1227
+ },
1228
+ {
1229
+ "text": "cups ",
1230
+ "font_name": "Arial",
1231
+ "color": "222222"
1232
+ },
1233
+ {
1234
+ "text": "= Fruit- ",
1235
+ "font_name": "Arial",
1236
+ "color": "222222"
1237
+ },
1238
+ {
1239
+ "text": "3 – 5oz disposable plastic cups",
1240
+ "font_name": "Arial",
1241
+ "color": "222222"
1242
+ }
1243
+ ],
1244
+ "alignment": "left",
1245
+ "style": "Normal"
1246
+ },
1247
+ {
1248
+ "type": "paragraph",
1249
+ "runs": [
1250
+ {
1251
+ "text": "______",
1252
+ "font_name": "Arial",
1253
+ "color": "222222"
1254
+ },
1255
+ {
1256
+ "text": "3 ",
1257
+ "font_name": "Arial",
1258
+ "color": "222222"
1259
+ },
1260
+ {
1261
+ "text": "pax ",
1262
+ "font_name": "Arial",
1263
+ "color": "222222"
1264
+ },
1265
+ {
1266
+ "text": "- ",
1267
+ "font_name": "Arial",
1268
+ "color": "222222"
1269
+ },
1270
+ {
1271
+ "text": "Kid",
1272
+ "font_name": "Arial",
1273
+ "color": "222222"
1274
+ },
1275
+ {
1276
+ "text": "s",
1277
+ "font_name": "Arial",
1278
+ "color": "222222"
1279
+ },
1280
+ {
1281
+ "text": " ",
1282
+ "font_name": "Arial",
1283
+ "color": "222222"
1284
+ },
1285
+ {
1286
+ "text": "dessert",
1287
+ "font_name": "Arial",
1288
+ "color": "222222"
1289
+ },
1290
+ {
1291
+ "text": "=",
1292
+ "font_name": "Arial",
1293
+ "color": "222222"
1294
+ },
1295
+ {
1296
+ "text": " ",
1297
+ "font_name": "Arial",
1298
+ "color": "222222"
1299
+ },
1300
+ {
1301
+ "text": "Chef",
1302
+ "font_name": "Arial",
1303
+ "color": "222222"
1304
+ },
1305
+ {
1306
+ "text": "s",
1307
+ "font_name": "Arial",
1308
+ "color": "222222"
1309
+ },
1310
+ {
1311
+ "text": " Choice",
1312
+ "font_name": "Arial",
1313
+ "color": "222222"
1314
+ }
1315
+ ],
1316
+ "alignment": "left",
1317
+ "style": "Normal"
1318
+ },
1319
+ {
1320
+ "type": "paragraph",
1321
+ "runs": [
1322
+ {
1323
+ "text": "END OF ORDER",
1324
+ "bold": true,
1325
+ "font_size": 20.0,
1326
+ "color": "FF0000"
1327
+ }
1328
+ ],
1329
+ "alignment": "CENTER (1)",
1330
+ "style": "Normal"
1331
+ }
1332
+ ],
1333
+ "metadata": {
1334
+ "title": "",
1335
+ "author": "rick garner"
1336
+ }
1337
+ }
docgen_3_13_25 Anna Schuh N50LB Cutter KPHX 10AM.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"pandoc-api-version":[1,23,1],"meta":{},"blocks":[{"t":"Para","c":[{"t":"Emph","c":[{"t":"Str","c":"**Please"},{"t":"Space"},{"t":"Str","c":"pack"},{"t":"Space"},{"t":"Str","c":"in"},{"t":"Space"},{"t":"Str","c":"bulk"},{"t":"Space"},{"t":"Str","c":"and"},{"t":"Space"},{"t":"Str","c":"in"},{"t":"Space"},{"t":"Str","c":"oven"},{"t":"Space"},{"t":"Str","c":"tins,"},{"t":"Space"},{"t":"Str","c":"when"},{"t":"Space"},{"t":"Str","c":"applicable**"}]}]},{"t":"Para","c":[{"t":"Emph","c":[{"t":"Str","c":"Confirmed"},{"t":"Space"},{"t":"Str","c":"with"},{"t":"Space"},{"t":"Str","c":"Anna,"},{"t":"Space"},{"t":"Str","c":"this"},{"t":"Space"},{"t":"Str","c":"is"},{"t":"Space"},{"t":"Str","c":"10"},{"t":"Space"},{"t":"Str","c":"different"},{"t":"Space"},{"t":"Str","c":"passengers,"},{"t":"Space"},{"t":"Str","c":"so"},{"t":"Space"},{"t":"Str","c":"repeating"},{"t":"Space"},{"t":"Str","c":"menu"},{"t":"Space"},{"t":"Str","c":"is"},{"t":"Space"},{"t":"Str","c":"perfect."}]}]},{"t":"Para","c":[{"t":"Str","c":"_____"},{"t":"Space"},{"t":"Strong","c":[{"t":"Str","c":"5"},{"t":"Space"},{"t":"Str","c":"Ceramic"},{"t":"Space"},{"t":"Str","c":"Platters"},{"t":"Space"},{"t":"Str","c":"for"},{"t":"Space"},{"t":"Str","c":"2"},{"t":"Space"},{"t":"Str","c":"pax"},{"t":"Space"},{"t":"Str","c":"each"},{"t":"Space"},{"t":"Str","c":"="},{"t":"Space"},{"t":"Str","c":"10"},{"t":"Space"},{"t":"Str","c":"pax"},{"t":"Space"},{"t":"Str","c":"TOTAL"},{"t":"Space"},{"t":"Str","c":"on"},{"t":"Space"},{"t":"Str","c":"5"},{"t":"Space"},{"t":"Str","c":"VVIP"},{"t":"Space"},{"t":"Str","c":"Charcuterie"},{"t":"Space"},{"t":"Str","c":"and"},{"t":"Space"},{"t":"Str","c":"Cheese"},{"t":"Space"},{"t":"Str","c":"Platters"}]},{"t":"Space"},{"t":"Str","c":"(1"},{"t":"Space"},{"t":"Str","c":"tray"},{"t":"Space"},{"t":"Str","c":"serving"},{"t":"Space"},{"t":"Str","c":"2"},{"t":"Space"},{"t":"Str","c":"people,"},{"t":"Space"},{"t":"Str","c":"Garnished"},{"t":"Space"},{"t":"Str","c":"Ceramic)"}]},{"t":"BulletList","c"
:[[{"t":"Para","c":[{"t":"Str","c":"Crackers"},{"t":"Space"},{"t":"Str","c":"in"},{"t":"Space"},{"t":"Str","c":"Bulk"}]}],[{"t":"Para","c":[{"t":"Str","c":"2"},{"t":"Space"},{"t":"Str","c":"Pints"},{"t":"Space"},{"t":"Str","c":"nuts"},{"t":"Space"},{"t":"Str","c":"in"},{"t":"Space"},{"t":"Str","c":"bulk"},{"t":"Space"},{"t":"Str","c":"to"},{"t":"Space"},{"t":"Str","c":"add"}]}]]},{"t":"Para","c":[{"t":"Strong","c":[{"t":"Str","c":"_____"},{"t":"Space"},{"t":"Str","c":"5"},{"t":"Space"},{"t":"Str","c":"pax"},{"t":"Space"},{"t":"Str","c":"–"},{"t":"Space"},{"t":"Str","c":"Side"},{"t":"Space"},{"t":"Str","c":"Garden"},{"t":"Space"},{"t":"Str","c":"Salads"},{"t":"Space"},{"t":"Str","c":"–"},{"t":"Space"},{"t":"Str","c":"BULK"}]}]},{"t":"Para","c":[{"t":"Str","c":"-"},{"t":"Space"},{"t":"Str","c":"Lettuce"},{"t":"Space"},{"t":"Str","c":"–"},{"t":"Space"},{"t":"Str","c":"gallon"},{"t":"Space"},{"t":"Str","c":"zip"}]},{"t":"Para","c":[{"t":"Str","c":"-"},{"t":"Space"},{"t":"Str","c":"Cherry"},{"t":"Space"},{"t":"Str","c":"Tomatoes"},{"t":"Space"},{"t":"Str","c":"-"}]},{"t":"Para","c":[{"t":"Str","c":"-"},{"t":"Space"},{"t":"Str","c":"Cucumbers"},{"t":"Space"},{"t":"Str","c":"for"},{"t":"Space"},{"t":"Str","c":"5"}]},{"t":"Para","c":[{"t":"Str","c":"-"},{"t":"Space"},{"t":"Str","c":"Carrots"},{"t":"Space"},{"t":"Str","c":"-"}]},{"t":"Para","c":[{"t":"Str","c":"-"},{"t":"Space"},{"t":"Str","c":"and"},{"t":"Space"},{"t":"Str","c":"sliced"},{"t":"Space"},{"t":"Str","c":"Radish"},{"t":"Space"},{"t":"Str","c":"for"},{"t":"Space"},{"t":"Str","c":"5-"}]},{"t":"Para","c":[{"t":"Str","c":"-"},{"t":"Space"},{"t":"Str","c":"with"},{"t":"Space"},{"t":"Str","c":"Balsamic"},{"t":"Space"},{"t":"Str","c":"Vinaigrette"},{"t":"Space"},{"t":"Str","c":"(packed"},{"t":"Space"},{"t":"Str","c":"in"},{"t":"Space"},{"t":"Str","c":"bulk)"},{"t":"Space"},{"t":"Str","c":"-"},{"t":"Space"},{"t":"Str","c":"1"},{"t":"Space"},{"t":"Str","c":"pint"}]},{"t":"Para","c":[{"t":"Strong","c":[{"t":"Str","c":"___
__"},{"t":"Space"},{"t":"Str","c":"5"},{"t":"Space"},{"t":"Str","c":"pax"},{"t":"Space"},{"t":"Str","c":"Side"},{"t":"Space"},{"t":"Str","c":"Caesar"},{"t":"Space"},{"t":"Str","c":"salads"},{"t":"Space"},{"t":"Str","c":"-"},{"t":"Space"},{"t":"Str","c":"Bulk"}]}]},{"t":"Para","c":[{"t":"Str","c":"-"},{"t":"Space"},{"t":"Str","c":"Romaine"},{"t":"Space"},{"t":"Str","c":"–"},{"t":"Space"},{"t":"Str","c":"gallon"},{"t":"Space"},{"t":"Str","c":"zip,"}]},{"t":"Para","c":[{"t":"Str","c":"-"},{"t":"Space"},{"t":"Str","c":"croutons"},{"t":"Space"},{"t":"Str","c":"for"},{"t":"Space"},{"t":"Str","c":"5"},{"t":"Space"},{"t":"Str","c":"salads,"}]},{"t":"Para","c":[{"t":"Str","c":"-"},{"t":"Space"},{"t":"Str","c":"Parmesan"},{"t":"Space"},{"t":"Str","c":"cheese;"}]},{"t":"Para","c":[{"t":"Str","c":"-"},{"t":"Space"},{"t":"Str","c":"Caesar"},{"t":"Space"},{"t":"Str","c":"Dressing"},{"t":"Space"},{"t":"Str","c":"for"},{"t":"Space"},{"t":"Str","c":"5"},{"t":"Space"},{"t":"Str","c":"side"},{"t":"Space"},{"t":"Str","c":"salads"},{"t":"Space"},{"t":"Str","c":"–"},{"t":"Space"},{"t":"Str","c":"all"},{"t":"Space"},{"t":"Str","c":"packed"},{"t":"Space"},{"t":"Str","c":"in"},{"t":"Space"},{"t":"Str","c":"bulk)"}]},{"t":"Para","c":[{"t":"Strong","c":[{"t":"Str","c":"_____"},{"t":"Space"},{"t":"Str","c":"10"},{"t":"Space"},{"t":"Str","c":"pax"},{"t":"Space"},{"t":"Str","c":"-"},{"t":"Space"},{"t":"Str","c":"Prime"},{"t":"Space"},{"t":"Str","c":"Beef"},{"t":"Space"},{"t":"Str","c":"Tenderloin"},{"t":"Space"},{"t":"Str","c":"Filets"}]},{"t":"Space"},{"t":"Str","c":"(cooked"},{"t":"Space"},{"t":"Strong","c":[{"t":"Str","c":"RARE"}]},{"t":"Str","c":")"},{"t":"Space"},{"t":"Str","c":"-"},{"t":"Space"},{"t":"Str","c":"8oz"},{"t":"Space"},{"t":"Str","c":"Servings"},{"t":"Space"},{"t":"Str","c":"–"},{"t":"Space"},{"t":"Str","c":"Oven"},{"t":"Space"},{"t":"Str","c":"Tins"}]},{"t":"Para","c":[{"t":"Strong","c":[{"t":"Str","c":"_____"},{"t":"Space"},{"t":"Str","c":"5"},{"t":"Space"},{"t":"Str","c":"pa
x"},{"t":"Space"},{"t":"Str","c":"-"},{"t":"Space"},{"t":"Str","c":"Chicken"},{"t":"Space"},{"t":"Str","c":"Piccata"},{"t":"Space"},{"t":"Str","c":"(not"},{"t":"Space"},{"t":"Str","c":"breaded)"}]},{"t":"Space"},{"t":"Str","c":"-"},{"t":"Space"},{"t":"Str","c":"with"},{"t":"Space"},{"t":"Str","c":"White"},{"t":"Space"},{"t":"Str","c":"Wine"},{"t":"Space"},{"t":"Str","c":"Lemon"},{"t":"Space"},{"t":"Str","c":"Caper"},{"t":"Space"},{"t":"Str","c":"Sauce"}]},{"t":"BulletList","c":[[{"t":"Para","c":[{"t":"Str","c":"With"},{"t":"Space"},{"t":"Str","c":"garnish"}]}],[{"t":"Para","c":[{"t":"Str","c":"pasta"},{"t":"Space"},{"t":"Str","c":"to"},{"t":"Space"},{"t":"Str","c":"accompany"},{"t":"Space"},{"t":"Str","c":"the"},{"t":"Space"},{"t":"Str","c":"Chicken"},{"t":"Space"},{"t":"Str","c":"Piccata"},{"t":"Space"},{"t":"Str","c":"-"},{"t":"Space"},{"t":"Str","c":"chef’s"},{"t":"Space"},{"t":"Str","c":"choice"},{"t":"Space"},{"t":"Str","c":"-"},{"t":"Space"},{"t":"Str","c":"oven"}]}]]},{"t":"BulletList","c":[[]]},{"t":"Para","c":[{"t":"Strong","c":[{"t":"Str","c":"_____"},{"t":"Space"},{"t":"Str","c":"10"},{"t":"Space"},{"t":"Str","c":"pax"},{"t":"Space"},{"t":"Str","c":"-"},{"t":"Space"},{"t":"Str","c":"Garlic"},{"t":"Space"},{"t":"Str","c":"Mashed"},{"t":"Space"},{"t":"Str","c":"Potatoes"}]},{"t":"Space"},{"t":"Str","c":"–"},{"t":"Space"},{"t":"Str","c":"Oven"},{"t":"Space"},{"t":"Str","c":"tins"}]},{"t":"Para","c":[{"t":"Strong","c":[{"t":"Str","c":"_____"},{"t":"Space"},{"t":"Str","c":"10"},{"t":"Space"},{"t":"Str","c":"pax"},{"t":"Space"},{"t":"Str","c":"-"},{"t":"Space"},{"t":"Str","c":"Roasted"},{"t":"Space"},{"t":"Str","c":"Asparagus"}]},{"t":"Space"},{"t":"Str","c":"-"},{"t":"Space"},{"t":"Str","c":"oven"},{"t":"Space"},{"t":"Str","c":"tins"}]},{"t":"Para","c":[{"t":"Strong","c":[{"t":"Str","c":"_____"},{"t":"Space"},{"t":"Str","c":"10"},{"t":"Space"},{"t":"Str","c":"pax,"},{"t":"Space"},{"t":"Str","c":"2pp"},{"t":"Space"},{"t":"Str","c":"="},{"t":"Space"},{"t":"Str",
"c":"20"},{"t":"Space"},{"t":"Str","c":"-"},{"t":"Space"},{"t":"Str","c":"Assorted"},{"t":"Space"},{"t":"Str","c":"Brownies"},{"t":"Space"},{"t":"Str","c":"–"}]},{"t":"Space"},{"t":"Str","c":"Bulk"}]},{"t":"Para","c":[{"t":"Str","c":"Please"},{"t":"Space"},{"t":"Str","c":"let"},{"t":"Space"},{"t":"Str","c":"me"},{"t":"Space"},{"t":"Str","c":"know"},{"t":"Space"},{"t":"Str","c":"if"},{"t":"Space"},{"t":"Str","c":"there"},{"t":"Space"},{"t":"Str","c":"are"},{"t":"Space"},{"t":"Str","c":"any"},{"t":"Space"},{"t":"Str","c":"questions!"},{"t":"Space"},{"t":"Str","c":"Thanks"},{"t":"Space"},{"t":"Str","c":"so"},{"t":"Space"},{"t":"Str","c":"much."}]},{"t":"Para","c":[{"t":"Str","c":"Anna"},{"t":"Space"},{"t":"Str","c":"Schuh"},{"t":"Space"},{"t":"Str","c":"-"},{"t":"Space"},{"t":"Str","c":"(859)640-7333"}]},{"t":"Para","c":[{"t":"Str","c":"END"},{"t":"Space"},{"t":"Str","c":"OF"},{"t":"Space"},{"t":"Str","c":"ORDER"},{"t":"Space"},{"t":"Str","c":"-"},{"t":"Space"},{"t":"Str","c":"THANKS😊"}]}]}
docgen_3_13_25 Anna Schuh N50LB Cutter KPHX 10AM.markdown ADDED
@@ -0,0 +1,62 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ *\*\*Please pack in bulk and in oven tins, when applicable\*\**
2
+
3
+ *Confirmed with Anna, this is 10 different passengers, so repeating menu
4
+ is perfect.*
5
+
6
+ \_\_\_\_\_ **5 Ceramic Platters for 2 pax each = 10 pax TOTAL on 5 VVIP
7
+ Charcuterie and Cheese Platters** (1 tray serving 2 people, Garnished
8
+ Ceramic)
9
+
10
+ - Crackers in Bulk
11
+
12
+ - 2 Pints nuts in bulk to add
13
+
14
+ **\_\_\_\_\_ 5 pax -- Side Garden Salads -- BULK**
15
+
16
+ \- Lettuce -- gallon zip
17
+
18
+ \- Cherry Tomatoes -
19
+
20
+ \- Cucumbers for 5
21
+
22
+ \- Carrots -
23
+
24
+ \- and sliced Radish for 5-
25
+
26
+ \- with Balsamic Vinaigrette (packed in bulk) - 1 pint
27
+
28
+ **\_\_\_\_\_ 5 pax Side Caesar salads - Bulk**
29
+
30
+ \- Romaine -- gallon zip,
31
+
32
+ \- croutons for 5 salads,
33
+
34
+ \- Parmesan cheese;
35
+
36
+ \- Caesar Dressing for 5 side salads -- all packed in bulk)
37
+
38
+ **\_\_\_\_\_ 10 pax - Prime Beef Tenderloin Filets** (cooked **RARE**) -
39
+ 8oz Servings -- Oven Tins
40
+
41
+ **\_\_\_\_\_ 5 pax - Chicken Piccata (not breaded)** - with White Wine
42
+ Lemon Caper Sauce
43
+
44
+ - With garnish
45
+
46
+ - pasta to accompany the Chicken Piccata - chef's choice - oven
47
+
48
+ <!-- -->
49
+
50
+ -
51
+
52
+ **\_\_\_\_\_ 10 pax - Garlic Mashed Potatoes** -- Oven tins
53
+
54
+ **\_\_\_\_\_ 10 pax - Roasted Asparagus** - oven tins
55
+
56
+ **\_\_\_\_\_ 10 pax, 2pp = 20 - Assorted Brownies --** Bulk
57
+
58
+ Please let me know if there are any questions! Thanks so much.
59
+
60
+ Anna Schuh - (859)640-7333
61
+
62
+ END OF ORDER - THANKS😊
docgen_4_3_25_TBD_JetAviation_Order.json ADDED
@@ -0,0 +1,186 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "sections": [
3
+ {
4
+ "header": [],
5
+ "first_page_header": [],
6
+ "even_page_header": [],
7
+ "footer": [],
8
+ "first_page_footer": [],
9
+ "even_page_footer": []
10
+ }
11
+ ],
12
+ "body": [
13
+ {
14
+ "type": "heading",
15
+ "level": 1,
16
+ "runs": [
17
+ {
18
+ "text": "TBD / TAIL# TBD / THURSDAY 4-3-25 @ 3PM / JET AVIATION – SCOTTSDALE, AZ"
19
+ }
20
+ ],
21
+ "alignment": "left",
22
+ "style": "Heading 1"
23
+ },
24
+ {
25
+ "type": "paragraph",
26
+ "runs": [
27
+ {
28
+ "text": "14650 N. Airport Drive, Scottsdale, AZ 85260"
29
+ }
30
+ ],
31
+ "alignment": "left",
32
+ "style": "Normal"
33
+ },
34
+ {
35
+ "type": "paragraph",
36
+ "runs": [
37
+ {
38
+ "text": "GUEST COUNT: 2"
39
+ }
40
+ ],
41
+ "alignment": "left",
42
+ "style": "Normal"
43
+ },
44
+ {
45
+ "type": "paragraph",
46
+ "runs": [
47
+ {
48
+ "text": "\nORDER:\n_Please pack in appropriate containers (microwave-safe unless otherwise specified)._"
49
+ }
50
+ ],
51
+ "alignment": "left",
52
+ "style": "Normal"
53
+ },
54
+ {
55
+ "type": "paragraph",
56
+ "runs": [
57
+ {
58
+ "text": "\nEntrées:"
59
+ }
60
+ ],
61
+ "alignment": "left",
62
+ "style": "Normal"
63
+ },
64
+ {
65
+ "type": "list_item",
66
+ "list_type": "bullet",
67
+ "runs": [
68
+ {
69
+ "text": "- 2 pax – Prime Cut Filet Entrees"
70
+ }
71
+ ],
72
+ "alignment": "left",
73
+ "style": "List Bullet"
74
+ },
75
+ {
76
+ "type": "paragraph",
77
+ "runs": [
78
+ {
79
+ "text": "\nSides:"
80
+ }
81
+ ],
82
+ "alignment": "left",
83
+ "style": "Normal"
84
+ },
85
+ {
86
+ "type": "list_item",
87
+ "list_type": "bullet",
88
+ "runs": [
89
+ {
90
+ "text": "- 2 pax – Chicken Tortilla Soup"
91
+ }
92
+ ],
93
+ "alignment": "left",
94
+ "style": "List Bullet"
95
+ },
96
+ {
97
+ "type": "list_item",
98
+ "list_type": "bullet",
99
+ "runs": [
100
+ {
101
+ "text": "- 2 pax – Grilled Asparagus"
102
+ }
103
+ ],
104
+ "alignment": "left",
105
+ "style": "List Bullet"
106
+ },
107
+ {
108
+ "type": "list_item",
109
+ "list_type": "bullet",
110
+ "runs": [
111
+ {
112
+ "text": "- 2 pax – Jasmine Rice"
113
+ }
114
+ ],
115
+ "alignment": "left",
116
+ "style": "List Bullet"
117
+ },
118
+ {
119
+ "type": "paragraph",
120
+ "runs": [
121
+ {
122
+ "text": "\nDessert:"
123
+ }
124
+ ],
125
+ "alignment": "left",
126
+ "style": "Normal"
127
+ },
128
+ {
129
+ "type": "list_item",
130
+ "list_type": "bullet",
131
+ "runs": [
132
+ {
133
+ "text": "- 2 pax – Flourless Chocolate Cake"
134
+ }
135
+ ],
136
+ "alignment": "left",
137
+ "style": "List Bullet"
138
+ },
139
+ {
140
+ "type": "paragraph",
141
+ "runs": [
142
+ {
143
+ "text": "\nNOTES:"
144
+ }
145
+ ],
146
+ "alignment": "left",
147
+ "style": "Normal"
148
+ },
149
+ {
150
+ "type": "list_item",
151
+ "list_type": "bullet",
152
+ "runs": [
153
+ {
154
+ "text": "- No tail number confirmed at this time."
155
+ }
156
+ ],
157
+ "alignment": "left",
158
+ "style": "List Bullet"
159
+ },
160
+ {
161
+ "type": "list_item",
162
+ "list_type": "bullet",
163
+ "runs": [
164
+ {
165
+ "text": "- Please label containers clearly for ease of plating."
166
+ }
167
+ ],
168
+ "alignment": "left",
169
+ "style": "List Bullet"
170
+ },
171
+ {
172
+ "type": "paragraph",
173
+ "runs": [
174
+ {
175
+ "text": "\nEND OF ORDER – THANK YOU!"
176
+ }
177
+ ],
178
+ "alignment": "left",
179
+ "style": "Normal"
180
+ }
181
+ ],
182
+ "metadata": {
183
+ "title": "",
184
+ "author": "python-docx"
185
+ }
186
+ }
docgen_docgen_3_12-25 Steve Hepner Tail N15Y Jackson Jet 2.docx ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:104ae57e887783bf4c9df36d530dc2f09d167dea932f55065ece01bc479473bc
3
+ size 39673
docgen_docgen_3_12-25 Steve Hepner Tail N15Y Jackson Jet 3.docx ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:1a4cb474ba944a7ebf1c94d43ce6c7c3c52e2ae3e4c70388d5cd676b8693deee
3
+ size 41520
docgen_docgen_3_12-25 Steve Hepner Tail N15Y Jackson Jet.docx ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d674cb219c5754530bf63b6c3a2b130e4fb4535811be2c6c7f590e0a2735d087
3
+ size 11792
docgen_test3_12-25 Steve Hepner Tail N15Y Jackson Jet 1.docx ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:19fbc7e3a97b13790f25da1cebff1ab580fd7ff3fa1514fbd74e0bf5ba4afbe9
3
+ size 11792
package.json ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "name": "gradio_client",
3
+ "version": "1.9.0",
4
+ "description": "",
5
+ "python": "true",
6
+ "main_changeset": true,
7
+ "private": true
8
+ }
requirements.txt ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ gradio>=5.13.2
2
+ pypandoc
3
+ pdf2docx
4
+ python-docx
5
+ flask
6
+ python-multipart
setuptools.schema.json ADDED
@@ -0,0 +1,433 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "$schema": "http://json-schema.org/draft-07/schema#",
3
+
4
+ "$id": "https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html",
5
+ "title": "``tool.setuptools`` table",
6
+ "$$description": [
7
+ "``setuptools``-specific configurations that can be set by users that require",
8
+ "customization.",
9
+ "These configurations are completely optional and probably can be skipped when",
10
+ "creating simple packages. They are equivalent to some of the `Keywords",
11
+ "<https://setuptools.pypa.io/en/latest/references/keywords.html>`_",
12
+ "used by the ``setup.py`` file, and can be set via the ``tool.setuptools`` table.",
13
+ "It considers only ``setuptools`` `parameters",
14
+ "<https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html#setuptools-specific-configuration>`_",
15
+ "that are not covered by :pep:`621`; and intentionally excludes ``dependency_links``",
16
+ "and ``setup_requires`` (incompatible with modern workflows/standards)."
17
+ ],
18
+
19
+ "type": "object",
20
+ "additionalProperties": false,
21
+ "properties": {
22
+ "platforms": {
23
+ "type": "array",
24
+ "items": {"type": "string"}
25
+ },
26
+ "provides": {
27
+ "$$description": [
28
+ "Package and virtual package names contained within this package",
29
+ "**(not supported by pip)**"
30
+ ],
31
+ "type": "array",
32
+ "items": {"type": "string", "format": "pep508-identifier"}
33
+ },
34
+ "obsoletes": {
35
+ "$$description": [
36
+ "Packages which this package renders obsolete",
37
+ "**(not supported by pip)**"
38
+ ],
39
+ "type": "array",
40
+ "items": {"type": "string", "format": "pep508-identifier"}
41
+ },
42
+ "zip-safe": {
43
+ "$$description": [
44
+ "Whether the project can be safely installed and run from a zip file.",
45
+ "**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and",
46
+ "``setup.py install`` in the context of ``eggs`` (**DEPRECATED**)."
47
+ ],
48
+ "type": "boolean"
49
+ },
50
+ "script-files": {
51
+ "$$description": [
52
+ "Legacy way of defining scripts (entry-points are preferred).",
53
+ "Equivalent to the ``script`` keyword in ``setup.py``",
54
+ "(it was renamed to avoid confusion with entry-point based ``project.scripts``",
55
+ "defined in :pep:`621`).",
56
+ "**DISCOURAGED**: generic script wrappers are tricky and may not work properly.",
57
+ "Whenever possible, please use ``project.scripts`` instead."
58
+ ],
59
+ "type": "array",
60
+ "items": {"type": "string"},
61
+ "$comment": "TODO: is this field deprecated/should be removed?"
62
+ },
63
+ "eager-resources": {
64
+ "$$description": [
65
+ "Resources that should be extracted together, if any of them is needed,",
66
+ "or if any C extensions included in the project are imported.",
67
+ "**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and",
68
+ "``setup.py install`` in the context of ``eggs`` (**DEPRECATED**)."
69
+ ],
70
+ "type": "array",
71
+ "items": {"type": "string"}
72
+ },
73
+ "packages": {
74
+ "$$description": [
75
+ "Packages that should be included in the distribution.",
76
+ "It can be given either as a list of package identifiers",
77
+ "or as a ``dict``-like structure with a single key ``find``",
78
+ "which corresponds to a dynamic call to",
79
+ "``setuptools.config.expand.find_packages`` function.",
80
+ "The ``find`` key is associated with a nested ``dict``-like structure that can",
81
+ "contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,",
82
+ "mimicking the keyword arguments of the associated function."
83
+ ],
84
+ "oneOf": [
85
+ {
86
+ "title": "Array of Python package identifiers",
87
+ "type": "array",
88
+ "items": {"$ref": "#/definitions/package-name"}
89
+ },
90
+ {"$ref": "#/definitions/find-directive"}
91
+ ]
92
+ },
93
+ "package-dir": {
94
+ "$$description": [
95
+ ":class:`dict`-like structure mapping from package names to directories where their",
96
+ "code can be found.",
97
+ "The empty string (as key) means that all packages are contained inside",
98
+ "the given directory will be included in the distribution."
99
+ ],
100
+ "type": "object",
101
+ "additionalProperties": false,
102
+ "propertyNames": {
103
+ "anyOf": [{"const": ""}, {"$ref": "#/definitions/package-name"}]
104
+ },
105
+ "patternProperties": {
106
+ "^.*$": {"type": "string" }
107
+ }
108
+ },
109
+ "package-data": {
110
+ "$$description": [
111
+ "Mapping from package names to lists of glob patterns.",
112
+ "Usually this option is not needed when using ``include-package-data = true``",
113
+ "For more information on how to include data files, check ``setuptools`` `docs",
114
+ "<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_."
115
+ ],
116
+ "type": "object",
117
+ "additionalProperties": false,
118
+ "propertyNames": {
119
+ "anyOf": [{"type": "string", "format": "python-module-name"}, {"const": "*"}]
120
+ },
121
+ "patternProperties": {
122
+ "^.*$": {"type": "array", "items": {"type": "string"}}
123
+ }
124
+ },
125
+ "include-package-data": {
126
+ "$$description": [
127
+ "Automatically include any data files inside the package directories",
128
+ "that are specified by ``MANIFEST.in``",
129
+ "For more information on how to include data files, check ``setuptools`` `docs",
130
+ "<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_."
131
+ ],
132
+ "type": "boolean"
133
+ },
134
+ "exclude-package-data": {
135
+ "$$description": [
136
+ "Mapping from package names to lists of glob patterns that should be excluded",
137
+ "For more information on how to include data files, check ``setuptools`` `docs",
138
+ "<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_."
139
+ ],
140
+ "type": "object",
141
+ "additionalProperties": false,
142
+ "propertyNames": {
143
+ "anyOf": [{"type": "string", "format": "python-module-name"}, {"const": "*"}]
144
+ },
145
+ "patternProperties": {
146
+ "^.*$": {"type": "array", "items": {"type": "string"}}
147
+ }
148
+ },
149
+ "namespace-packages": {
150
+ "type": "array",
151
+ "items": {"type": "string", "format": "python-module-name-relaxed"},
152
+ "$comment": "https://setuptools.pypa.io/en/latest/userguide/package_discovery.html",
153
+ "description": "**DEPRECATED**: use implicit namespaces instead (:pep:`420`)."
154
+ },
155
+ "py-modules": {
156
+ "description": "Modules that setuptools will manipulate",
157
+ "type": "array",
158
+ "items": {"type": "string", "format": "python-module-name-relaxed"},
159
+ "$comment": "TODO: clarify the relationship with ``packages``"
160
+ },
161
+ "ext-modules": {
162
+ "description": "Extension modules to be compiled by setuptools",
163
+ "type": "array",
164
+ "items": {"$ref": "#/definitions/ext-module"}
165
+ },
166
+ "data-files": {
167
+ "$$description": [
168
+ "``dict``-like structure where each key represents a directory and",
169
+ "the value is a list of glob patterns that should be installed in them.",
170
+ "**DISCOURAGED**: please notice this might not work as expected with wheels.",
171
+ "Whenever possible, consider using data files inside the package directories",
172
+ "(or create a new namespace package that only contains data files).",
173
+ "See `data files support",
174
+ "<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_."
175
+ ],
176
+ "type": "object",
177
+ "patternProperties": {
178
+ "^.*$": {"type": "array", "items": {"type": "string"}}
179
+ }
180
+ },
181
+ "cmdclass": {
182
+ "$$description": [
183
+ "Mapping of distutils-style command names to ``setuptools.Command`` subclasses",
184
+ "which in turn should be represented by strings with a qualified class name",
185
+ "(i.e., \"dotted\" form with module), e.g.::\n\n",
186
+ " cmdclass = {mycmd = \"pkg.subpkg.module.CommandClass\"}\n\n",
187
+ "The command class should be a directly defined at the top-level of the",
188
+ "containing module (no class nesting)."
189
+ ],
190
+ "type": "object",
191
+ "patternProperties": {
192
+ "^.*$": {"type": "string", "format": "python-qualified-identifier"}
193
+ }
194
+ },
195
+ "license-files": {
196
+ "type": "array",
197
+ "items": {"type": "string"},
198
+ "$$description": [
199
+ "**PROVISIONAL**: list of glob patterns for all license files being distributed.",
200
+ "(likely to become standard with :pep:`639`).",
201
+ "By default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``"
202
+ ],
203
+ "$comment": "TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?"
204
+ },
205
+ "dynamic": {
206
+ "type": "object",
207
+ "description": "Instructions for loading :pep:`621`-related metadata dynamically",
208
+ "additionalProperties": false,
209
+ "properties": {
210
+ "version": {
211
+ "$$description": [
212
+ "A version dynamically loaded via either the ``attr:`` or ``file:``",
213
+ "directives. Please make sure the given file or attribute respects :pep:`440`.",
214
+ "Also ensure to set ``project.dynamic`` accordingly."
215
+ ],
216
+ "oneOf": [
217
+ {"$ref": "#/definitions/attr-directive"},
218
+ {"$ref": "#/definitions/file-directive"}
219
+ ]
220
+ },
221
+ "classifiers": {"$ref": "#/definitions/file-directive"},
222
+ "description": {"$ref": "#/definitions/file-directive"},
223
+ "entry-points": {"$ref": "#/definitions/file-directive"},
224
+ "dependencies": {"$ref": "#/definitions/file-directive-for-dependencies"},
225
+ "optional-dependencies": {
226
+ "type": "object",
227
+ "propertyNames": {"type": "string", "format": "pep508-identifier"},
228
+ "additionalProperties": false,
229
+ "patternProperties": {
230
+ ".+": {"$ref": "#/definitions/file-directive-for-dependencies"}
231
+ }
232
+ },
233
+ "readme": {
234
+ "type": "object",
235
+ "anyOf": [
236
+ {"$ref": "#/definitions/file-directive"},
237
+ {
238
+ "type": "object",
239
+ "properties": {
240
+ "content-type": {"type": "string"},
241
+ "file": { "$ref": "#/definitions/file-directive/properties/file" }
242
+ },
243
+ "additionalProperties": false}
244
+ ],
245
+ "required": ["file"]
246
+ }
247
+ }
248
+ }
249
+ },
250
+
251
+ "definitions": {
252
+ "package-name": {
253
+ "$id": "#/definitions/package-name",
254
+ "title": "Valid package name",
255
+ "description": "Valid package name (importable or :pep:`561`).",
256
+ "type": "string",
257
+ "anyOf": [
258
+ {"type": "string", "format": "python-module-name-relaxed"},
259
+ {"type": "string", "format": "pep561-stub-name"}
260
+ ]
261
+ },
262
+ "ext-module": {
263
+ "$id": "#/definitions/ext-module",
264
+ "title": "Extension module",
265
+ "description": "Parameters to construct a :class:`setuptools.Extension` object",
266
+ "type": "object",
267
+ "required": ["name", "sources"],
268
+ "additionalProperties": false,
269
+ "properties": {
270
+ "name": {
271
+ "type": "string",
272
+ "format": "python-module-name-relaxed"
273
+ },
274
+ "sources": {
275
+ "type": "array",
276
+ "items": {"type": "string"}
277
+ },
278
+ "include-dirs":{
279
+ "type": "array",
280
+ "items": {"type": "string"}
281
+ },
282
+ "define-macros": {
283
+ "type": "array",
284
+ "items": {
285
+ "type": "array",
286
+ "items": [
287
+ {"description": "macro name", "type": "string"},
288
+ {"description": "macro value", "oneOf": [{"type": "string"}, {"type": "null"}]}
289
+ ],
290
+ "additionalItems": false
291
+ }
292
+ },
293
+ "undef-macros": {
294
+ "type": "array",
295
+ "items": {"type": "string"}
296
+ },
297
+ "library-dirs": {
298
+ "type": "array",
299
+ "items": {"type": "string"}
300
+ },
301
+ "libraries": {
302
+ "type": "array",
303
+ "items": {"type": "string"}
304
+ },
305
+ "runtime-library-dirs": {
306
+ "type": "array",
307
+ "items": {"type": "string"}
308
+ },
309
+ "extra-objects": {
310
+ "type": "array",
311
+ "items": {"type": "string"}
312
+ },
313
+ "extra-compile-args": {
314
+ "type": "array",
315
+ "items": {"type": "string"}
316
+ },
317
+ "extra-link-args": {
318
+ "type": "array",
319
+ "items": {"type": "string"}
320
+ },
321
+ "export-symbols": {
322
+ "type": "array",
323
+ "items": {"type": "string"}
324
+ },
325
+ "swig-opts": {
326
+ "type": "array",
327
+ "items": {"type": "string"}
328
+ },
329
+ "depends": {
330
+ "type": "array",
331
+ "items": {"type": "string"}
332
+ },
333
+ "language": {"type": "string"},
334
+ "optional": {"type": "boolean"},
335
+ "py-limited-api": {"type": "boolean"}
336
+ }
337
+ },
338
+ "file-directive": {
339
+ "$id": "#/definitions/file-directive",
340
+ "title": "'file:' directive",
341
+ "description":
342
+ "Value is read from a file (or list of files and then concatenated)",
343
+ "type": "object",
344
+ "additionalProperties": false,
345
+ "properties": {
346
+ "file": {
347
+ "oneOf": [
348
+ {"type": "string"},
349
+ {"type": "array", "items": {"type": "string"}}
350
+ ]
351
+ }
352
+ },
353
+ "required": ["file"]
354
+ },
355
+ "file-directive-for-dependencies": {
356
+ "title": "'file:' directive for dependencies",
357
+ "allOf": [
358
+ {
359
+ "$$description": [
360
+ "**BETA**: subset of the ``requirements.txt`` format",
361
+ "without ``pip`` flags and options",
362
+ "(one :pep:`508`-compliant string per line,",
363
+ "lines that are blank or start with ``#`` are excluded).",
364
+ "See `dynamic metadata",
365
+ "<https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html#dynamic-metadata>`_."
366
+ ]
367
+ },
368
+ {"$ref": "#/definitions/file-directive"}
369
+ ]
370
+ },
371
+ "attr-directive": {
372
+ "title": "'attr:' directive",
373
+ "$id": "#/definitions/attr-directive",
374
+ "$$description": [
375
+ "Value is read from a module attribute. Supports callables and iterables;",
376
+ "unsupported types are cast via ``str()``"
377
+ ],
378
+ "type": "object",
379
+ "additionalProperties": false,
380
+ "properties": {
381
+ "attr": {"type": "string", "format": "python-qualified-identifier"}
382
+ },
383
+ "required": ["attr"]
384
+ },
385
+ "find-directive": {
386
+ "$id": "#/definitions/find-directive",
387
+ "title": "'find:' directive",
388
+ "type": "object",
389
+ "additionalProperties": false,
390
+ "properties": {
391
+ "find": {
392
+ "type": "object",
393
+ "$$description": [
394
+ "Dynamic `package discovery",
395
+ "<https://setuptools.pypa.io/en/latest/userguide/package_discovery.html>`_."
396
+ ],
397
+ "additionalProperties": false,
398
+ "properties": {
399
+ "where": {
400
+ "description":
401
+ "Directories to be searched for packages (Unix-style relative path)",
402
+ "type": "array",
403
+ "items": {"type": "string"}
404
+ },
405
+ "exclude": {
406
+ "type": "array",
407
+ "$$description": [
408
+ "Exclude packages that match the values listed in this field.",
409
+ "Can container shell-style wildcards (e.g. ``'pkg.*'``)"
410
+ ],
411
+ "items": {"type": "string"}
412
+ },
413
+ "include": {
414
+ "type": "array",
415
+ "$$description": [
416
+ "Restrict the found packages to just the ones listed in this field.",
417
+ "Can container shell-style wildcards (e.g. ``'pkg.*'``)"
418
+ ],
419
+ "items": {"type": "string"}
420
+ },
421
+ "namespaces": {
422
+ "type": "boolean",
423
+ "$$description": [
424
+ "When ``True``, directories without a ``__init__.py`` file will also",
425
+ "be scanned for :pep:`420`-style implicit namespaces"
426
+ ]
427
+ }
428
+ }
429
+ }
430
+ }
431
+ }
432
+ }
433
+ }
tsconfig.json ADDED
@@ -0,0 +1,27 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "include": ["src/**/*"],
3
+ "exclude": ["src/**/*.test.ts", "src/**/*.node-test.ts"],
4
+ "compilerOptions": {
5
+ "allowJs": true,
6
+ "declaration": true,
7
+ "emitDeclarationOnly": true,
8
+ "outDir": "dist",
9
+ "declarationMap": true,
10
+ "module": "ESNext",
11
+ "target": "ES2020",
12
+ "useDefineForClassFields": true,
13
+ "lib": ["ES2020", "DOM", "DOM.Iterable"],
14
+ "skipLibCheck": true,
15
+
16
+ /* Bundler */
17
+ "moduleResolution": "Bundler",
18
+ "skipDefaultLibCheck": true,
19
+ "allowImportingTsExtensions": true,
20
+ "esModuleInterop": true,
21
+ "resolveJsonModule": true,
22
+ "isolatedModules": true,
23
+
24
+ /* Linting */
25
+ "strict": true
26
+ }
27
+ }
types.json ADDED
@@ -0,0 +1,199 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "SimpleSerializable": {
3
+ "type": {},
4
+ "description": "any valid value"
5
+ },
6
+ "StringSerializable": {
7
+ "type": "string"
8
+ },
9
+ "ListStringSerializable": {
10
+ "type": "array",
11
+ "items": {
12
+ "type": "string"
13
+ }
14
+ },
15
+ "BooleanSerializable": {
16
+ "type": "boolean"
17
+ },
18
+ "NumberSerializable": {
19
+ "type": "number"
20
+ },
21
+ "ImgSerializable": {
22
+ "type": "string",
23
+ "description": "base64 representation of an image"
24
+ },
25
+ "FileSerializable": {
26
+ "oneOf": [
27
+ {
28
+ "type": "string",
29
+ "description": "filepath on your computer (or URL) of file"
30
+ },
31
+ {
32
+ "type": "object",
33
+ "properties": {
34
+ "name": { "type": "string", "description": "name of file" },
35
+ "data": {
36
+ "type": "string",
37
+ "description": "base64 representation of file"
38
+ },
39
+ "size": {
40
+ "type": "integer",
41
+ "description": "size of image in bytes"
42
+ },
43
+ "is_file": {
44
+ "type": "boolean",
45
+ "description": "true if the file has been uploaded to the server"
46
+ },
47
+ "orig_name": {
48
+ "type": "string",
49
+ "description": "original name of the file"
50
+ }
51
+ },
52
+ "required": ["name", "data"]
53
+ },
54
+ {
55
+ "type": "array",
56
+ "items": {
57
+ "anyOf": [
58
+ {
59
+ "type": "string",
60
+ "description": "filepath on your computer (or URL) of file"
61
+ },
62
+ {
63
+ "type": "object",
64
+ "properties": {
65
+ "name": { "type": "string", "description": "name of file" },
66
+ "data": {
67
+ "type": "string",
68
+ "description": "base64 representation of file"
69
+ },
70
+ "size": {
71
+ "type": "integer",
72
+ "description": "size of image in bytes"
73
+ },
74
+ "is_file": {
75
+ "type": "boolean",
76
+ "description": "true if the file has been uploaded to the server"
77
+ },
78
+ "orig_name": {
79
+ "type": "string",
80
+ "description": "original name of the file"
81
+ }
82
+ },
83
+ "required": ["name", "data"]
84
+ }
85
+ ]
86
+ }
87
+ }
88
+ ]
89
+ },
90
+ "SingleFileSerializable": {
91
+ "oneOf": [
92
+ {
93
+ "type": "string",
94
+ "description": "filepath on your computer (or URL) of file"
95
+ },
96
+ {
97
+ "type": "object",
98
+ "properties": {
99
+ "name": { "type": "string", "description": "name of file" },
100
+ "data": {
101
+ "type": "string",
102
+ "description": "base64 representation of file"
103
+ },
104
+ "size": {
105
+ "type": "integer",
106
+ "description": "size of image in bytes"
107
+ },
108
+ "is_file": {
109
+ "type": "boolean",
110
+ "description": "true if the file has been uploaded to the server"
111
+ },
112
+ "orig_name": {
113
+ "type": "string",
114
+ "description": "original name of the file"
115
+ }
116
+ },
117
+ "required": ["name", "data"]
118
+ }
119
+ ]
120
+ },
121
+ "MultipleFileSerializable": {
122
+ "type": "array",
123
+ "items": {
124
+ "anyOf": [
125
+ {
126
+ "type": "string",
127
+ "description": "filepath on your computer (or URL) of file"
128
+ },
129
+ {
130
+ "type": "object",
131
+ "properties": {
132
+ "name": { "type": "string", "description": "name of file" },
133
+ "data": {
134
+ "type": "string",
135
+ "description": "base64 representation of file"
136
+ },
137
+ "size": {
138
+ "type": "integer",
139
+ "description": "size of image in bytes"
140
+ },
141
+ "is_file": {
142
+ "type": "boolean",
143
+ "description": "true if the file has been uploaded to the server"
144
+ },
145
+ "orig_name": {
146
+ "type": "string",
147
+ "description": "original name of the file"
148
+ }
149
+ },
150
+ "required": ["name", "data"]
151
+ }
152
+ ]
153
+ }
154
+ },
155
+ "JSONSerializable": {
156
+ "type": {},
157
+ "description": "any valid json"
158
+ },
159
+ "GallerySerializable": {
160
+ "type": "array",
161
+ "items": {
162
+ "type": "array",
163
+ "items": false,
164
+ "maxSize": 2,
165
+ "minSize": 2,
166
+ "prefixItems": [
167
+ {
168
+ "type": "object",
169
+ "properties": {
170
+ "name": { "type": "string", "description": "name of file" },
171
+ "data": {
172
+ "type": "string",
173
+ "description": "base64 representation of file"
174
+ },
175
+ "size": {
176
+ "type": "integer",
177
+ "description": "size of image in bytes"
178
+ },
179
+ "is_file": {
180
+ "type": "boolean",
181
+ "description": "true if the file has been uploaded to the server"
182
+ },
183
+ "orig_name": {
184
+ "type": "string",
185
+ "description": "original name of the file"
186
+ }
187
+ },
188
+ "required": ["name", "data"]
189
+ },
190
+ {
191
+ "oneOf": [
192
+ { "type": "string", "description": "caption of image" },
193
+ { "type": "null" }
194
+ ]
195
+ }
196
+ ]
197
+ }
198
+ }
199
+ }
version.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"version":"1745607013632"}